# My global configuration: settings here apply to all jobs in this file.
global:
  scrape_interval: 15s  # Set the scrape interval to every 15 seconds. Default is every 1 minute. scrape_interval controls how often data is scraped from exporters.
  evaluation_interval: 15s  # Evaluate rules every 15 seconds. The default is every 1 minute. The evaluation interval controls how often alerting/recording rules are re-checked for updates.

# Load rules once and periodically evaluate them according to the global
# 'evaluation_interval'. Here we define our rules file paths.
# rule_files:
#   - "node_rules.yml"
#   - "db_rules.yml"

# A scrape configuration containing exactly one endpoint to scrape.
# In the scrape config we can define our job definitions.
scrape_configs:
  # The job name is added as a label `job=<job_name>` to any timeseries
  # scraped from this config.
  - job_name: 'node-exporter'
    # metrics_path defaults to '/metrics'
    # scheme defaults to 'http'.
    # Targets are the machines on which exporters are running and exposing
    # data at a particular port.
    static_configs:
      - targets: ['localhost:9100']