Skip to content

Commit

Permalink
Merged pull request #1397 from influxdata/nc-example-config
Browse files Browse the repository at this point in the history
Update example config to have default discoverer sections that are disabled
  • Loading branch information
nathanielc committed May 22, 2017
2 parents 9509ef9 + 0e6c9d3 commit a5347f0
Show file tree
Hide file tree
Showing 2 changed files with 138 additions and 126 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
# Bugfixes

- [#1396](https://github.com/influxdata/kapacitor/pull/1396): Fix broken ENV var config overrides for the kubernetes section.
- [#1397](https://github.com/influxdata/kapacitor/pull/1397): Update default configuration file to include sections for each discoverer service.

## v1.3.0-rc4 [2017-05-19]

Expand Down
263 changes: 137 additions & 126 deletions etc/kapacitor/kapacitor.conf
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,11 @@ default-retention-policy = ""
# api-servers = ["http://192.168.99.100:8443"]
# token = "..."
# ca-path = "/path/to/kubernetes/ca.crt"
#
# Kubernetes can also serve as a discoverer for scrape targets.
# In that case the type of resources to discover must be specified.
# Valid values are: "node", "pod", "service", and "endpoint".
# resource = "pod"



Expand Down Expand Up @@ -467,129 +472,135 @@ default-retention-policy = ""
batch-pending = 5
batch-timeout = "1s"

#[[scraper]]
# enabled = false
# name = "myscraper"
# discoverer-id = ""
# discoverer-service = ""
# db = "mydb"
# rp = "myrp"
# type = "prometheus"
# scheme = "http"
# metrics-path = "/metrics"
# scrape-interval = "1m0s"
# scrape-timeout = "10s"
# username = ""
# password = ""
# bearer-token = ""
# ssl-ca = ""
# ssl-cert = ""
# ssl-key = ""
# ssl-server-name = ""
# insecure-skip-verify = false
#
#[[azure]]
# enabled = false
# id = "myazure"
# port = 80
# subscription-id = ""
# tenant-id = ""
# client-id = ""
# client-secret = ""
# refresh-interval = "5m0s"
#
#[[consul]]
# enabled = false
# id = "myconsul"
# address = "127.0.0.1:8500"
# token = ""
# datacenter = ""
# tag-separator = ","
# scheme = "http"
# username = ""
# password = ""
# ssl-ca = ""
# ssl-cert = ""
# ssl-key = ""
# ssl-server-name = ""
# insecure-skip-verify = false
#
#[[dns]]
# enabled = false
# id = "mydns"
# refresh-interval = "30s"
# ## Type can be SRV, A, or AAAA
# type = "SRV"
# ## Port is the port to scrape for records returned by A or AAAA types
# port = 80
#
#[[ec2]]
# enabled = false
# id = "myec2"
# region = "us-east-1"
# access-key = ""
# secret-key = ""
# profile = ""
# refresh-interval = "1m0s"
# port = 80
#
#[[file-discovery]]
# enabled = false
# id = "myfile"
# refresh-interval = "5m0s"
# files = []
#
#[[gce]]
# enabled = false
# id = "mygce"
# project = ""
# zone = ""
# filter = ""
# refresh-interval = "1m0s"
# port = 80
# tag-separator = ","
#
#[[marathon]]
# enabled = false
# id = "mymarathon"
# timeout = "30s"
# refresh-interval = "30s"
# bearer-token = ""
# ssl-ca = ""
# ssl-cert = ""
# ssl-key = ""
# ssl-server-name = ""
# insecure-skip-verify = false
#
#[[nerve]]
# enabled = false
# id = "mynerve"
# timeout = "10s"
#
#[[serverset]]
# enabled = false
# id = "myserverset"
# timeout = "10s"
#
#[[static-discovery]]
# enabled = false
# id = "mystatic"
# targets = ["localhost:9100"]
# [static.labels]
# region = "us-east-1"
#
#[[triton]]
# enabled = false
# id = "mytriton"
# account = ""
# dns-suffix = ""
# endpoint = ""
# port = 9163
# refresh-interval = "1m0s"
# version = 1
# ssl-ca = ""
# ssl-cert = ""
# ssl-key = ""
# ssl-server-name = ""
# insecure-skip-verify = false
#
# Service Discovery and metric scraping

[[scraper]]
enabled = false
name = "myscraper"
# Specify the id of one of the discoverer services configured below.
discoverer-id = ""
# Specify the type of discoverer service being used.
discoverer-service = ""
db = "prometheus_raw"
rp = "autogen"
type = "prometheus"
scheme = "http"
metrics-path = "/metrics"
scrape-interval = "1m0s"
scrape-timeout = "10s"
username = ""
password = ""
bearer-token = ""
ssl-ca = ""
ssl-cert = ""
ssl-key = ""
ssl-server-name = ""
insecure-skip-verify = false

# Supported discovery services

[[azure]]
enabled = false
id = "myazure"
port = 80
subscription-id = ""
tenant-id = ""
client-id = ""
client-secret = ""
refresh-interval = "5m0s"

[[consul]]
enabled = false
id = "myconsul"
address = "127.0.0.1:8500"
token = ""
datacenter = ""
tag-separator = ","
scheme = "http"
username = ""
password = ""
ssl-ca = ""
ssl-cert = ""
ssl-key = ""
ssl-server-name = ""
insecure-skip-verify = false

[[dns]]
enabled = false
id = "mydns"
refresh-interval = "30s"
## Type can be SRV, A, or AAAA
type = "SRV"
## Port is the port to scrape for records returned by A or AAAA types
port = 80

[[ec2]]
enabled = false
id = "myec2"
region = "us-east-1"
access-key = ""
secret-key = ""
profile = ""
refresh-interval = "1m0s"
port = 80

[[file-discovery]]
enabled = false
id = "myfile"
refresh-interval = "5m0s"
files = []

[[gce]]
enabled = false
id = "mygce"
project = ""
zone = ""
filter = ""
refresh-interval = "1m0s"
port = 80
tag-separator = ","

[[marathon]]
enabled = false
id = "mymarathon"
timeout = "30s"
refresh-interval = "30s"
bearer-token = ""
ssl-ca = ""
ssl-cert = ""
ssl-key = ""
ssl-server-name = ""
insecure-skip-verify = false

[[nerve]]
enabled = false
id = "mynerve"
timeout = "10s"

[[serverset]]
enabled = false
id = "myserverset"
timeout = "10s"

[[static-discovery]]
enabled = false
id = "mystatic"
targets = ["localhost:9100"]
[static-discovery.labels]
region = "us-east-1"

[[triton]]
enabled = false
id = "mytriton"
account = ""
dns-suffix = ""
endpoint = ""
port = 9163
refresh-interval = "1m0s"
version = 1
ssl-ca = ""
ssl-cert = ""
ssl-key = ""
ssl-server-name = ""
insecure-skip-verify = false

0 comments on commit a5347f0

Please sign in to comment.