diff --git a/internal/config/config.go b/internal/config/config.go
index 24dec4169bfc9..9b8f287569da6 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"log"
 	"os"
@@ -554,21 +555,67 @@ func (c *Config) LoadConfig(path string) error {
 			return err
 		}
 	}
-	tbl, err := parseFile(path)
+	contents, err := loadFile(path)
 	if err != nil {
+		return fmt.Errorf("Error loading %s, %s", path, err)
+	}
+
+	if err = c.ParseConfig(bytes.NewBuffer(contents)); err != nil {
 		return fmt.Errorf("Error parsing %s, %s", path, err)
 	}
+	return nil
+}
+
+// trimBOM trims the Byte-Order-Marks from the beginning of the file.
+// this is for Windows compatibility only.
+// see https://github.com/influxdata/telegraf/issues/1378
+func trimBOM(f []byte) []byte {
+	return bytes.TrimPrefix(f, []byte("\xef\xbb\xbf"))
+}
+
+// loadFile loads a TOML configuration. When loading the file, it
+// will find environment variables and replace them.
+func loadFile(fpath string) ([]byte, error) {
+	contents, err := ioutil.ReadFile(fpath)
+	if err != nil {
+		return nil, err
+	}
+	// ugh windows why
+	contents = trimBOM(contents)
+
+	env_vars := envVarRe.FindAll(contents, -1)
+	for _, env_var := range env_vars {
+		env_val := os.Getenv(strings.TrimPrefix(string(env_var), "$"))
+		if env_val != "" {
+			contents = bytes.Replace(contents, env_var, []byte(env_val), 1)
+		}
+	}
+
+	return contents, nil
+}
+
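+// ParseConfig parses Telegraf configuration from the given reader and
+// applies it to this Config. LoadConfig calls it after reading a file
+// from disk; tests and other callers can use it to load configuration
+// from any io.Reader. See TestConfig_ParseConfig below.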
+func (c *Config) ParseConfig(reader io.Reader) error {
+	contents, err := ioutil.ReadAll(reader)
+	if err != nil {
+		return err
+	}
+
+	tbl, err := toml.Parse(contents)
+	if err != nil {
+		return err
+	}
+
 	// Parse tags tables first:
 	for _, tableName := range []string{"tags", "global_tags"} {
 		if val, ok := tbl.Fields[tableName]; ok {
 			subTable, ok := val.(*ast.Table)
 			if !ok {
-				return fmt.Errorf("%s: invalid configuration", path)
+				return fmt.Errorf("invalid configuration")
 			}
 			if err = config.UnmarshalTable(subTable, c.Tags); err != nil {
 				log.Printf("E! Could not parse [global_tags] config\n")
-				return fmt.Errorf("Error parsing %s, %s", path, err)
+				return err
 			}
 		}
 	}
@@ -577,11 +624,11 @@ func (c *Config) LoadConfig(path string) error {
 	if val, ok := tbl.Fields["agent"]; ok {
 		subTable, ok := val.(*ast.Table)
 		if !ok {
-			return fmt.Errorf("%s: invalid configuration", path)
+			return fmt.Errorf("invalid configuration")
 		}
 		if err = config.UnmarshalTable(subTable, c.Agent); err != nil {
 			log.Printf("E! Could not parse [agent] config\n")
-			return fmt.Errorf("Error parsing %s, %s", path, err)
+			return err
 		}
 	}
@@ -589,7 +636,7 @@ func (c *Config) LoadConfig(path string) error {
 	for name, val := range tbl.Fields {
 		subTable, ok := val.(*ast.Table)
 		if !ok {
-			return fmt.Errorf("%s: invalid configuration", path)
+			return fmt.Errorf("invalid configuration")
 		}
 		switch name {
@@ -600,17 +647,16 @@ func (c *Config) LoadConfig(path string) error {
 				// legacy [outputs.influxdb] support
 				case *ast.Table:
 					if err = c.addOutput(pluginName, pluginSubTable); err != nil {
-						return fmt.Errorf("Error parsing %s, %s", path, err)
+						return err
 					}
 				case []*ast.Table:
 					for _, t := range pluginSubTable {
 						if err = c.addOutput(pluginName, t); err != nil {
-							return fmt.Errorf("Error parsing %s, %s", path, err)
+							return err
 						}
 					}
 				default:
-					return fmt.Errorf("Unsupported config format: %s, file %s",
-						pluginName, path)
+					return fmt.Errorf("Unsupported config format: %s", pluginName)
 				}
 			}
 		case "inputs", "plugins":
@@ -619,17 +665,16 @@ func (c *Config) LoadConfig(path string) error {
 				// legacy [inputs.cpu] support
 				case *ast.Table:
 					if err = c.addInput(pluginName, pluginSubTable); err != nil {
-						return fmt.Errorf("Error parsing %s, %s", path, err)
+						return err
 					}
 				case []*ast.Table:
 					for _, t := range pluginSubTable {
 						if err = c.addInput(pluginName, t); err != nil {
-							return fmt.Errorf("Error parsing %s, %s", path, err)
+							return err
 						}
 					}
 				default:
-					return fmt.Errorf("Unsupported config format: %s, file %s",
-						pluginName, path)
+					return fmt.Errorf("Unsupported config format: %s", pluginName)
 				}
 			}
 		case "processors":
@@ -638,12 +683,11 @@ func (c *Config) LoadConfig(path string) error {
 				case []*ast.Table:
 					for _, t := range pluginSubTable {
 						if err = c.addProcessor(pluginName, t); err != nil {
-							return fmt.Errorf("Error parsing %s, %s", path, err)
+							return err
 						}
 					}
 				default:
-					return fmt.Errorf("Unsupported config format: %s, file %s",
-						pluginName, path)
+					return fmt.Errorf("Unsupported config format: %s", pluginName)
 				}
 			}
 		case "aggregators":
@@ -652,19 +696,18 @@ func (c *Config) LoadConfig(path string) error {
 				case []*ast.Table:
 					for _, t := range pluginSubTable {
 						if err = c.addAggregator(pluginName, t); err != nil {
-							return fmt.Errorf("Error parsing %s, %s", path, err)
+							return err
 						}
 					}
 				default:
-					return fmt.Errorf("Unsupported config format: %s, file %s",
-						pluginName, path)
+					return fmt.Errorf("Unsupported config format: %s", pluginName)
 				}
 			}
 		// Assume it's an input input for legacy config file support if no other
 		// identifiers are present
 		default:
 			if err = c.addInput(name, subTable); err != nil {
-				return fmt.Errorf("Error parsing %s, %s", path, err)
+				return err
 			}
 		}
 	}
@@ -672,36 +715,8 @@ func (c *Config) LoadConfig(path string) error {
 	if len(c.Processors) > 1 {
 		sort.Sort(c.Processors)
 	}
-	return nil
-}
-
-// trimBOM trims the Byte-Order-Marks from the beginning of the file.
-// this is for Windows compatability only.
-// see https://github.com/influxdata/telegraf/issues/1378
-func trimBOM(f []byte) []byte {
-	return bytes.TrimPrefix(f, []byte("\xef\xbb\xbf"))
-}
-
-// parseFile loads a TOML configuration from a provided path and
-// returns the AST produced from the TOML parser. When loading the file, it
-// will find environment variables and replace them.
-func parseFile(fpath string) (*ast.Table, error) {
-	contents, err := ioutil.ReadFile(fpath)
-	if err != nil {
-		return nil, err
-	}
-	// ugh windows why
-	contents = trimBOM(contents)
-
-	env_vars := envVarRe.FindAll(contents, -1)
-	for _, env_var := range env_vars {
-		env_val := os.Getenv(strings.TrimPrefix(string(env_var), "$"))
-		if env_val != "" {
-			contents = bytes.Replace(contents, env_var, []byte(env_val), 1)
-		}
-	}
-
-	return toml.Parse(contents)
+	return nil
 }
 
 func (c *Config) addAggregator(name string, table *ast.Table) error {
diff --git a/internal/config/config_test.go b/internal/config/config_test.go
index 3498d815d0078..895ad60b1aa8a 100644
--- a/internal/config/config_test.go
+++ b/internal/config/config_test.go
@@ -2,6 +2,7 @@ package config
 import (
 	"os"
+	"strings"
 	"testing"
 	"time"
@@ -174,3 +175,20 @@ func TestConfig_LoadDirectory(t *testing.T) {
 	assert.Equal(t, pConfig, c.Inputs[3].Config,
 		"Merged Testdata did not produce correct procstat metadata.")
 }
+
+func TestConfig_ParseConfig(t *testing.T) {
+	c := NewConfig()
+	r := strings.NewReader(`
+[global_tags]
+  foo = "bar"
+
+[agent]
+  debug = true
+  `)
+
+	err := c.ParseConfig(r)
+	assert.NoError(t, err)
+
+	assert.Equal(t, "bar", c.Tags["foo"])
+	assert.True(t, c.Agent.Debug)
+}
diff --git a/plugins/inputs/all/all.go b/plugins/inputs/all/all.go
index 7846f8c9a851b..3a99e2daa471e 100644
--- a/plugins/inputs/all/all.go
+++ b/plugins/inputs/all/all.go
@@ -31,6 +31,7 @@ import (
 	_ "github.com/influxdata/telegraf/plugins/inputs/ipmi_sensor"
 	_ "github.com/influxdata/telegraf/plugins/inputs/iptables"
 	_ "github.com/influxdata/telegraf/plugins/inputs/jolokia"
+	_ "github.com/influxdata/telegraf/plugins/inputs/jolokia2"
 	_ "github.com/influxdata/telegraf/plugins/inputs/kafka_consumer"
 	_ "github.com/influxdata/telegraf/plugins/inputs/kubernetes"
 	_ "github.com/influxdata/telegraf/plugins/inputs/leofs"
diff --git a/plugins/inputs/jolokia2/README.md b/plugins/inputs/jolokia2/README.md
new file mode 100644
index 0000000000000..5f01149ade26e
--- /dev/null
+++ b/plugins/inputs/jolokia2/README.md
@@ -0,0 +1,111 @@
+# Jolokia2 Input Plugin
+
+The [Jolokia](http://jolokia.org) input plugin collects JVM metrics exposed as JMX MBean attributes
+through the Jolokia REST endpoint and its [JSON-over-HTTP protocol](https://jolokia.org/reference/html/protocol.html).
+
+### Configuration:
+
+```toml
+
+# Read JMX metrics through Jolokia
+
+[[inputs.jolokia2]]
+  #default_field_delimiter = "."
+  #default_field_prefix = ""
+  #default_tag_delimiter = "_"
+  #default_tag_prefix = "mbean"
+
+  # Add agents to query
+  [inputs.jolokia2.agents]
+    urls = ["http://kafka:8080/jolokia"]
+
+  [[inputs.jolokia2.metric]]
+    name = "jvm_runtime"
+    mbean = "java.lang:type=Runtime"
+    paths = ["Uptime"]
+
+  [[inputs.jolokia2.metric]]
+    name = "jvm_memory"
+    mbean = "java.lang:type=Memory"
+    paths = ["HeapMemoryUsage", "NonHeapMemoryUsage", "ObjectPendingFinalizationCount"]
+
+  # By default, all mbean keys are added as tags.
+  # Use 'taginclude' to specify the exact tags to add.
+  [[inputs.jolokia2.metric]]
+    name = "jvm_g1_garbage_collector"
+    mbean = "java.lang:name=G1*,type=GarbageCollector"
+    paths = [
+      "CollectionTime",
+      "CollectionCount",
+      "LastGcInfo/duration",
+      "LastGcInfo/GcThreadCount",
+    ]
+    taginclude = ["name"]
+
+  # Use 'tagexclude' to specify just the tags to remove.
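+  # If a tag name matches both 'taginclude' and 'tagexclude', it is
+  # kept: included names are checked before excluded ones.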
+  [[inputs.jolokia2.metric]]
+    name = "jvm_memory_pool"
+    mbean = "java.lang:name=*,type=MemoryPool"
+    paths = ["Usage", "PeakUsage", "CollectionUsage"]
+    tagexclude = ["type"]
+
+  [[inputs.jolokia2.metric]]
+    name = "kafka_topic"
+    mbean = "kafka.server:name=*,topic=*,type=BrokerTopicMetrics"
+    field_prefix = "$1"
+    taginclude = ["topic"]
+
+  [[inputs.jolokia2.metric]]
+    name = "kafka_log"
+    mbean = "kafka.log:name=*,partition=*,topic=*,type=Log"
+    field_name = "$1"
+    taginclude = ["topic", "partition"]
+```
+
+To specify timeouts for slow or overloaded clients:
+
+```toml
+[[inputs.jolokia2]]
+  [inputs.jolokia2.agents]
+    urls = ["http://kafka:8080/jolokia"]
+
+    # The amount of time to wait for any requests made by this client.
+    # Includes connection time, any redirects, and reading the response body.
+    # (default is 5s)
+    response_timeout = "10s"
+```
+
+To specify SSL options, add details to the `agents` configuration:
+
+```toml
+[[inputs.jolokia2]]
+  [inputs.jolokia2.agents]
+    urls = [
+      "https://kafka:8080/jolokia",
+    ]
+    #username = ""
+    #password = ""
+    ssl_ca = "/var/private/ca.pem"
+    ssl_cert = "/var/private/client.pem"
+    ssl_key = "/var/private/client-key.pem"
+    #insecure_skip_verify = false
+```
+
+To interact with agents via a Jolokia proxy, use a `proxy` configuration instead:
+
+```toml
+[[inputs.jolokia2]]
+  [inputs.jolokia2.proxy]
+    url = "https://proxy:8080/jolokia"
+    response_timeout = "10s"
+    #default_target_username = ""
+    #default_target_password = ""
+    ssl_ca = "/var/private/ca.pem"
+    ssl_cert = "/var/private/client.pem"
+    ssl_key = "/var/private/client-key.pem"
+
+    [[inputs.jolokia2.proxy.target]]
+      url = "service:jmx:rmi:///jndi/rmi://targethost:9999/jmxrmi"
+      #username = ""
+      #password = ""
+```
diff --git a/plugins/inputs/jolokia2/agent.go b/plugins/inputs/jolokia2/agent.go
new file mode 100644
index 0000000000000..ff68dc82c3f88
--- /dev/null
+++ b/plugins/inputs/jolokia2/agent.go
@@ -0,0 +1,165 @@
+package jolokia2
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"path"
+)
+
+type ReadResponse struct {
+	Status  int
+	Request ReadRequest
+	Value   interface{}
+}
+
+type ReadRequest struct {
+	Mbean      string
+	Attributes []string
+	Path       string
+}
+
+type Agent struct {
+	url      string
+	client   *http.Client
+	username string
+	password string
+}
+
+type agentResponse struct {
+	Status  int          `json:"status"`
+	Request agentRequest `json:"request"`
+	Value   interface{}  `json:"value"`
+}
+
+type agentRequest struct {
+	Type      string      `json:"type"`
+	Mbean     string      `json:"mbean"`
+	Attribute interface{} `json:"attribute,omitempty"`
+	Path      string      `json:"path,omitempty"`
+}
+
+func NewAgent(url string, config *remoteConfig) *Agent {
+	client := &http.Client{
+		Timeout: config.ResponseTimeout,
+	}
+
+	return &Agent{
+		url:      url,
+		client:   client,
+		username: config.Username,
+		password: config.Password,
+	}
+}
+
+func (a *Agent) Read(requests []ReadRequest) ([]ReadResponse, error) {
+	requestObjects := makeRequests(requests)
+	requestBody, err := json.Marshal(requestObjects)
+	if err != nil {
+		return nil, err
+	}
+
+	requestUrl, err := makeReadUrl(a.url, a.username, a.password)
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest("POST", requestUrl, bytes.NewBuffer(requestBody))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add("Content-Type", "application/json")
+
+	resp, err := a.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("Response from url \"%s\" has status code %d (%s), expected %d (%s)",
+			a.url, resp.StatusCode, http.StatusText(resp.StatusCode), http.StatusOK, http.StatusText(http.StatusOK))
+	}
+
+	responseBody, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	var responses []agentResponse
+	if err = json.Unmarshal(responseBody, &responses); err != nil {
+		return nil, fmt.Errorf("Error decoding JSON response: %s: %s", err, responseBody)
+	}
+
+	return makeResponses(responses), nil
+}
+
+func makeReadUrl(configUrl, username, password string) (string, error) {
+	parsedUrl, err := url.Parse(configUrl)
+	if err != nil {
+		return "", err
+	}
+
+	readUrl := url.URL{
+		Host:   parsedUrl.Host,
+		Scheme: parsedUrl.Scheme,
+	}
+
+	if username != "" || password != "" {
+		readUrl.User = url.UserPassword(username, password)
+	}
+
+	readUrl.Path = path.Join(parsedUrl.Path, "read")
+	return readUrl.String(), nil
+}
+
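+// makeRequests converts ReadRequests into the payload expected by
+// Jolokia's bulk "read" endpoint. A single attribute is sent as a JSON
+// string and multiple attributes as an array, so a request for the
+// Uptime of java.lang:type=Runtime becomes:
+//
+//	{"type":"read","mbean":"java.lang:type=Runtime","attribute":"Uptime"}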
+func makeRequests(requests []ReadRequest) []agentRequest {
+	requestObjects := make([]agentRequest, len(requests))
+	for i, request := range requests {
+		requestObjects[i] = agentRequest{
+			Type:  "read",
+			Mbean: request.Mbean,
+			Path:  request.Path,
+		}
+		if len(request.Attributes) == 1 {
+			requestObjects[i].Attribute = request.Attributes[0]
+		}
+		if len(request.Attributes) > 1 {
+			requestObjects[i].Attribute = request.Attributes
+		}
+	}
+
+	return requestObjects
+}
+
+func makeResponses(responseObjects []agentResponse) []ReadResponse {
+	responses := make([]ReadResponse, len(responseObjects))
+
+	for i, object := range responseObjects {
+		request := ReadRequest{
+			Mbean:      object.Request.Mbean,
+			Path:       object.Request.Path,
+			Attributes: []string{},
+		}
+
+		attrValue := object.Request.Attribute
+		if attrValue != nil {
+			attribute, ok := attrValue.(string)
+			if ok {
+				request.Attributes = []string{attribute}
+			} else {
+				attributes, _ := attrValue.([]interface{})
+				request.Attributes = make([]string, len(attributes))
+				for i, attr := range attributes {
+					request.Attributes[i] = attr.(string)
+				}
+			}
+		}
+
+		responses[i] = ReadResponse{
+			Request: request,
+			Value:   object.Value,
+			Status:  object.Status,
+		}
+	}
+
+	return responses
+}
diff --git a/plugins/inputs/jolokia2/gatherer.go b/plugins/inputs/jolokia2/gatherer.go
new file mode 100644
index 0000000000000..3d45a9c3ef772
--- /dev/null
+++ b/plugins/inputs/jolokia2/gatherer.go
@@ -0,0 +1,210 @@
+package jolokia2
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/influxdata/telegraf"
+)
+
+type Metric struct {
+	Name           string
+	Mbean          string
+	Paths          []string
+	AllowTags      []string
+	DenyTags       []string
+	FieldPrefix    string
+	FieldDelimiter string
+	TagPrefix      string
+	TagDelimiter   string
+}
+
+type Gatherer struct {
+	metrics     []Metric
+	accumulator telegraf.Accumulator
+}
+
+func NewGatherer(metrics []Metric, acc telegraf.Accumulator) *Gatherer {
+	return &Gatherer{
+		metrics:     metrics,
+		accumulator: acc,
+	}
+}
+
+func (g *Gatherer) Gather(responses []ReadResponse, tags map[string]string) {
+	for _, metric := range g.metrics {
+		g.gatherMetric(metric, responses, tags)
+	}
+}
+
+func (g *Gatherer) gatherMetric(metric Metric, responses []ReadResponse, tags map[string]string) {
+	hasPattern := strings.Contains(metric.Mbean, "*")
+
+	for _, response := range responses {
+		request := response.Request
+
+		if metric.Mbean != request.Mbean {
+			continue
+		}
+
+		if !hasPattern {
+			fieldMap := make(map[string]interface{})
+			extractFieldsFromValue(request.Attributes, request.Path,
+				metric.FieldPrefix, metric.FieldDelimiter, response.Value, fieldMap)
+
+			tagMap := extractTagsFromName(request.Mbean,
+				metric.AllowTags, metric.DenyTags, 
metric.TagPrefix, metric.TagDelimiter, tags) + + g.accumulator.AddFields(metric.Name, fieldMap, tagMap) + + } else { + valueMap, ok := response.Value.(map[string]interface{}) + + if !ok { + panic(fmt.Sprintf("FIXME! There should be a map here for %s!\n", request.Mbean)) + } + + for mbeanName, mbeanValue := range valueMap { + fieldMap := make(map[string]interface{}) + extractFieldsFromValue(request.Attributes, request.Path, + metric.FieldPrefix, metric.FieldDelimiter, mbeanValue, fieldMap) + + tagMap := extractTagsFromName(mbeanName, + metric.AllowTags, metric.DenyTags, metric.TagPrefix, metric.TagDelimiter, tags) + + g.accumulator.AddFields(metric.Name, fieldMap, tagMap) + } + } + } +} + +func extractTagsFromName(name string, allowTags, denyTags []string, tagPrefix, tagDelimiter string, appendTags map[string]string) map[string]string { + tagMap := make(map[string]string) + + object := strings.SplitN(name, ":", 2) + domain := object[0] + if domain != "" && len(object) == 2 { + properties := object[1] + + for _, property := range strings.Split(properties, ",") { + propertyPair := strings.SplitN(property, "=", 2) + if len(propertyPair) != 2 { + continue + } + + propertyName := propertyPair[0] + if propertyName == "" { + continue + } + + if tagCanBeExtracted(propertyName, allowTags, denyTags) { + if tagPrefix != "" { + propertyName = tagPrefix + tagDelimiter + propertyName + } + + tagMap[propertyName] = propertyPair[1] + } + } + } + + for tagKey, tagValue := range appendTags { + tagMap[tagKey] = tagValue + } + + return tagMap +} + +func tagCanBeExtracted(name string, allowTags, denyTags []string) bool { + for _, t := range allowTags { + if name == t { + return true + } + } + + for _, t := range denyTags { + if name == t { + return false + } + } + + if len(allowTags) == 0 { + return true + } + + return false +} + +func extractFieldsFromValue(attributes []string, path, fieldPrefix, fieldDelimiter string, value interface{}, fieldMap map[string]interface{}) { + valueMap, ok := value.(map[string]interface{}) + if ok { + // complex value + if len(attributes) == 0 { + // if there were no attributes requested, + // then the keys are attributes + fieldName := fieldPrefix + extractInnerFieldsFromValue(fieldName, fieldDelimiter, valueMap, fieldMap) + + } else if len(attributes) == 1 { + // if there was a single attribute requested, + // then the keys are the attribute's properties + fieldName := joinFieldName(attributes[0], path, fieldPrefix, fieldDelimiter) + extractInnerFieldsFromValue(fieldName, fieldDelimiter, valueMap, fieldMap) + + } else { + // if there were multiple attributes requested, + // then the keys are the attribute names + for _, attribute := range attributes { + fieldName := joinFieldName(attribute, path, fieldPrefix, fieldDelimiter) + extractInnerFieldsFromValue(fieldName, fieldDelimiter, valueMap[attribute], fieldMap) + } + } + } else { + // scalar value + var fieldName string + if len(attributes) == 0 { + fieldName = joinFieldName("value", path, fieldPrefix, fieldDelimiter) + } else { + fieldName = joinFieldName(attributes[0], path, fieldPrefix, fieldDelimiter) + } + + fieldMap[fieldName] = value + } +} + +func joinFieldName(attribute, path, prefix, delimiter string) string { + fieldName := attribute + if prefix != "" { + fieldName = prefix + delimiter + fieldName + } + + if path != "" { + fieldName = fieldName + delimiter + strings.Replace(path, "/", delimiter, -1) + } + + return fieldName +} + +func extractInnerFieldsFromValue(name, delimiter string, value interface{}, fieldMap 
map[string]interface{}) {
+	if valueMap, ok := value.(map[string]interface{}); ok {
+		// keep going until we get to something that is not a map
+		for key, innerValue := range valueMap {
+			var innerName string
+
+			if name == "" {
+				innerName = key
+			} else {
+				innerName = name + delimiter + key
+			}
+
+			extractInnerFieldsFromValue(innerName, delimiter, innerValue, fieldMap)
+		}
+
+		return
+	}
+
+	if name == "" {
+		name = "value"
+	}
+
+	fieldMap[name] = value
+}
diff --git a/plugins/inputs/jolokia2/jolokia.go b/plugins/inputs/jolokia2/jolokia.go
new file mode 100644
index 0000000000000..701a4b1cf777c
--- /dev/null
+++ b/plugins/inputs/jolokia2/jolokia.go
@@ -0,0 +1,161 @@
+package jolokia2
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/influxdata/telegraf"
+	"github.com/influxdata/telegraf/plugins/inputs"
+)
+
+type Jolokia struct {
+	Agents                agentsConfig
+	Proxy                 proxyConfig
+	Metrics               []metricConfig `toml:"metric"`
+	DefaultFieldPrefix    string         `toml:"default_field_prefix"`
+	DefaultFieldDelimiter string         `toml:"default_field_delimiter"`
+	DefaultTagPrefix      string         `toml:"default_tag_prefix"`
+	DefaultTagDelimiter   string         `toml:"default_tag_delimiter"`
+}
+
+type remoteConfig struct {
+	ResponseTimeout    time.Duration `toml:"response_timeout"`
+	Username           string
+	Password           string
+	SSLCA              string `toml:"ssl_ca"`
+	SSLCert            string `toml:"ssl_cert"`
+	SSLKey             string `toml:"ssl_key"`
+	InsecureSkipVerify bool   `toml:"insecure_skip_verify"`
+}
+
+type agentsConfig struct {
+	remoteConfig
+	Urls []string
+}
+
+type proxyConfig struct {
+	remoteConfig
+	Url                   string
+	DefaultTargetUsername string `toml:"default_target_username"`
+	DefaultTargetPassword string `toml:"default_target_password"`
+
+	Targets []proxyTargetConfig
+}
+
+type proxyTargetConfig struct {
+	Url      string
+	Username string
+	Password string
+}
+
+type metricConfig struct {
+	Name           string
+	Mbean          string
+	Paths          []string
+	FieldName      string   `toml:"field_name"`
+	FieldPrefix    *string  `toml:"field_prefix"`
+	FieldDelimiter *string  `toml:"field_delimiter"`
+	TagPrefix      *string  `toml:"tag_prefix"`
+	TagDelimiter   *string  `toml:"tag_delimiter"`
+	TagInclude     []string `toml:"taginclude"`
+	TagExclude     []string `toml:"tagexclude"`
+}
+
+func (jc *Jolokia) SampleConfig() string {
+	return fmt.Sprintf(`
+# %s
+
+[[inputs.jolokia2]]
+  # Add a metric name prefix
+  #name_prefix = "example_"
+
+  # Add agents to query
+  [inputs.jolokia2.agents]
+    urls = ["http://kafka:8080/jolokia"]
+    #username = ""
+    #password = ""
+    #ssl_ca = "/var/private/ca.pem"
+    #ssl_cert = "/var/private/client.pem"
+    #ssl_key = "/var/private/client-key.pem"
+    #insecure_skip_verify = false
+
+  [[inputs.jolokia2.metric]]
+    name = "jvm_runtime"
+    mbean = "java.lang:type=Runtime"
+    paths = ["Uptime"]
+`, jc.Description())
+}
+
+func (jc *Jolokia) Description() string {
+	return "Read JMX metrics from a Jolokia REST endpoint"
+}
+
+func (jc *Jolokia) Gather(acc telegraf.Accumulator) error {
+	var metrics []Metric
+
+	for _, config := range jc.Metrics {
+		metric := Metric{
+			Name:      config.Name,
+			Mbean:     config.Mbean,
+			Paths:     config.Paths,
+			AllowTags: config.TagInclude,
+			DenyTags:  config.TagExclude,
+		}
+
+		if config.FieldPrefix == nil {
+			metric.FieldPrefix = jc.DefaultFieldPrefix
+		} else {
+			metric.FieldPrefix = *config.FieldPrefix
+		}
+
+		if config.FieldDelimiter == nil {
+			metric.FieldDelimiter = jc.DefaultFieldDelimiter
+		} else {
+			metric.FieldDelimiter = *config.FieldDelimiter
+		}
+
+		if config.TagPrefix == nil {
+			metric.TagPrefix = jc.DefaultTagPrefix
+		} else {
+			metric.TagPrefix = *config.TagPrefix
+		}
+
+		if config.TagDelimiter == nil {
+			metric.TagDelimiter = jc.DefaultTagDelimiter
+		} else {
+			metric.TagDelimiter = *config.TagDelimiter
+		}
+
+		metrics = append(metrics, metric)
+	}
+
+	gatherer := NewGatherer(metrics, acc)
+	requests := RequestPayload(metrics)
+
+	// for each remote config...
+	for _, url := range jc.Agents.Urls {
+		agent := NewAgent(url, &jc.Agents.remoteConfig)
+		tags := map[string]string{"jolokia_agent_url": agent.url}
+
+		responses, err := agent.Read(requests)
+		if err != nil {
+			return err
+		}
+
+		gatherer.Gather(responses, tags)
+	}
+
+	return nil
+}
+
+func init() {
+	inputs.Add("jolokia2", func() telegraf.Input {
+		return &Jolokia{
+			Metrics:               []metricConfig{},
+			DefaultFieldPrefix:    "",
+			DefaultFieldDelimiter: ".",
+			DefaultTagPrefix:      "mbean",
+			DefaultTagDelimiter:   "_",
+		}
+	})
+}
diff --git a/plugins/inputs/jolokia2/jolokia_test.go b/plugins/inputs/jolokia2/jolokia_test.go
new file mode 100644
index 0000000000000..8b471d785e84d
--- /dev/null
+++ b/plugins/inputs/jolokia2/jolokia_test.go
@@ -0,0 +1,113 @@
+package jolokia2
+
+import (
+	"errors"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"net/http/httptest"
+	"strings"
+	"testing"
+
+	"github.com/influxdata/telegraf/internal/config"
+	"github.com/influxdata/telegraf/testutil"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/influxdata/toml"
+)
+
+func TestJolokia2_ScalarValuesFixture(t *testing.T) {
+	runFixture(t, "./testdata/scalar_values.toml")
+}
+
+func TestJolokia2_ObjectValuesFixture(t *testing.T) {
+	runFixture(t, "./testdata/object_values.toml")
+}
+
+func TestJolokia2_TagAndFieldCustomizations(t *testing.T) {
+	runFixture(t, "./testdata/tag_and_field_customizations.toml")
+}
+
+func TestJolokia2_JvmFixture(t *testing.T) {
+	runFixture(t, "./testdata/jvm.toml")
+}
+
+//func TestJolokia2_KafkaLogFixture(t *testing.T) {
+//	runFixture(t, "./testdata/kafka_log.toml")
+//}
+
+//func TestJolokia2_KafkaTopicFixture(t *testing.T) {
+//	runFixture(t, "./testdata/kafka_topic.toml")
+//}
+
+func runFixture(t *testing.T, path string) {
+	fixture := setupFixture(t, path)
+
+	server := setupServer(http.StatusOK, fixture.Response)
+	defer server.Close()
+
+	jolokia, err := setupPlugin(fixture.Config, server.URL)
+	if err != nil {
+		t.Fatalf("Could not setup plugin. %v", err)
+	}
+
+	var acc testutil.Accumulator
+	err = jolokia.Gather(&acc)
+
+	assert.NoError(t, err)
+	for _, expect := range fixture.Expects {
+		expect.Tags["jolokia_agent_url"] = server.URL
+		acc.AssertContainsTaggedFields(t,
+			expect.Measurement, expect.Fields, expect.Tags)
+	}
+}
+
+type Fixture struct {
+	Config   string
+	Expects  []Expect
+	Response string
+}
+
+type Expect struct {
+	Measurement string
+	Tags        map[string]string
+	Fields      map[string]interface{}
+}
+
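+// setupFixture loads a testdata TOML file with three sections: 'config'
+// holds the jolokia2 plugin configuration under test, 'response' holds
+// the canned JSON body served by the fake agent, and each '[[expects]]'
+// block lists a measurement with the tags and fields the accumulator
+// should contain after Gather runs.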
%v", err) + } + + return &fixture +} + +func setupServer(status int, resp string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + fmt.Fprintln(w, resp) + })) +} + +func setupPlugin(conf, url string) (*Jolokia, error) { + c := config.NewConfig() + r := strings.NewReader(conf) + err := c.ParseConfig(r) + if err != nil { + return nil, err + } + + jc := c.Inputs[0].Input.(*Jolokia) + if jc == nil { + return nil, errors.New("Missing jolokia2 from config") + } + + jc.Agents.Urls = []string{url} + return jc, nil +} diff --git a/plugins/inputs/jolokia2/payload.go b/plugins/inputs/jolokia2/payload.go new file mode 100644 index 0000000000000..e80f840a0261d --- /dev/null +++ b/plugins/inputs/jolokia2/payload.go @@ -0,0 +1,79 @@ +package jolokia2 + +import ( + "sort" + "strings" +) + +func RequestPayload(metrics []Metric) []ReadRequest { + var requests []ReadRequest + for _, metric := range metrics { + + if len(metric.Paths) == 0 { + requests = append(requests, ReadRequest{ + Mbean: metric.Mbean, + Attributes: []string{}, + }) + } else { + attributes := make(map[string][]string) + + for _, path := range metric.Paths { + segments := strings.Split(path, "/") + attribute := segments[0] + + if _, ok := attributes[attribute]; !ok { + attributes[attribute] = make([]string, 0) + } + + if len(segments) > 1 { + paths := attributes[attribute] + attributes[attribute] = append(paths, strings.Join(segments[1:], "/")) + } + } + + rootAttributes := payloadAttributesWithoutPaths(attributes) + if len(rootAttributes) > 0 { + requests = append(requests, ReadRequest{ + Mbean: metric.Mbean, + Attributes: rootAttributes, + }) + } + + for _, deepAttribute := range payloadAttributesWithPaths(attributes) { + for _, path := range attributes[deepAttribute] { + requests = append(requests, ReadRequest{ + Mbean: metric.Mbean, + Attributes: []string{deepAttribute}, + Path: path, + }) + } + } + } + } + + return requests +} + +func payloadAttributesWithoutPaths(attributes map[string][]string) []string { + results := make([]string, 0) + for attr, paths := range attributes { + if len(paths) == 0 { + results = append(results, attr) + } + } + + sort.Strings(results) + return results +} + +func payloadAttributesWithPaths(attributes map[string][]string) []string { + results := make([]string, 0) + for attr, paths := range attributes { + if len(paths) != 0 { + results = append(results, attr) + } + } + + sort.Strings(results) + return results +} diff --git a/plugins/inputs/jolokia2/payload_test.go b/plugins/inputs/jolokia2/payload_test.go new file mode 100644 index 0000000000000..9cdbebff91ec6 --- /dev/null +++ b/plugins/inputs/jolokia2/payload_test.go @@ -0,0 +1,104 @@ +package jolokia2 + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestJolokia2_BuildRequestPayloads(t *testing.T) { + cases := []struct { + metric Metric + expected []ReadRequest + }{ + { + metric: Metric{ + Name: "object", + Mbean: "test:foo=bar", + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{}, + }, + }, + }, { + metric: Metric{ + Name: "object_with_an_attribute", + Mbean: "test:foo=bar", + Paths: []string{"biz"}, + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{"biz"}, + }, + }, + }, { + metric: Metric{ + Name: "object_with_attributes", + Mbean: "test:foo=bar", + Paths: []string{"baz", "biz"}, + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: 
"test:foo=bar", + Attributes: []string{"baz", "biz"}, + }, + }, + }, { + metric: Metric{ + Name: "object_with_an_attribute_and_path", + Mbean: "test:foo=bar", + Paths: []string{"biz/baz"}, + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{"biz"}, + Path: "baz", + }, + }, + }, { + metric: Metric{ + Name: "object_with_an_attribute_and_a_deep_path", + Mbean: "test:foo=bar", + Paths: []string{"biz/baz/fiz/faz"}, + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{"biz"}, + Path: "baz/fiz/faz", + }, + }, + }, { + metric: Metric{ + Name: "object_with_attributes_and_paths", + Mbean: "test:foo=bar", + Paths: []string{"baz/biz", "faz/fiz"}, + }, + expected: []ReadRequest{ + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{"baz"}, + Path: "biz", + }, + ReadRequest{ + Mbean: "test:foo=bar", + Attributes: []string{"faz"}, + Path: "fiz", + }, + }, + }, + } + + for _, c := range cases { + payload := RequestPayload([]Metric{c.metric}) + + assert.Equal(t, len(c.expected), len(payload), "Failing case: "+c.metric.Name) + for _, actual := range payload { + assert.Contains(t, c.expected, actual, "Failing case: "+c.metric.Name) + } + } +} diff --git a/plugins/inputs/jolokia2/testdata/jvm.toml b/plugins/inputs/jolokia2/testdata/jvm.toml new file mode 100644 index 0000000000000..5e10cc8759e26 --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/jvm.toml @@ -0,0 +1,112 @@ +config = ''' + [[inputs.jolokia2]] + default_field_delimiter = "_" + default_tag_prefix = "" + + [[inputs.jolokia2.metric]] + name = "jvm_gc" + mbean = "java.lang:name=G1*,type=GarbageCollector" + paths = [ + "CollectionTime", + "CollectionCount" + ] + taginclude = ["name"] + + [[inputs.jolokia2.metric]] + name = "jvm_memory" + mbean = "java.lang:type=Memory" + paths = [ + "HeapMemoryUsage", + "NonHeapMemoryUsage", + "ObjectPendingFinalizationCount", + ] + tagexclude = ["type"] +''' + +response = ''' + [{ + "request": { + "mbean": "java.lang:name=G1*,type=GarbageCollector", + "attribute": [ + "CollectionTime", + "CollectionCount" + ], + "type": "read" + }, + "value": { + "java.lang:name=G1 Young Generation,type=GarbageCollector": { + "CollectionTime": 123, + "CollectionCount": 456 + }, + "java.lang:name=G1 Old Generation,type=GarbageCollector": { + "CollectionTime": 321, + "CollectionCount": 654 + } + }, + "timestamp": 1485180256, + "status": 200 + }, { + "request": { + "mbean": "java.lang:type=Memory", + "attribute": [ + "HeapMemoryUsage", + "NonHeapMemoryUsage", + "ObjectPendingFinalizationCount" + ], + "type": "read" + }, + "value": { + "ObjectPendingFinalizationCount": 0, + "HeapMemoryUsage": { + "init": 1073741824, + "committed": 1073741824, + "max": 1073741824, + "used": 415973072 + }, + "NonHeapMemoryUsage": { + "init": 2555904, + "committed": 45932544, + "max": -1, + "used": 45111832 + } + }, + "timestamp": 1485057232, + "status": 200 + }] +''' + +[[expects]] + measurement = "jvm_gc" + + [expects.tags] + name = "G1 Young Generation" + + [expects.fields] + CollectionTime = 123.0 + CollectionCount = 456.0 + +[[expects]] + measurement = "jvm_gc" + + [expects.tags] + name = "G1 Old Generation" + + [expects.fields] + CollectionTime = 321.0 + CollectionCount = 654.0 + +[[expects]] + measurement = "jvm_memory" + + [expects.tags] + + [expects.fields] + HeapMemoryUsage_init = 1073741824.0 + HeapMemoryUsage_committed = 1073741824.0 + HeapMemoryUsage_max = 1073741824.0 + HeapMemoryUsage_used = 415973072.0 + NonHeapMemoryUsage_init = 2555904.0 + 
NonHeapMemoryUsage_committed = 45932544.0 + NonHeapMemoryUsage_max = -1.0 + NonHeapMemoryUsage_used = 45111832.0 + ObjectPendingFinalizationCount = 0.0 diff --git a/plugins/inputs/jolokia2/testdata/kafka_log.toml b/plugins/inputs/jolokia2/testdata/kafka_log.toml new file mode 100644 index 0000000000000..1ff798d0f88e3 --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/kafka_log.toml @@ -0,0 +1,73 @@ +config = ''' + [[inputs.jolokia2]] + + [[inputs.jolokia2.metric]] + name = "kafka_log" + mbean = "kafka.log:name=*,partition=*,topic=*,type=Log" + field_name = "$1" + taginclude = ["topic", "partition"] +''' + +response = ''' + [{ + "request": { + "mbean": "kafka.log:name=*,partition=*,topic=*,type=Log", + "type": "read" + }, + "value": { + "kafka.log:name=NumLogSegments,partition=0,topic=bar-logs,type=Log": { + "Value": 1 + }, + "kafka.log:name=NumLogSegments,partition=0,topic=foo-logs,type=Log": { + "Value": 1 + }, + "kafka.log:name=Size,partition=0,topic=foo-logs,type=Log": { + "Value": 123 + }, + "kafka.log:name=LogEndOffset,partition=0,topic=bar-logs,type=Log": { + "Value": 1 + }, + "kafka.log:name=LogStartOffset,partition=0,topic=foo-logs,type=Log": { + "Value": 0 + }, + "kafka.log:name=LogEndOffset,partition=0,topic=foo-logs,type=Log": { + "Value": 1 + }, + "kafka.log:name=Size,partition=0,topic=bar-logs,type=Log": { + "Value": 123 + }, + "kafka.log:name=LogStartOffset,partition=0,topic=bar-logs,type=Log": { + "Value": 0 + } + }, + "timestamp": 1485053430, + "status": 200 + }] +''' + +[[expects]] + measurement = "kafka_log" + + [expects.tags] + topic = "foo" + partition = "0" + + [expects.fields] + LogStartOffset = 0 + LogEndOffset = 1 + NumLogSegments = 1 + Size = 123 + +[[expects]] + measurement = "kafka_log" + + [expects.tags] + topic = "bar" + partition = "0" + + [expects.fields] + LogStartOffset = 0 + LogEndOffset = 1 + NumLogSegments = 1 + Size = 456 + diff --git a/plugins/inputs/jolokia2/testdata/kafka_topic.toml b/plugins/inputs/jolokia2/testdata/kafka_topic.toml new file mode 100644 index 0000000000000..67a7c3b4edfa7 --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/kafka_topic.toml @@ -0,0 +1,408 @@ +config = ''' + [[inputs.jolokia2]] + default_field_delimiter = "_" + fielddrop = ["*RateUnit", "*EventType"] + + [[inputs.jolokia2.metric]] + name = "kafka_topic" + mbean = "kafka.server:name=*,type=BrokerTopicMetrics" + field_prefix = "$1_" + taginclude = ["topic"] + + [[inputs.jolokia2.metric]] + name = "kafka_topic" + mbean = "kafka.server:name=*,topic=*,type=BrokerTopicMetrics" + field_prefix = "$1_" + taginclude = ["topic"] +''' + +response = ''' + [{ + "request": { + "mbean": "kafka.server:name=*,type=BrokerTopicMetrics", + "type": "read" + }, + "value": { + "kafka.server:name=TotalProduceRequestsPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.0006674316447548538, + "EventType": "requests", + "Count": 39, + "FifteenMinuteRate": 0.025966524551559757, + "FiveMinuteRate": 0.030802904798411808, + "MeanRate": 0.0033841757148844274 + }, + "kafka.server:name=BytesOutPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.002706979419277259, + "EventType": "bytes", + "Count": 403, + "FifteenMinuteRate": 0.26108768790093473, + "FiveMinuteRate": 0.2695018052860003, + "MeanRate": 0.03496981457283606 + }, + "kafka.server:name=BytesInPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.020690380987400503, + "EventType": "bytes", + "Count": 1209, + "FifteenMinuteRate": 0.8049622610983529, + 
"FiveMinuteRate": 0.9548900487507653, + "MeanRate": 0.1049094427700357 + }, + "kafka.server:name=BytesRejectedPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "bytes", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=FailedProduceRequestsPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=MessagesInPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.0006674316447548538, + "EventType": "messages", + "Count": 39, + "FifteenMinuteRate": 0.025966524551559757, + "FiveMinuteRate": 0.030802904798411808, + "MeanRate": 0.0033841755202843205 + }, + "kafka.server:name=FailedFetchRequestsPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=TotalFetchRequestsPerSec,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.007322750245612493, + "EventType": "requests", + "Count": 1656, + "FifteenMinuteRate": 1.054825843846544, + "FiveMinuteRate": 1.0437567631823905, + "MeanRate": 0.14369730607826303 + } + }, + "timestamp": 1485055457, + "status": 200 + }, + { + "request": { + "mbean": "kafka.server:name=*,topic=*,type=BrokerTopicMetrics", + "type": "read" + }, + "value": { + "kafka.server:name=FailedProduceRequestsPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=FailedFetchRequestsPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=TotalFetchRequestsPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.006300021856177634, + "EventType": "requests", + "Count": 1508, + "FifteenMinuteRate": 0.9590978506704709, + "FiveMinuteRate": 0.9450261313740151, + "MeanRate": 0.17039943403316887 + }, + "kafka.server:name=TotalFetchRequestsPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.001079794036185152, + "EventType": "requests", + "Count": 148, + "FifteenMinuteRate": 0.09631145553236763, + "FiveMinuteRate": 0.10045679022593561, + "MeanRate": 0.07084240116251303 + }, + "kafka.server:name=BytesInPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 6.4023295116672e-64, + "EventType": "bytes", + "Count": 31, + "FifteenMinuteRate": 0.00033624418187523094, + "FiveMinuteRate": 9.889650777135403e-13, + "MeanRate": 0.0035029057204553197 + }, + "kafka.server:name=MessagesInPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.0006663589527842697, + "EventType": "messages", + "Count": 38, + "FifteenMinuteRate": 0.045682978793683694, + "FiveMinuteRate": 0.030981380824981756, + "MeanRate": 0.01818924889006622 + }, + "kafka.server:name=BytesRejectedPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "bytes", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + 
}, + "kafka.server:name=BytesOutPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.0026282745558926465, + "EventType": "bytes", + "Count": 341, + "FifteenMinuteRate": 0.22489274005718393, + "FiveMinuteRate": 0.2393701084292563, + "MeanRate": 0.16322461550553757 + }, + "kafka.server:name=MessagesInPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 2.0652675844087803e-65, + "EventType": "messages", + "Count": 1, + "FifteenMinuteRate": 1.0846586512104194e-05, + "FiveMinuteRate": 3.1902099281081966e-14, + "MeanRate": 0.00011299695422737837 + }, + "kafka.server:name=TotalProduceRequestsPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.0006663589527842697, + "EventType": "requests", + "Count": 38, + "FifteenMinuteRate": 0.045682978793683694, + "FiveMinuteRate": 0.030981380824981756, + "MeanRate": 0.01818926201269608 + }, + "kafka.server:name=BytesRejectedPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "bytes", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=FailedFetchRequestsPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=FailedProduceRequestsPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0, + "EventType": "requests", + "Count": 0, + "FifteenMinuteRate": 0, + "FiveMinuteRate": 0, + "MeanRate": 0 + }, + "kafka.server:name=BytesOutPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 7.870486338461278e-05, + "EventType": "bytes", + "Count": 62, + "FifteenMinuteRate": 0.036194947843750734, + "FiveMinuteRate": 0.030131696856743852, + "MeanRate": 0.0070058114404174514 + }, + "kafka.server:name=BytesInPerSec,topic=foo-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 0.020657127536312377, + "EventType": "bytes", + "Count": 1178, + "FifteenMinuteRate": 1.416172342604195, + "FiveMinuteRate": 0.9604228055744342, + "MeanRate": 0.5638667600464213 + }, + "kafka.server:name=TotalProduceRequestsPerSec,topic=bar-logs,type=BrokerTopicMetrics": { + "RateUnit": "SECONDS", + "OneMinuteRate": 2.0652675844087803e-65, + "EventType": "requests", + "Count": 1, + "FifteenMinuteRate": 1.0846586512104194e-05, + "FiveMinuteRate": 3.1902099281081966e-14, + "MeanRate": 0.00011299696685545709 + } + }, + "timestamp": 1485055457, + "status": 200 + }] +''' + +[[expects]] + measurement = "kafka_topic" + + [expects.tags] + topic = "" + + [expects.fields] + TotalProduceRequestsPerSec_OneMinuteRate = 0.0006674316447548538 + TotalProduceRequestsPerSec_Count = 39.0 + TotalProduceRequestsPerSec_FifteenMinuteRate = 0.025966524551559757 + TotalProduceRequestsPerSec_FiveMinuteRate = 0.030802904798411808 + TotalProduceRequestsPerSec_MeanRate = 0.0033841757148844274 + + BytesInProduceRequestsPerSec_OneMinuteRate = 0.002706979419277259 + BytesInProduceRequestsPerSec_Count = 403.0 + BytesInProduceRequestsPerSec_FifteenMinuteRate = 0.26108768790093473 + BytesInProduceRequestsPerSec_FiveMinuteRate = 0.2695018052860003 + BytesInProduceRequestsPerSec_MeanRate = 0.03496981457283606 + + BytesOutProduceRequestsPerSec_OneMinuteRate = 0.002706979419277259 + BytesOutProduceRequestsPerSec_Count = 403.0 + 
BytesOutProduceRequestsPerSec_FifteenMinuteRate = 0.26108768790093473 + BytesOutProduceRequestsPerSec_FiveMinuteRate = 0.2695018052860003 + BytesOutProduceRequestsPerSec_MeanRate = 0.03496981457283606 + + BytesRejectedPerSec_OneMinuteRate = 0 + BytesRejectedPerSec_Count = 0 + BytesRejectedPerSec_FifteenMinuteRate = 0 + BytesRejectedPerSec_FiveMinuteRate = 0 + BytesRejectedPerSec_MeanRate = 0 + + FailedProduceRequestsPerSec_OneMinuteRate = 0 + FailedProduceRequestsPerSec_Count = 0 + FailedProduceRequestsPerSec_FifteenMinuteRate = 0 + FailedProduceRequestsPerSec_FiveMinuteRate = 0 + FailedProduceRequestsPerSec_MeanRate = 0 + + MessagesInPerSec_OneMinuteRate = 0.0006674316447548538 + MessagesInPerSec_Count = 39.0 + MessagesInPerSec_FifteenMinuteRate = 0.025966524551559757 + MessagesInPerSec_FiveMinuteRate = 0.030802904798411808 + MessagesInPerSec_MeanRate = 0.0033841755202843205 + + FailedFetchRequestsPerSec_OneMinuteRate = 0 + FailedFetchRequestsPerSec_Count = 0 + FailedFetchRequestsPerSec_FifteenMinuteRate = 0 + FailedFetchRequestsPerSec_FiveMinuteRate = 0 + FailedFetchRequestsPerSec_MeanRate = 0 + + TotalFetchRequestsPerSec_OneMinuteRate = 0.007322750245612493 + TotalFetchRequestsPerSec_Count = 1656.0 + TotalFetchRequestsPerSec_FifteenMinuteRate = 1.054825843846544 + TotalFetchRequestsPerSec_FiveMinuteRate = 1.0437567631823905 + TotalFetchRequestsPerSec_MeanRate = 0.14369730607826303 + +[[expects]] + measurement = "kafka_topic" + + [expects.tags] + topic = "foo-logs" + + [expects.fields] + TotalProduceRequestsPerSec_OneMinuteRate = 0 + TotalProduceRequestsPerSec_Count = 0 + TotalProduceRequestsPerSec_FifteenMinuteRate = 0 + TotalProduceRequestsPerSec_FiveMinuteRate = 0 + TotalProduceRequestsPerSec_MeanRate = 0 + BytesInProduceRequestsPerSec_OneMinuteRate = 0 + BytesInProduceRequestsPerSec_Count = 0 + BytesInProduceRequestsPerSec_FifteenMinuteRate = 0 + BytesInProduceRequestsPerSec_FiveMinuteRate = 0 + BytesInProduceRequestsPerSec_MeanRate = 0 + BytesOutProduceRequestsPerSec_OneMinuteRate = 0 + BytesOutProduceRequestsPerSec_Count = 0 + BytesOutProduceRequestsPerSec_FifteenMinuteRate = 0 + BytesOutProduceRequestsPerSec_FiveMinuteRate = 0 + BytesOutProduceRequestsPerSec_MeanRate = 0 + BytesRejectedPerSec_OneMinuteRate = 0 + BytesRejectedPerSec_Count = 0 + BytesRejectedPerSec_FifteenMinuteRate = 0 + BytesRejectedPerSec_FiveMinuteRate = 0 + BytesRejectedPerSec_MeanRate = 0 + FailedProduceRequestsPerSec_OneMinuteRate = 0 + FailedProduceRequestsPerSec_Count = 0 + FailedProduceRequestsPerSec_FifteenMinuteRate = 0 + FailedProduceRequestsPerSec_FiveMinuteRate = 0 + FailedProduceRequestsPerSec_MeanRate = 0 + MessagesInPerSec_OneMinuteRate = 0 + MessagesInPerSec_Count = 0 + MessagesInPerSec_FifteenMinuteRate = 0 + MessagesInPerSec_FiveMinuteRate = 0 + MessagesInPerSec_MeanRate = 0 + FailedFetchRequestsPerSec_OneMinuteRate = 0 + FailedFetchRequestsPerSec_Count = 0 + FailedFetchRequestsPerSec_FifteenMinuteRate = 0 + FailedFetchRequestsPerSec_FiveMinuteRate = 0 + FailedFetchRequestsPerSec_MeanRate = 0 + TotalFetchRequestsPerSec_OneMinuteRate = 0 + TotalFetchRequestsPerSec_Count = 0 + TotalFetchRequestsPerSec_FifteenMinuteRate = 0 + TotalFetchRequestsPerSec_FiveMinuteRate = 0 + TotalFetchRequestsPerSec_MeanRate = 0 + +[[expects]] + measurement = "kafka_topic" + + [expects.tags] + topic = "bar-logs" + + [expects.fields] + TotalProduceRequestsPerSec_OneMinuteRate = 0 + TotalProduceRequestsPerSec_Count = 0 + TotalProduceRequestsPerSec_FifteenMinuteRate = 0 + TotalProduceRequestsPerSec_FiveMinuteRate = 0 + 
TotalProduceRequestsPerSec_MeanRate = 0 + BytesInProduceRequestsPerSec_OneMinuteRate = 0 + BytesInProduceRequestsPerSec_Count = 0 + BytesInProduceRequestsPerSec_FifteenMinuteRate = 0 + BytesInProduceRequestsPerSec_FiveMinuteRate = 0 + BytesInProduceRequestsPerSec_MeanRate = 0 + BytesOutProduceRequestsPerSec_OneMinuteRate = 0 + BytesOutProduceRequestsPerSec_Count = 0 + BytesOutProduceRequestsPerSec_FifteenMinuteRate = 0 + BytesOutProduceRequestsPerSec_FiveMinuteRate = 0 + BytesOutProduceRequestsPerSec_MeanRate = 0 + BytesRejectedPerSec_OneMinuteRate = 0 + BytesRejectedPerSec_Count = 0 + BytesRejectedPerSec_FifteenMinuteRate = 0 + BytesRejectedPerSec_FiveMinuteRate = 0 + BytesRejectedPerSec_MeanRate = 0 + FailedProduceRequestsPerSec_OneMinuteRate = 0 + FailedProduceRequestsPerSec_Count = 0 + FailedProduceRequestsPerSec_FifteenMinuteRate = 0 + FailedProduceRequestsPerSec_FiveMinuteRate = 0 + FailedProduceRequestsPerSec_MeanRate = 0 + MessagesInPerSec_OneMinuteRate = 0 + MessagesInPerSec_Count = 0 + MessagesInPerSec_FifteenMinuteRate = 0 + MessagesInPerSec_FiveMinuteRate = 0 + MessagesInPerSec_MeanRate = 0 + FailedFetchRequestsPerSec_OneMinuteRate = 0 + FailedFetchRequestsPerSec_Count = 0 + FailedFetchRequestsPerSec_FifteenMinuteRate = 0 + FailedFetchRequestsPerSec_FiveMinuteRate = 0 + FailedFetchRequestsPerSec_MeanRate = 0 + TotalFetchRequestsPerSec_OneMinuteRate = 0 + TotalFetchRequestsPerSec_Count = 0 + TotalFetchRequestsPerSec_FifteenMinuteRate = 0 + TotalFetchRequestsPerSec_FiveMinuteRate = 0 + TotalFetchRequestsPerSec_MeanRate = 0 + diff --git a/plugins/inputs/jolokia2/testdata/object_values.toml b/plugins/inputs/jolokia2/testdata/object_values.toml new file mode 100644 index 0000000000000..0d3190f3d422c --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/object_values.toml @@ -0,0 +1,122 @@ +config = ''' + [[inputs.jolokia2]] + default_field_delimiter = "_" + default_tag_prefix = "" + + [[inputs.jolokia2.metric]] + name = "object_value_without_attribute" + mbean = "object_value_without_attribute:foo=bar" + + [[inputs.jolokia2.metric]] + name = "object_value_with_attribute" + mbean = "object_value_with_attribute:foo=bar" + paths = ["biz"] + + [[inputs.jolokia2.metric]] + name = "object_value_with_attribute_and_path" + mbean = "object_value_with_attribute_and_path:foo=bar" + paths = ["biz/baz"] + + [[inputs.jolokia2.metric]] + name = "object_value_with_pattern" + mbean = "object_value_with_pattern:test=*" +''' + +response = ''' + [{ + "request": { + "mbean": "object_value_without_attribute:foo=bar", + "type": "read" + }, + "value": { + "biz": 123, + "baz": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "object_value_with_attribute:foo=bar", + "attribute": "biz", + "type": "read" + }, + "value": { + "fiz": 123, + "faz": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "object_value_with_attribute_and_path:foo=bar", + "attribute": "biz", + "path": "baz", + "type": "read" + }, + "value": { + "bing": 123, + "bang": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "object_value_with_pattern:test=*", + "type": "read" + }, + "value": { + "object_value_with_pattern:test=foo": { + "fiz": 123 + }, + "object_value_with_pattern:test=bar": { + "biz": 456 + } + }, + "status": 200 +}] +''' + +[[expects]] + measurement = "object_value_without_attribute" + + [expects.tags] + foo = "bar" + + [expects.fields] + biz = 123.0 + baz = 456.0 + +[[expects]] + measurement = "object_value_with_attribute" + + [expects.tags] + foo = "bar" + + [expects.fields] + biz_fiz = 123.0 + biz_faz 
= 456.0 + +[[expects]] + measurement = "object_value_with_attribute_and_path" + + [expects.tags] + foo = "bar" + + [expects.fields] + biz_baz_bing = 123.0 + biz_baz_bang = 456.0 + +[[expects]] + measurement = "object_value_with_pattern" + + [expects.tags] + test = "foo" + + [expects.fields] + fiz = 123.0 + +[[expects]] + measurement = "object_value_with_pattern" + + [expects.tags] + test = "bar" + + [expects.fields] + biz = 456.0 diff --git a/plugins/inputs/jolokia2/testdata/scalar_values.toml b/plugins/inputs/jolokia2/testdata/scalar_values.toml new file mode 100644 index 0000000000000..c34b623cc740b --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/scalar_values.toml @@ -0,0 +1,106 @@ +config = ''' + [[inputs.jolokia2]] + default_field_delimiter = "_" + default_tag_prefix = "" + + [[inputs.jolokia2.metric]] + name = "scalar_value_without_attribute" + mbean = "scalar_value_without_attribute:foo=bar" + + [[inputs.jolokia2.metric]] + name = "scalar_value_with_attribute" + mbean = "scalar_value_with_attribute:foo=bar" + paths = ["biz"] + + [[inputs.jolokia2.metric]] + name = "scalar_value_with_attribute_and_path" + mbean = "scalar_value_with_attribute_and_path:foo=bar" + paths = ["biz/baz"] + + [[inputs.jolokia2.metric]] + name = "scalar_value_with_pattern" + mbean = "scalar_value_with_pattern:test=*" +''' + +response = ''' + [{ + "request": { + "mbean": "scalar_value_without_attribute:foo=bar", + "type": "read" + }, + "value": 123, + "status": 200 + }, { + "request": { + "mbean": "scalar_value_with_attribute:foo=bar", + "attribute": "biz", + "type": "read" + }, + "value": 456, + "status": 200 + }, { + "request": { + "mbean": "scalar_value_with_attribute_and_path:foo=bar", + "attribute": "biz", + "path": "baz", + "type": "read" + }, + "value": 789, + "status": 200 + }, { + "request": { + "mbean": "scalar_value_with_pattern:test=*", + "type": "read" + }, + "value": { + "scalar_value_with_pattern:test=foo": 123, + "scalar_value_with_pattern:test=bar": 456 + }, + "status": 200 + }] +''' + +[[expects]] + measurement = "scalar_value_without_attribute" + + [expects.tags] + foo = "bar" + + [expects.fields] + value = 123.0 + +[[expects]] + measurement = "scalar_value_with_attribute" + + [expects.tags] + foo = "bar" + + [expects.fields] + biz = 456.0 + +[[expects]] + measurement = "scalar_value_with_attribute_and_path" + + [expects.tags] + foo = "bar" + + [expects.fields] + biz_baz = 789.0 + +[[expects]] + measurement = "scalar_value_with_pattern" + + [expects.tags] + test = "foo" + + [expects.fields] + value = 123.0 + +[[expects]] + measurement = "scalar_value_with_pattern" + + [expects.tags] + test = "bar" + + [expects.fields] + value = 456.0 diff --git a/plugins/inputs/jolokia2/testdata/tag_and_field_customizations.toml b/plugins/inputs/jolokia2/testdata/tag_and_field_customizations.toml new file mode 100644 index 0000000000000..034071738e01a --- /dev/null +++ b/plugins/inputs/jolokia2/testdata/tag_and_field_customizations.toml @@ -0,0 +1,156 @@ +config = ''' + [[inputs.jolokia2]] + default_field_prefix = "foo" + default_field_delimiter = "xxx" + + default_tag_prefix = "bar" + default_tag_delimiter = "yyy" + + [[inputs.jolokia2.metric]] + name = "tag_and_field_defaults1" + mbean = "tag_and_field_defaults1:biz=baz,fiz=faz" + + [[inputs.jolokia2.metric]] + name = "tag_and_field_defaults2" + mbean = "tag_and_field_defaults2:biz=baz,fiz=faz" + paths = ["test"] + + [[inputs.jolokia2.metric]] + name = "tag_and_field_customizations1" + mbean = "tag_and_field_customizations1:biz=baz,fiz=faz" + + 
tag_delimiter = "YYY" + tag_prefix = "BAR" + + field_delimiter = "XXX" + field_prefix = "FOO" + + [[inputs.jolokia2.metric]] + name = "tag_and_field_customizations2" + mbean = "tag_and_field_customizations2:biz=baz,fiz=faz" + paths = ["test"] + + tag_delimiter = "YYY" + tag_prefix = "BAR" + + field_delimiter = "XXX" + field_prefix = "FOO" + + [[inputs.jolokia2.metric]] + name = "clear_prefixes" + mbean = "clear_prefixes:biz=baz,fiz=faz" + + tag_prefix = "" + field_prefix = "" +''' + +response = ''' + [{ + "request": { + "mbean": "tag_and_field_defaults1:biz=baz,fiz=faz", + "type": "read" + }, + "value": { + "hello": 123, + "world": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "tag_and_field_defaults2:biz=baz,fiz=faz", + "attribute": "test", + "type": "read" + }, + "value": { + "hello": 123, + "world": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "tag_and_field_customizations1:biz=baz,fiz=faz", + "type": "read" + }, + "value": { + "hello": 123, + "world": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "tag_and_field_customizations2:biz=baz,fiz=faz", + "attribute": "test", + "type": "read" + }, + "value": { + "hello": 123, + "world": 456 + }, + "status": 200 + }, { + "request": { + "mbean": "clear_prefixes:biz=baz,fiz=faz", + "type": "read" + }, + "value": { + "hello": 123, + "world": 456 + }, + "status": 200 + }] +''' + +[[expects]] + measurement = "tag_and_field_defaults1" + + [expects.tags] + baryyybiz = "baz" + baryyyfiz = "faz" + + [expects.fields] + fooxxxhello = 123.0 + fooxxxworld = 456.0 + +[[expects]] + measurement = "tag_and_field_defaults2" + + [expects.tags] + baryyybiz = "baz" + baryyyfiz = "faz" + + [expects.fields] + fooxxxtestxxxhello = 123.0 + fooxxxtestxxxworld = 456.0 + +[[expects]] + measurement = "tag_and_field_customizations1" + + [expects.tags] + BARYYYbiz = "baz" + BARYYYfiz = "faz" + + [expects.fields] + FOOXXXhello = 123.0 + FOOXXXworld = 456.0 + +[[expects]] + measurement = "tag_and_field_customizations2" + + [expects.tags] + BARYYYbiz = "baz" + BARYYYfiz = "faz" + + [expects.fields] + FOOXXXtestXXXhello = 123.0 + FOOXXXtestXXXworld = 456.0 + +[[expects]] + measurement = "clear_prefixes" + + [expects.tags] + biz = "baz" + fiz = "faz" + + [expects.fields] + hello = 123.0 + world = 456.0