diff --git a/NOTICE.txt b/NOTICE.txt index c5d1a7ea587..cf40fb42552 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -639,7 +639,7 @@ Contents of "LICENSE": -------------------------------------------------------------------- Dependency: github.com/elastic/beats/v7 Version: v7.0.0 -Revision: 633285e75a03 +Revision: 0fda3061815d License type (autodetected): Apache-2.0 -------------------------------------------------------------------- @@ -994,8 +994,7 @@ Contents of "NOTICE.txt": -------------------------------------------------------------------- Dependency: github.com/elastic/gosigar -Version: v0.10.6 -Revision: f115143bb233 +Version: v0.12.0 License type (autodetected): Apache-2.0 Contents of "NOTICE": @@ -1011,7 +1010,7 @@ Contents of "NOTICE": -------------------------------------------------------------------- Dependency: github.com/fatih/color -Version: v1.9.0 +Version: v1.10.0 License type (autodetected): MIT Contents of "LICENSE.md": @@ -3573,8 +3572,7 @@ License type (autodetected): Apache-2.0 -------------------------------------------------------------------- Dependency: go.elastic.co/apm -Version: v1.8.1 -Revision: 2aef45b9cf4b +Version: v1.9.0 License type (autodetected): Apache-2.0 Contents of "NOTICE": @@ -3905,7 +3903,7 @@ Contents of "LICENSE": -------------------------------------------------------------------- Dependency: golang.org/x/net -Revision: 942e2f445f3c +Revision: 69a78807bb2b License type (autodetected): BSD-3-Clause Contents of "LICENSE": @@ -4007,7 +4005,7 @@ Contents of "LICENSE": -------------------------------------------------------------------- Dependency: golang.org/x/sys -Revision: 201ba4db2418 +Revision: 5cba982894dd License type (autodetected): BSD-3-Clause Contents of "LICENSE": diff --git a/_meta/beat.yml b/_meta/beat.yml index 9619f41f98c..d1b8a337f6a 100644 --- a/_meta/beat.yml +++ b/_meta/beat.yml @@ -27,6 +27,10 @@ apm-server: # Maximum number of new connections to accept simultaneously (0 means unlimited). 
#max_connections: 0 + # Custom HTTP headers to add to all HTTP responses, e.g. for security policy compliance. + #response_headers: + # X-My-Header: Contents of the header + # If true (default), APM Server captures the IP of the instrumented service # or the IP and User Agent of the real user (RUM requests). #capture_personal_data: true diff --git a/apm-server.docker.yml b/apm-server.docker.yml index ee079ccc6f1..4da27ac1b09 100644 --- a/apm-server.docker.yml +++ b/apm-server.docker.yml @@ -27,6 +27,10 @@ apm-server: # Maximum number of new connections to accept simultaneously (0 means unlimited). #max_connections: 0 + # Custom HTTP headers to add to all HTTP responses, e.g. for security policy compliance. + #response_headers: + # X-My-Header: Contents of the header + # If true (default), APM Server captures the IP of the instrumented service # or the IP and User Agent of the real user (RUM requests). #capture_personal_data: true diff --git a/apm-server.yml b/apm-server.yml index f7dc6a6dee7..2b099f5fa08 100644 --- a/apm-server.yml +++ b/apm-server.yml @@ -27,6 +27,10 @@ apm-server: # Maximum number of new connections to accept simultaneously (0 means unlimited). #max_connections: 0 + # Custom HTTP headers to add to all HTTP responses, e.g. for security policy compliance. + #response_headers: + # X-My-Header: Contents of the header + # If true (default), APM Server captures the IP of the instrumented service # or the IP and User Agent of the real user (RUM requests). #capture_personal_data: true diff --git a/apmpackage/README.md b/apmpackage/README.md new file mode 100644 index 00000000000..aee889aac93 --- /dev/null +++ b/apmpackage/README.md @@ -0,0 +1,125 @@ +## Developer documentation + +### ~Requirements + +- Checkout `elastic/package-registry`, `elastic/package-storage` and `elastic/beats` +- Have `elastic/package-spec` at hand + +### Guide + +#### Update / fix a package + +1. 
Actual work + - Make changes in `apmpackage/apm/`, `apmpackage/docs/README.template.md` and/or code as needed + - Run `make update`. That will update fields, pipeline and doc files based on apm-server fields and pipeline defs. + +2. Run the registry + - Checkout a fresh master from the registry and run `mage build` + - Copy `apmpackage/apm` in apm-server to `build/package-storage/packages` in the registry + - `go run .` + +3. Run the stack + - Update Kibana settings with `xpack.fleet.registryUrl: http://localhost:8080` + - Start Kibana and Elasticsearch with X-Pack enabled. One way to do it is with a local Kibana: + - `yarn es snapshot --license trial --ssl -E xpack.security.authc.api_key.enabled=true` + - `yarn start --ssl` + +4. Test + - Go to the Fleet UI, install the integration and test what you need. You generally will want to have a look at the + installed assets (ie. templates and pipelines), and the generated `apm` input in the policy. + - If you need to change the package, you *must* remove the installed integration first. You can use the UI + or the API, eg: `curl -X DELETE -k -u elastic:changeme https://localhost:5601/abc/api/fleet/epm/packages/apm-0.1.0 -H 'kbn-xsrf: xyz'` + See [API docs](https://github.com/elastic/kibana/tree/master/x-pack/plugins/fleet/dev_docs/api) for details. + You normally don't need to restart the registry (an exception to this is eg. if you change a `hbs` template file). + +5. Upload to the snapshot registry + - When everything works and `apmpackage/apm/` changes have been merged to `master`, copy the new package to + `package-storage/packages` in the `package-storage` repo, `snapshot` branch. + Do *NOT* override any existing packages. Instead, bump the qualifier version. Eg: rename `apm/0.1.0-dev.1` to `apm/0.1.0-dev.2` + - Bump the qualifier in the contents too: `find . -type f -print0 | xargs -0 sed -i "" "s/0.1.0-dev.1/0.1.0-dev.2/g"` + This step can be done in a separate commit to facilitate review. 
+ + +#### Create a new package version + +Follow steps described above, except: + +1. New local version + - Copy `apmpackage/apm/` to `apmpackage/apm/`. + - Add a new version mapping in the `versionMapping` variable at the top of `apmpackage/cmd/gen-package/main.go`. + Most likely this won't be needed in the future. + - Then do any changes in the new folder. The rest of the steps are the same. + +2. First dev version + - When copying to the `package-storage`, create the first version qualifier instead of bumping the last one. + Eg: `apm/0.2.0` -> `apm/0.2.0-dev.1` + + +#### Run the Elastic Agent + +If you do code changes or a whole new version, you need to run the Elastic Agent locally. +Most of the work here is done in `beats/x-pack/elastic-agent` + +0. Optional: Update the spec + + The spec informs whether the Elastic Agent should or should not start apm-server based on the policy file, + and what settings to pass via GRPC call. + - Edit `spec/apm-server.yml` + - `mage update` + +1. Build / Package + + *First time* + - `DEV=true PLATFORMS=darwin mage package` (replace platform as needed) + - Untar `build/distributions` contents + + *Every time after* + - `DEV=true mage build` + - Copy `build/elastic-agent` to `build/distributions/elastic-agent--/data/elastic-agent-/` + +2. Override policy / apm-server + - Use the right `elastic-agent.yml` policy + + It might be one you just generated with the UI, or one you have at hand with an apm input. + Copy to `build/distributions/elastic-agent--/elastic-agent.yml` + + - Override apm-server in `install` and `downloads` folders. Approximately: + ``` + # compile apm-server + cd ~//apm-server + make && make update + + # tar and compress + cp build/fields/fields.yml . 
+ tar cvf apm-server--.tar apm-server LICENSE.txt NOTICE.txt README.md apm-server.yml ingest fields.yml + gzip apm-server--.tar + sha512sum apm-server--.tar.gz | tee apm-server--.tar.gz.sha512 + + # delete old stuff + cd ~//beats/x-pack/elastic-agent/build/distributions/elastic-agent--/data/elastic-agent-/downloads + rm apm* + rm -rf ../install/apm* + + # copy new files + mv /apm-server--.tar* . + mkdir -p ../install/apm-server-- + tar zxvf apm-server-- -C ../install/apm-server-- + ``` +3. Run the Elastic Agent + - `./build/distributions//elastic-agent -e` + - Check apm-server logs at `build/distributions//data//logs/default` + + (The last default in the path comes from the namespace in the policy) + +#### Promote a package + +Generally it should be done between FF and release. +1. Remove the qualifier version from the package +2. Push to the corresponding production branch(es) + + +### Caveats + +Fleet is under active development and this guide might become obsolete quickly. + +Take everything with a grain of salt. 
diff --git a/apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json b/apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json rename to apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json diff --git a/apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json b/apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json rename to apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json diff --git a/apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_user_agent.json b/apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_user_agent.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_user_agent.json rename to apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_user_agent.json diff --git a/apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_user_geo.json b/apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_user_geo.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/apm_user_geo.json rename to apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/apm_user_geo.json diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/default.json b/apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/default.json similarity index 100% rename from 
apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/default.json rename to apmpackage/apm/0.1.0/data_stream/app_metrics/elasticsearch/ingest_pipeline/default.json diff --git a/apmpackage/apm/0.1.0/data_stream/logs/fields/base-fields.yml b/apmpackage/apm/0.1.0/data_stream/app_metrics/fields/base-fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/fields/base-fields.yml rename to apmpackage/apm/0.1.0/data_stream/app_metrics/fields/base-fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/fields/ecs.yml b/apmpackage/apm/0.1.0/data_stream/app_metrics/fields/ecs.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/fields/ecs.yml rename to apmpackage/apm/0.1.0/data_stream/app_metrics/fields/ecs.yml diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/fields/fields.yml b/apmpackage/apm/0.1.0/data_stream/app_metrics/fields/fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/fields/fields.yml rename to apmpackage/apm/0.1.0/data_stream/app_metrics/fields/fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/manifest.yml b/apmpackage/apm/0.1.0/data_stream/app_metrics/manifest.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/manifest.yml rename to apmpackage/apm/0.1.0/data_stream/app_metrics/manifest.yml diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json b/apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json rename to apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json 
b/apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json rename to apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_user_agent.json b/apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_user_agent.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_user_agent.json rename to apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_user_agent.json diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_user_geo.json b/apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_user_geo.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/elasticsearch/ingest_pipeline/apm_user_geo.json rename to apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/apm_user_geo.json diff --git a/apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/default.json b/apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/default.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/elasticsearch/ingest_pipeline/default.json rename to apmpackage/apm/0.1.0/data_stream/error_logs/elasticsearch/ingest_pipeline/default.json diff --git a/apmpackage/apm/0.1.0/data_stream/metrics/fields/base-fields.yml b/apmpackage/apm/0.1.0/data_stream/error_logs/fields/base-fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/metrics/fields/base-fields.yml rename to apmpackage/apm/0.1.0/data_stream/error_logs/fields/base-fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/logs/fields/ecs.yml 
b/apmpackage/apm/0.1.0/data_stream/error_logs/fields/ecs.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/fields/ecs.yml rename to apmpackage/apm/0.1.0/data_stream/error_logs/fields/ecs.yml diff --git a/apmpackage/apm/0.1.0/data_stream/logs/fields/fields.yml b/apmpackage/apm/0.1.0/data_stream/error_logs/fields/fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/fields/fields.yml rename to apmpackage/apm/0.1.0/data_stream/error_logs/fields/fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/logs/manifest.yml b/apmpackage/apm/0.1.0/data_stream/error_logs/manifest.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/logs/manifest.yml rename to apmpackage/apm/0.1.0/data_stream/error_logs/manifest.yml diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_ingest_timestamp.json diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_remove_span_metadata.json diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_user_agent.json b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_user_agent.json similarity index 100% rename from 
apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_user_agent.json rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_user_agent.json diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_user_geo.json b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_user_geo.json similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/apm_user_geo.json rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/apm_user_geo.json diff --git a/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/default.json b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/default.json new file mode 100644 index 00000000000..b0b9f729f40 --- /dev/null +++ b/apmpackage/apm/0.1.0/data_stream/profile_metrics/elasticsearch/ingest_pipeline/default.json @@ -0,0 +1,25 @@ +{ + "description": "Default enrichment for APM events", + "processors": [ + { + "pipeline": { + "name": "metrics-apm.profiling-0.1.0-apm_user_agent" + } + }, + { + "pipeline": { + "name": "metrics-apm.profiling-0.1.0-apm_user_geo" + } + }, + { + "pipeline": { + "name": "metrics-apm.profiling-0.1.0-apm_ingest_timestamp" + } + }, + { + "pipeline": { + "name": "metrics-apm.profiling-0.1.0-apm_remove_span_metadata" + } + } + ] +} \ No newline at end of file diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/fields/base-fields.yml b/apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/base-fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/fields/base-fields.yml rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/base-fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/fields/ecs.yml b/apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/ecs.yml similarity index 100% rename from 
apmpackage/apm/0.1.0/data_stream/profiles/fields/ecs.yml rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/ecs.yml diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/fields/fields.yml b/apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/fields.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/fields/fields.yml rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/fields/fields.yml diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/manifest.yml b/apmpackage/apm/0.1.0/data_stream/profile_metrics/manifest.yml similarity index 100% rename from apmpackage/apm/0.1.0/data_stream/profiles/manifest.yml rename to apmpackage/apm/0.1.0/data_stream/profile_metrics/manifest.yml diff --git a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/default.json b/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/default.json deleted file mode 100644 index f278bd45b28..00000000000 --- a/apmpackage/apm/0.1.0/data_stream/profiles/elasticsearch/ingest_pipeline/default.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "description": "Default enrichment for APM events", - "processors": [ - { - "pipeline": { - "name": "profiles-apm-0.1.0-apm_user_agent" - } - }, - { - "pipeline": { - "name": "profiles-apm-0.1.0-apm_user_geo" - } - }, - { - "pipeline": { - "name": "profiles-apm-0.1.0-apm_ingest_timestamp" - } - }, - { - "pipeline": { - "name": "profiles-apm-0.1.0-apm_remove_span_metadata" - } - } - ] -} \ No newline at end of file diff --git a/apmpackage/apm/0.1.0/docs/README.md b/apmpackage/apm/0.1.0/docs/README.md index 81ba46ece78..3b44d60cbd1 100644 --- a/apmpackage/apm/0.1.0/docs/README.md +++ b/apmpackage/apm/0.1.0/docs/README.md @@ -1,25 +1,63 @@ # APM Integration -The APM integration installs templates and pipelines for APM data. -If a policy contains an `apm` input, any Elastic Agent(s) set up with that policy will run an APM Server binary, and bind to `localhost:8200`. 
+The APM integration installs Elasticsearch templates and Ingest Node pipelines for APM data. + +### How to use this integration + +When you add an APM integration to a policy, that policy will contain an `apm` input. +If a policy contains an `apm` input, any Elastic Agent(s) set up with that policy will run locally an APM Server binary. You must configure your APM Agents to communicate with that APM Server. If you have RUM enabled, you must run APM Server centrally. Otherwise, you can run it at the edge machines. To do so, download and enroll an Elastic Agent in the same machines where your instrumented services run. +If you want to change the default APM Server configuration, you need to edit the `elastic-agent.yml` policy file manually. +Find the input with `type:apm` and add any settings under `apm-server`. +For instance: + +```yaml +inputs: + - id: ba928403-d7b8-4c09-adcb-d670c5eac89c + name: apm-1 + revision: 1 + type: apm + use_output: default + meta: + package: + name: apm + version: 0.1.0 + data_stream: + namespace: default + apm-server: + rum: + enabled: true + event_rate.limit: 100 + secret_token: changeme +``` + +Note that template, pipeline and ILM settings cannot be configured through this file - Templates and pipelines are installed by the integration, +and ILM policies must be created externally. If you need additional pipelines, they must also be created externally. + +#### Namespace + +When you create a policy in the Fleet UI, under "Advanced Settings" you can choose a Namespace. +In future versions, data streams created by the APM integration will include the service name, +and you will be recommended to use the environment as namespace. + +This version doesn't automatically use the service name, so the recommendation instead is to use +both the service name and the environment as the namespace. ### Compatibility and limitations The APM integration requires Kibana 7.11 and Elasticsearch with basic license. 
This version is experimental and has some limitations, listed bellow: -- Elastic Cloud is not supported. -- Standalone mode is not supported. -- If you need to customize settings for APM Server, you need to update the agent policy manually. -Look for `apm-server` in the `apm` input. -- It is not possible to change APM Server settings dynamically. -You must update the policy with any changes you need and stop the APM Server process. +- It is not yet possible to change APM Server settings dynamically. +You must update the policy with any changes you need and restart the APM Server process. +- Sourcemap enrichment is not yet supported. +- There is no default ILM policy for traces (spans and transactions). +IMPORTANT: If you run APM Server with Elastic Agent manually in standalone mode, you must install the APM integration before ingestion starts. ### Configuration parameters @@ -534,8 +572,8 @@ Metrics are written to `metrics-apm.*`, `metrics-apm.internal.*` and `metrics-ap ### Logs -Logs are application log and error events. -Logs are written to `logs-apm.*` and `logs-apm.error.*` indices. +Logs are application error events. +Logs are written to `logs-apm.error.*` indices. **Exported Fields** diff --git a/apmpackage/apm/0.1.0/manifest.yml b/apmpackage/apm/0.1.0/manifest.yml index 774aec5d4b9..803dd6735a7 100644 --- a/apmpackage/apm/0.1.0/manifest.yml +++ b/apmpackage/apm/0.1.0/manifest.yml @@ -4,9 +4,9 @@ title: Elastic APM version: 0.1.0 license: basic description: Ingest APM data -type: solution # integration / solution +type: integration categories: - - monitoring # TODO do we need a new category? 
+ - monitoring release: experimental # experimental / beta / ga conditions: kibana.version: '^7.11.0' diff --git a/apmpackage/cmd/gen-package/gendocs.go b/apmpackage/cmd/gen-package/gendocs.go index 49e5190cafc..9ca93c63c2f 100644 --- a/apmpackage/cmd/gen-package/gendocs.go +++ b/apmpackage/cmd/gen-package/gendocs.go @@ -29,8 +29,8 @@ import ( func generateDocs(inputFields map[string][]field, version string) { data := docsData{ Traces: prepareFields(inputFields, version, "traces"), - Metrics: prepareFields(inputFields, version, "metrics"), - Logs: prepareFields(inputFields, version, "logs"), + Metrics: prepareFields(inputFields, version, "app_metrics"), + Logs: prepareFields(inputFields, version, "error_logs"), TransactionExample: loadExample("transactions.json"), SpanExample: loadExample("spans.json"), MetricsExample: loadExample("metricsets.json"), @@ -65,17 +65,17 @@ type docsData struct { ErrorExample string } -func prepareFields(inputFields map[string][]field, version, streamType string) []field { +func prepareFields(inputFields map[string][]field, version, stream string) []field { extend := func(fs []field) []field { var baseFields []field - for _, f := range loadFieldsFile(baseFieldsFilePath(version, streamType)) { + for _, f := range loadFieldsFile(baseFieldsFilePath(version, stream)) { f.IsECS = true baseFields = append(baseFields, f) } fs = append(baseFields, fs...) 
return fs } - return extend(inputFields[streamType]) + return extend(inputFields[stream]) } func loadExample(file string) string { diff --git a/apmpackage/cmd/gen-package/genfields.go b/apmpackage/cmd/gen-package/genfields.go index c5094332e87..c093441093a 100644 --- a/apmpackage/cmd/gen-package/genfields.go +++ b/apmpackage/cmd/gen-package/genfields.go @@ -34,12 +34,12 @@ func generateFields(version string) map[string][]field { ecsFlatFields := loadECSFields() inputFieldsFiles := map[string][]field{ - "logs": format("model/error/_meta/fields.yml"), + "error_logs": format("model/error/_meta/fields.yml"), "internal_metrics": format("model/metricset/_meta/fields.yml", "x-pack/apm-server/fields/_meta/fields.yml"), - "profiles": format("model/profile/_meta/fields.yml"), + "profile_metrics": format("model/profile/_meta/fields.yml"), "traces": format("model/transaction/_meta/fields.yml", "model/span/_meta/fields.yml"), } - inputFieldsFiles["metrics"] = filterInternalMetrics(inputFieldsFiles["internal_metrics"]) + inputFieldsFiles["app_metrics"] = filterInternalMetrics(inputFieldsFiles["internal_metrics"]) for streamType, inputFields := range inputFieldsFiles { var ecsFields []field diff --git a/apmpackage/cmd/gen-package/genpipelines.go b/apmpackage/cmd/gen-package/genpipelines.go index 329e5cc88c4..ea8ea1cfddf 100644 --- a/apmpackage/cmd/gen-package/genpipelines.go +++ b/apmpackage/cmd/gen-package/genpipelines.go @@ -23,14 +23,16 @@ import ( "io/ioutil" "os" "path/filepath" + + "github.com/elastic/apm-server/model" ) var streamMappings = map[string]string{ - "logs": "logs-apm.error", - "traces": "traces-apm", - "metrics": "metrics-apm", - "internal_metrics": "metrics-apm.internal", - "profiles": "profiles-apm", + "error_logs": "logs-" + model.ErrorsDataset, + "traces": "traces-" + model.TracesDataset, + "app_metrics": "metrics-" + model.AppMetricsDataset, + "internal_metrics": "metrics-" + model.InternalMetricsDataset, + "profile_metrics": "metrics-" + 
model.ProfilesDataset, } type PipelineDef struct { diff --git a/apmpackage/docs/README.template.md b/apmpackage/docs/README.template.md index d66347cdb17..61f9c928028 100644 --- a/apmpackage/docs/README.template.md +++ b/apmpackage/docs/README.template.md @@ -1,25 +1,63 @@ # APM Integration -The APM integration installs templates and pipelines for APM data. -If a policy contains an `apm` input, any Elastic Agent(s) set up with that policy will run an APM Server binary, and bind to `localhost:8200`. +The APM integration installs Elasticsearch templates and Ingest Node pipelines for APM data. + +### How to use this integration + +When you add an APM integration to a policy, that policy will contain an `apm` input. +If a policy contains an `apm` input, any Elastic Agent(s) set up with that policy will run locally an APM Server binary. You must configure your APM Agents to communicate with that APM Server. If you have RUM enabled, you must run APM Server centrally. Otherwise, you can run it at the edge machines. To do so, download and enroll an Elastic Agent in the same machines where your instrumented services run. +If you want to change the default APM Server configuration, you need to edit the `elastic-agent.yml` policy file manually. +Find the input with `type:apm` and add any settings under `apm-server`. +For instance: + +```yaml +inputs: + - id: ba928403-d7b8-4c09-adcb-d670c5eac89c + name: apm-1 + revision: 1 + type: apm + use_output: default + meta: + package: + name: apm + version: 0.1.0 + data_stream: + namespace: default + apm-server: + rum: + enabled: true + event_rate.limit: 100 + secret_token: changeme +``` + +Note that template, pipeline and ILM settings cannot be configured through this file - Templates and pipelines are installed by the integration, +and ILM policies must be created externally. If you need additional pipelines, they must also be created externally. 
+ +#### Namespace + +When you create a policy in the Fleet UI, under "Advanced Settings" you can choose a Namespace. +In future versions, data streams created by the APM integration will include the service name, +and you will be recommended to use the environment as namespace. + +This version doesn't automatically use the service name, so the recommendation instead is to use +both the service name and the environment as the namespace. ### Compatibility and limitations The APM integration requires Kibana 7.11 and Elasticsearch with basic license. This version is experimental and has some limitations, listed bellow: -- Elastic Cloud is not supported. -- Standalone mode is not supported. -- If you need to customize settings for APM Server, you need to update the agent policy manually. -Look for `apm-server` in the `apm` input. -- It is not possible to change APM Server settings dynamically. -You must update the policy with any changes you need and stop the APM Server process. +- It is not yet possible to change APM Server settings dynamically. +You must update the policy with any changes you need and restart the APM Server process. +- Sourcemap enrichment is not yet supported. +- There is no default ILM policy for traces (spans and transactions). +IMPORTANT: If you run APM Server with Elastic Agent manually in standalone mode, you must install the APM integration before ingestion starts. ### Configuration parameters @@ -71,8 +109,8 @@ Metrics are written to `metrics-apm.*`, `metrics-apm.internal.*` and `metrics-ap ### Logs -Logs are application log and error events. -Logs are written to `logs-apm.*` and `logs-apm.error.*` indices. +Logs are application error events. +Logs are written to `logs-apm.error.*` indices. 
**Exported Fields** diff --git a/beater/api/mux.go b/beater/api/mux.go index ca4c2b90d13..04dbfa0e0ff 100644 --- a/beater/api/mux.go +++ b/beater/api/mux.go @@ -178,6 +178,7 @@ func apmMiddleware(m map[request.ResultID]*monitoring.Int) []middleware.Middlewa func backendMiddleware(cfg *config.Config, auth *authorization.Handler, m map[request.ResultID]*monitoring.Int) []middleware.Middleware { backendMiddleware := append(apmMiddleware(m), + middleware.ResponseHeadersMiddleware(cfg.ResponseHeaders), middleware.AuthorizationMiddleware(auth, true), ) if cfg.AugmentEnabled { @@ -191,6 +192,7 @@ func rumMiddleware(cfg *config.Config, _ *authorization.Handler, m map[request.R "Configure the `apm-server.rum` section in apm-server.yml to enable ingestion of RUM events. " + "If you are not using the RUM agent, you can safely ignore this error." rumMiddleware := append(apmMiddleware(m), + middleware.ResponseHeadersMiddleware(cfg.ResponseHeaders), middleware.ResponseHeadersMiddleware(cfg.RumConfig.ResponseHeaders), middleware.SetRumFlagMiddleware(), middleware.SetIPRateLimitMiddleware(cfg.RumConfig.EventRate), @@ -212,7 +214,8 @@ func sourcemapMiddleware(cfg *config.Config, auth *authorization.Handler) []midd middleware.KillSwitchMiddleware(enabled, msg)) } -func rootMiddleware(_ *config.Config, auth *authorization.Handler) []middleware.Middleware { +func rootMiddleware(cfg *config.Config, auth *authorization.Handler) []middleware.Middleware { return append(apmMiddleware(root.MonitoringMap), + middleware.ResponseHeadersMiddleware(cfg.ResponseHeaders), middleware.AuthorizationMiddleware(auth, false)) } diff --git a/beater/config/config.go b/beater/config/config.go index b1b82070ea5..8ff68759c68 100644 --- a/beater/config/config.go +++ b/beater/config/config.go @@ -72,6 +72,7 @@ type Config struct { ShutdownTimeout time.Duration `config:"shutdown_timeout"` TLS *tlscommon.ServerConfig `config:"ssl"` MaxConnections int `config:"max_connections"` + ResponseHeaders 
map[string][]string `config:"response_headers"` Expvar *ExpvarConfig `config:"expvar"` AugmentEnabled bool `config:"capture_personal_data"` SelfInstrumentation *InstrumentationConfig `config:"instrumentation"` diff --git a/beater/config/config_test.go b/beater/config/config_test.go index a4bf7d7440e..60d18ad27ca 100644 --- a/beater/config/config_test.go +++ b/beater/config/config_test.go @@ -49,6 +49,15 @@ func TestUnpackConfig(t *testing.T) { kibanaHeadersConfig.Kibana.Enabled = true kibanaHeadersConfig.Kibana.Headers = map[string]string{"foo": "bar"} + responseHeadersConfig := DefaultConfig() + responseHeadersConfig.ResponseHeaders = map[string][]string{ + "k1": []string{"v1"}, + "k2": []string{"v2", "v3"}, + } + responseHeadersConfig.RumConfig.ResponseHeaders = map[string][]string{ + "k4": []string{"v4"}, + } + tests := map[string]struct { inpCfg map[string]interface{} outCfg *Config @@ -392,6 +401,20 @@ func TestUnpackConfig(t *testing.T) { }, outCfg: kibanaHeadersConfig, }, + "response headers": { + inpCfg: map[string]interface{}{ + "response_headers": map[string]interface{}{ + "k1": "v1", + "k2": []string{"v2", "v3"}, + }, + "rum": map[string]interface{}{ + "response_headers": map[string]interface{}{ + "k4": []string{"v4"}, + }, + }, + }, + outCfg: responseHeadersConfig, + }, } for name, test := range tests { diff --git a/changelogs/head.asciidoc b/changelogs/head.asciidoc index ce9af52631f..4426a95395d 100644 --- a/changelogs/head.asciidoc +++ b/changelogs/head.asciidoc @@ -27,4 +27,5 @@ https://github.com/elastic/apm-server/compare/7.10\...master[View commits] * Experimental support for data streams {pull}4409[4409] * Label/custom/mark keys are now sanitized (rather than validated and rejected) by the server {pull}4465[4465] * Upgrade Go to 1.14.12 {pull}4478[4478] +* Added apm-server.response_headers config {pull}4523[4523] * Switch logging format to be ECS compliant where possible {pull}3829[3829] \ No newline at end of file diff --git 
a/datastreams/servicename.go b/datastreams/servicename.go index ac49422727b..d6208237f6e 100644 --- a/datastreams/servicename.go +++ b/datastreams/servicename.go @@ -24,6 +24,8 @@ import "strings" // // Concretely, this function will lowercase the string and replace any // reserved characters with "_". +// +// TODO: use when Fleet supports variables in data streams (see #4492) func NormalizeServiceName(s string) string { s = strings.ToLower(s) s = strings.Map(replaceReservedRune, s) diff --git a/docs/copied-from-beats/docs/command-reference.asciidoc b/docs/copied-from-beats/docs/command-reference.asciidoc index a00a2baed24..3c393a3d332 100644 --- a/docs/copied-from-beats/docs/command-reference.asciidoc +++ b/docs/copied-from-beats/docs/command-reference.asciidoc @@ -1012,7 +1012,7 @@ default config file, +{beatname_lc}.yml+, is used. Enables debugging for the specified selectors. For the selectors, you can specify a comma-separated list of components, or you can use `-d "*"` to enable debugging for all -components. For example, `-d "publish"` displays all the "publish" related +components. For example, `-d "publisher"` displays all the publisher-related messages. *`-e, --e`*:: diff --git a/docs/copied-from-beats/docs/debugging.asciidoc b/docs/copied-from-beats/docs/debugging.asciidoc index abb5ed252d9..08cdc3f7152 100644 --- a/docs/copied-from-beats/docs/debugging.asciidoc +++ b/docs/copied-from-beats/docs/debugging.asciidoc @@ -27,12 +27,12 @@ platform). You can use a different configuration file by specifying the `-c` fla ------------------------------------------------------------ You can increase the verbosity of debug messages by enabling one or more debug -selectors. For example, to view the published transactions, you can start {beatname_uc} -with the `publish` selector like this: +selectors. 
For example, to view publisher-related messages, start {beatname_uc} +with the `publisher` selector: ["source","sh",subs="attributes"] ------------------------------------------------------------ -{beatname_lc} -e -d "publish" +{beatname_lc} -e -d "publisher" ------------------------------------------------------------ If you want all the debugging output (fair warning, it's quite a lot), you can diff --git a/docs/copied-from-beats/docs/howto/load-index-templates.asciidoc b/docs/copied-from-beats/docs/howto/load-index-templates.asciidoc index bd5e249b90c..e8c6cf6f1eb 100644 --- a/docs/copied-from-beats/docs/howto/load-index-templates.asciidoc +++ b/docs/copied-from-beats/docs/howto/load-index-templates.asciidoc @@ -48,6 +48,10 @@ If the template already exists, it’s not overwritten unless you configure [[overwrite-template]] === Overwrite an existing index template +WARNING: Do not enable this option for more than one instance of {beatname_uc}. If you start +multiple instances at the same time, it can overload your {es} with too many +template update requests. + To overwrite a template that's already loaded into {es}, set: [source,yaml] diff --git a/docs/copied-from-beats/docs/loggingconfig.asciidoc b/docs/copied-from-beats/docs/loggingconfig.asciidoc index 3a1cc925461..2d2e9eaa065 100644 --- a/docs/copied-from-beats/docs/loggingconfig.asciidoc +++ b/docs/copied-from-beats/docs/loggingconfig.asciidoc @@ -68,7 +68,7 @@ messages related to event publishing: ["source","yaml",subs="attributes"] ---- logging.level: debug -logging.selectors: ["publish"] +logging.selectors: ["publisher"] ---- The logs generated by {beatname_uc} are written to the CloudWatch log group for @@ -143,11 +143,22 @@ published. Also logs any warnings, errors, or critical errors. ==== `logging.selectors` The list of debugging-only selector tags used by different {beatname_uc} components. -Use `*` to enable debug output for all components. 
For example add `publish` to display -all the debug messages related to event publishing. +Use `*` to enable debug output for all components. Use `publisher` to display +debug messages related to event publishing. + +[TIP] +===== +The list of available selectors may change between releases, so avoid creating +tests that depend on specific selectors. + +To see which selectors are available, run {beatname_uc} in debug mode +(set `logging.level: debug` in the configuration). The selector name appears +after the log level and is enclosed in brackets. +===== + ifndef::serverless[] -When starting {beatname_lc}, selectors can be overwritten using the `-d` command -line option (`-d` also sets the debug log level). +To override selectors at the command line, use the `-d` global flag (`-d` also +sets the debug log level). For more information, see <>. endif::serverless[] [float] diff --git a/docs/copied-from-beats/docs/monitoring/monitoring-beats.asciidoc b/docs/copied-from-beats/docs/monitoring/monitoring-beats.asciidoc index ecad33a4a04..6f31c73aa2d 100644 --- a/docs/copied-from-beats/docs/monitoring/monitoring-beats.asciidoc +++ b/docs/copied-from-beats/docs/monitoring/monitoring-beats.asciidoc @@ -28,7 +28,6 @@ ifndef::serverless[] and sends it directly to your monitoring cluster. endif::[] - //Commenting out this link temporarily until the general monitoring docs can be //updated. 
//To learn about monitoring in general, see diff --git a/docs/copied-from-beats/docs/repositories.asciidoc b/docs/copied-from-beats/docs/repositories.asciidoc index a7104414465..1b27a6c0b44 100644 --- a/docs/copied-from-beats/docs/repositories.asciidoc +++ b/docs/copied-from-beats/docs/repositories.asciidoc @@ -122,7 +122,7 @@ sudo apt-get update && sudo apt-get install {beatname_pkg} -------------------------------------------------- sudo systemctl enable {beatname_pkg} -------------------------------------------------- - ++ If your system does not use `systemd` then run: + ["source","sh",subs="attributes"] @@ -224,7 +224,7 @@ sudo yum install {beatname_pkg} -------------------------------------------------- sudo systemctl enable {beatname_pkg} -------------------------------------------------- - ++ If your system does not use `systemd` then run: + ["source","sh",subs="attributes"] @@ -233,4 +233,3 @@ sudo chkconfig --add {beatname_pkg} -------------------------------------------------- endif::[] - diff --git a/docs/copied-from-beats/docs/security/api-keys.asciidoc b/docs/copied-from-beats/docs/security/api-keys.asciidoc index 403fd011122..aa397ff5fee 100644 --- a/docs/copied-from-beats/docs/security/api-keys.asciidoc +++ b/docs/copied-from-beats/docs/security/api-keys.asciidoc @@ -14,6 +14,8 @@ API key. For different clusters, you need to use an API key per cluster. NOTE: For security reasons, we recommend using a unique API key per {beatname_uc} instance. You can create as many API keys per user as necessary. +IMPORTANT: Review <> before creating API keys for {beatname_uc}. + [float] [[beats-api-key-publish]] === Create an API key for publishing @@ -41,6 +43,8 @@ POST /_security/api_key <1> Name of the API key <2> Granted privileges, see <> +NOTE: See <> for the list of privileges required to publish events. 
+ The return value will look something like this: [source,console-result,subs="attributes,callouts"] @@ -89,6 +93,8 @@ POST /_security/api_key <1> Name of the API key <2> Granted privileges, see <> +NOTE: See <> for the list of privileges required to send monitoring data. + The return value will look something like this: [source,console-result,subs="attributes,callouts"] diff --git a/docs/copied-from-beats/docs/shared-securing-beat.asciidoc b/docs/copied-from-beats/docs/shared-securing-beat.asciidoc index b8dcc3b1957..e1c47d91f2c 100644 --- a/docs/copied-from-beats/docs/shared-securing-beat.asciidoc +++ b/docs/copied-from-beats/docs/shared-securing-beat.asciidoc @@ -29,11 +29,13 @@ For secure communication between APM Server and APM Agents, see <> endif::[] +endif::[] // APM HTTPS information ifdef::beat-specific-security[] @@ -70,5 +72,7 @@ endif::[] // Linux Seccomp ifndef::serverless[] +ifndef::win_only[] include::./security/linux-seccomp.asciidoc[] endif::[] +endif::[] diff --git a/docs/copied-from-beats/docs/shared-ssl-config.asciidoc b/docs/copied-from-beats/docs/shared-ssl-config.asciidoc index f850aeedd68..ce573aae38d 100644 --- a/docs/copied-from-beats/docs/shared-ssl-config.asciidoc +++ b/docs/copied-from-beats/docs/shared-ssl-config.asciidoc @@ -104,7 +104,33 @@ NOTE: SSL settings are disabled if either `enabled` is set to `false` or the [float] ==== `certificate_authorities` -The list of root certificates for server verifications. If `certificate_authorities` is empty or not set, the trusted certificate authorities of the host system are used. +The list of root certificates for server verifications. If `certificate_authorities` is empty or not set, the trusted certificate authorities of the host system are used. If `certificate_authorities` is self-signed, the host system needs to trust that CA cert as well. 
By default you can specify a list of files that
The certificate option supports embedding of the certificate:
The key option supports embedding of the private key:
[float] ==== `verification_mode` -This option controls whether the client verifies server certificates and host -names. Valid values are `none` and `full`. If `verification_mode` is set -to `none`, all server host names and certificates are accepted. In this mode, -TLS-based connections are susceptible to man-in-the-middle attacks. Use this -option for testing only. +Controls the verification of certificates. Valid values are: + + * `full`, which verifies that the provided certificate is signed by a trusted +authority (CA) and also verifies that the server's hostname (or IP address) +matches the names identified within the certificate. + * `certificate`, which verifies that the provided certificate is signed by a +trusted authority (CA), but does not perform any hostname verification. + * `none`, which performs _no verification_ of the server's certificate. This +mode disables many of the security benefits of SSL/TLS and should only be used +after very careful consideration. It is primarily intended as a temporary +diagnostic mechanism when attempting to resolve TLS errors; its use in +production environments is strongly discouraged. -The default is `full`. +The default value is `full`. [float] ==== `cipher_suites` diff --git a/docs/copied-from-beats/docs/template-config.asciidoc b/docs/copied-from-beats/docs/template-config.asciidoc index 3271d567c2a..44dfd6d8194 100644 --- a/docs/copied-from-beats/docs/template-config.asciidoc +++ b/docs/copied-from-beats/docs/template-config.asciidoc @@ -23,9 +23,16 @@ endif::[] You can adjust the following settings to load your own template or overwrite an existing one. -*`setup.template.enabled`*:: Set to false to disable template loading. If set this to false, +*`setup.template.enabled`*:: Set to false to disable template loading. If this is set to false, you must <>. +ifndef::apm-server[] +*`setup.template.type`*:: The type of template to use. Available options: `legacy` (default), index templates +before Elasticsearch v7.8. 
Use this to avoid breaking existing deployments. New options are `component` +and `index`. Selecting `component` loads a component template which can be included in new index templates. +The option `index` loads the new index template. +endif::[] + *`setup.template.name`*:: The name of the template. The default is +{beatname_lc}+. The {beatname_uc} version is always appended to the given name, so the final name is +{beatname_lc}-%{[{beat_version_key}]}+. @@ -55,7 +62,8 @@ relative path is set, it is considered relative to the config path. See the <> for more information. + +===== `kerberos` + +beta[] + +Configuration options for Kerberos authentication. + +See <> for more information. diff --git a/docs/spec/rumv3/error.json b/docs/spec/rumv3/error.json index ac58c10259e..ac0996808b9 100644 --- a/docs/spec/rumv3/error.json +++ b/docs/spec/rumv3/error.json @@ -363,10 +363,7 @@ "array" ], "items": { - "type": [ - "null", - "object" - ] + "type": "object" }, "minItems": 0 }, @@ -407,86 +404,89 @@ "null", "array" ], - "properties": { - "ap": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cli": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cn": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "co": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "f": { - "description": "Filename is the relative name of the frame's file.", - "type": "string" - }, - "fn": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "li": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "mo": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "poc": { - "description": "PostContext is a slice of code lines immediately before the line 
from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "ap": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "prc": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { + "cli": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "cn": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] + }, + "co": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "f": { + "description": "Filename is the relative name of the frame's file.", "type": "string" }, - "minItems": 0 - } + "fn": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "li": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "mo": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "poc": { + "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + }, + "prc": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + } + }, + "required": [ + "f" + ] }, - "required": [ - "f" - ], "minItems": 0 }, "t": { @@ -567,86 +567,89 @@ "null", "array" ], - "properties": { - "ap": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cli": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - 
"string" - ] - }, - "cn": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "co": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "f": { - "description": "Filename is the relative name of the frame's file.", - "type": "string" - }, - "fn": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "li": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "mo": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "poc": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "ap": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "prc": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { + "cli": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "cn": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] + }, + "co": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "f": { + "description": "Filename is the relative name of the frame's file.", "type": "string" }, - "minItems": 0 - } + "fn": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "li": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "mo": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "poc": { + "description": "PostContext is a slice of code lines 
immediately before the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + }, + "prc": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + } + }, + "required": [ + "f" + ] }, - "required": [ - "f" - ], "minItems": 0 } }, diff --git a/docs/spec/rumv3/span.json b/docs/spec/rumv3/span.json index 9d5e0e364ed..e59c48e0e81 100644 --- a/docs/spec/rumv3/span.json +++ b/docs/spec/rumv3/span.json @@ -244,86 +244,89 @@ "null", "array" ], - "properties": { - "ap": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cli": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cn": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "co": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "f": { - "description": "Filename is the relative name of the frame's file.", - "type": "string" - }, - "fn": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "li": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "mo": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "poc": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "ap": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "prc": { - "description": "PreContext is a slice of code lines immediately after the line 
from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { + "cli": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "cn": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] + }, + "co": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "f": { + "description": "Filename is the relative name of the frame's file.", "type": "string" }, - "minItems": 0 - } + "fn": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "li": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "mo": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "poc": { + "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + }, + "prc": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + } + }, + "required": [ + "f" + ] }, - "required": [ - "f" - ], "minItems": 0 }, "su": { diff --git a/docs/spec/rumv3/transaction.json b/docs/spec/rumv3/transaction.json index cdabc370ecb..e715b7a8eb7 100644 --- a/docs/spec/rumv3/transaction.json +++ b/docs/spec/rumv3/transaction.json @@ -431,138 +431,141 @@ "null", "array" ], - "properties": { - "g": { - "description": "Tags are a flat mapping of user-defined tags. Allowed value types are string, boolean and number values. Tags are indexed and searchable.", - "type": [ - "null", - "object" - ], - "additionalProperties": { + "items": { + "type": "object", + "properties": { + "g": { + "description": "Tags are a flat mapping of user-defined tags. 
Allowed value types are string, boolean and number values. Tags are indexed and searchable.", "type": [ "null", - "string", - "boolean", - "number" + "object" ], - "maxLength": 1024 - } - }, - "sa": { - "description": "Samples hold application metrics collected from the agent.", - "type": "object", - "properties": { - "xbc": { - "description": "TransactionBreakdownCount The number of transactions for which breakdown metrics (span.self_time) have been created. As the Java agent tracks the breakdown for both sampled and non-sampled transactions, this metric is equivalent to transaction.duration.count", + "additionalProperties": { "type": [ "null", - "object" + "string", + "boolean", + "number" ], - "properties": { - "v": { - "description": "Value holds the value of a single metric sample.", - "type": "number" - } + "maxLength": 1024 + } + }, + "sa": { + "description": "Samples hold application metrics collected from the agent.", + "type": "object", + "properties": { + "xbc": { + "description": "TransactionBreakdownCount The number of transactions for which breakdown metrics (span.self_time) have been created. As the Java agent tracks the breakdown for both sampled and non-sampled transactions, this metric is equivalent to transaction.duration.count", + "type": [ + "null", + "object" + ], + "properties": { + "v": { + "description": "Value holds the value of a single metric sample.", + "type": "number" + } + }, + "required": [ + "v" + ] }, - "required": [ - "v" - ] - }, - "xdc": { - "description": "TransactionDurationCount is the number of transactions since the last report (the delta). The duration of transactions is tracked, which allows for the creation of graphs displaying a weighted average.", - "type": [ - "null", - "object" - ], - "properties": { - "v": { - "description": "Value holds the value of a single metric sample.", - "type": "number" - } + "xdc": { + "description": "TransactionDurationCount is the number of transactions since the last report (the delta). 
The duration of transactions is tracked, which allows for the creation of graphs displaying a weighted average.", + "type": [ + "null", + "object" + ], + "properties": { + "v": { + "description": "Value holds the value of a single metric sample.", + "type": "number" + } + }, + "required": [ + "v" + ] }, - "required": [ - "v" - ] - }, - "xds": { - "description": "TransactionDurationSum is the sum of all transactions durations in ms since the last report (the delta). The duration of transactions is tracked, which allows for the creation of graphs displaying a weighted average.", - "type": [ - "null", - "object" - ], - "properties": { - "v": { - "description": "Value holds the value of a single metric sample.", - "type": "number" - } + "xds": { + "description": "TransactionDurationSum is the sum of all transactions durations in ms since the last report (the delta). The duration of transactions is tracked, which allows for the creation of graphs displaying a weighted average.", + "type": [ + "null", + "object" + ], + "properties": { + "v": { + "description": "Value holds the value of a single metric sample.", + "type": "number" + } + }, + "required": [ + "v" + ] }, - "required": [ - "v" - ] - }, - "ysc": { - "description": "SpanSelfTimeCount holds the count of the related spans' self_time.", - "type": [ - "null", - "object" - ], - "properties": { - "v": { - "description": "Value holds the value of a single metric sample.", - "type": "number" - } + "ysc": { + "description": "SpanSelfTimeCount holds the count of the related spans' self_time.", + "type": [ + "null", + "object" + ], + "properties": { + "v": { + "description": "Value holds the value of a single metric sample.", + "type": "number" + } + }, + "required": [ + "v" + ] }, - "required": [ - "v" - ] - }, - "yss": { - "description": "SpanSelfTimeSum holds the sum of the related spans' self_time.", - "type": [ - "null", - "object" - ], - "properties": { - "v": { - "description": "Value holds the value of a single 
metric sample.", - "type": "number" - } + "yss": { + "description": "SpanSelfTimeSum holds the sum of the related spans' self_time.", + "type": [ + "null", + "object" + ], + "properties": { + "v": { + "description": "Value holds the value of a single metric sample.", + "type": "number" + } + }, + "required": [ + "v" + ] + } + } + }, + "y": { + "description": "Span holds selected information about the correlated transaction.", + "type": [ + "null", + "object" + ], + "properties": { + "su": { + "description": "Subtype is a further sub-division of the type (e.g. postgresql, elasticsearch)", + "type": [ + "null", + "string" + ], + "maxLength": 1024 }, - "required": [ - "v" - ] + "t": { + "description": "Type expresses the correlated span's type as keyword that has specific relevance within the service's domain, eg: 'request', 'backgroundjob'.", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + } } } }, - "y": { - "description": "Span holds selected information about the correlated transaction.", - "type": [ - "null", - "object" - ], - "properties": { - "su": { - "description": "Subtype is a further sub-division of the type (e.g. postgresql, elasticsearch)", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - }, - "t": { - "description": "Type expresses the correlated span's type as keyword that has specific relevance within the service's domain, eg: 'request', 'backgroundjob'.", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - } - } - } + "required": [ + "sa" + ] }, - "required": [ - "sa" - ], "minItems": 0 }, "n": { @@ -632,359 +635,365 @@ "null", "array" ], - "properties": { - "ac": { - "description": "Action holds the specific kind of event within the sub-type represented by the span (e.g. 
query, connect)", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - }, - "c": { - "description": "Context holds arbitrary contextual information for the event.", - "type": [ - "null", - "object" - ], - "properties": { - "dt": { - "description": "Destination contains contextual data about the destination of spans", - "type": [ - "null", - "object" - ], - "properties": { - "ad": { - "description": "Address is the destination network address: hostname (e.g. 'localhost'), FQDN (e.g. 'elastic.co'), IPv4 (e.g. '127.0.0.1') IPv6 (e.g. '::1')", + "items": { + "type": "object", + "properties": { + "ac": { + "description": "Action holds the specific kind of event within the sub-type represented by the span (e.g. query, connect)", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + }, + "c": { + "description": "Context holds arbitrary contextual information for the event.", + "type": [ + "null", + "object" + ], + "properties": { + "dt": { + "description": "Destination contains contextual data about the destination of spans", + "type": [ + "null", + "object" + ], + "properties": { + "ad": { + "description": "Address is the destination network address: hostname (e.g. 'localhost'), FQDN (e.g. 'elastic.co'), IPv4 (e.g. '127.0.0.1') IPv6 (e.g. '::1')", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + }, + "po": { + "description": "Port is the destination network port (e.g. 443)", + "type": [ + "null", + "integer" + ] + }, + "se": { + "description": "Service describes the destination service", + "type": [ + "null", + "object" + ], + "properties": { + "n": { + "description": "Name is the identifier for the destination service, e.g. 'http://elastic.co', 'elasticsearch', 'rabbitmq'", + "type": "string", + "maxLength": 1024 + }, + "rc": { + "description": "Resource identifies the destination service resource being operated on e.g. 
'http://elastic.co:80', 'elasticsearch', 'rabbitmq/queue_name'", + "type": "string", + "maxLength": 1024 + }, + "t": { + "description": "Type of the destination service, e.g. db, elasticsearch. Should typically be the same as span.type.", + "type": "string", + "maxLength": 1024 + } + }, + "required": [ + "n", + "rc", + "t" + ] + } + } + }, + "g": { + "description": "Tags are a flat mapping of user-defined tags. Allowed value types are string, boolean and number values. Tags are indexed and searchable.", + "type": [ + "null", + "object" + ], + "additionalProperties": { "type": [ "null", - "string" + "string", + "boolean", + "number" ], "maxLength": 1024 - }, - "po": { - "description": "Port is the destination network port (e.g. 443)", - "type": [ - "null", - "integer" - ] - }, - "se": { - "description": "Service describes the destination service", - "type": [ - "null", - "object" - ], - "properties": { - "n": { - "description": "Name is the identifier for the destination service, e.g. 'http://elastic.co', 'elasticsearch', 'rabbitmq'", - "type": "string", - "maxLength": 1024 - }, - "rc": { - "description": "Resource identifies the destination service resource being operated on e.g. 'http://elastic.co:80', 'elasticsearch', 'rabbitmq/queue_name'", - "type": "string", - "maxLength": 1024 - }, - "t": { - "description": "Type of the destination service, e.g. db, elasticsearch. 
Should typically be the same as span.type.", - "type": "string", - "maxLength": 1024 + } + }, + "h": { + "description": "HTTP contains contextual information when the span concerns an HTTP request.", + "type": [ + "null", + "object" + ], + "properties": { + "mt": { + "description": "Method holds information about the method of the HTTP request.", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + }, + "r": { + "description": "Response describes the HTTP response information in case the event was created as a result of an HTTP request.", + "type": [ + "null", + "object" + ], + "properties": { + "dbs": { + "description": "DecodedBodySize holds the size of the decoded payload.", + "type": [ + "null", + "number" + ] + }, + "ebs": { + "description": "EncodedBodySize holds the size of the encoded payload.", + "type": [ + "null", + "number" + ] + }, + "ts": { + "description": "TransferSize holds the total size of the payload.", + "type": [ + "null", + "number" + ] + } } }, - "required": [ - "n", - "rc", - "t" - ] + "sc": { + "description": "Deprecated: Use Response.StatusCode instead. StatusCode sent in the http response.", + "type": [ + "null", + "integer" + ] + }, + "url": { + "description": "URL is the raw url of the correlating HTTP request.", + "type": [ + "null", + "string" + ] + } } - } - }, - "g": { - "description": "Tags are a flat mapping of user-defined tags. Allowed value types are string, boolean and number values. Tags are indexed and searchable.", - "type": [ - "null", - "object" - ], - "additionalProperties": { + }, + "se": { + "description": "Service related information can be sent per span. 
Information provided here will override the more generic information retrieved from metadata, missing service fields will be retrieved from the metadata information.", "type": [ "null", - "string", - "boolean", - "number" + "object" ], - "maxLength": 1024 + "properties": { + "a": { + "description": "Agent holds information about the APM agent capturing the event.", + "type": [ + "null", + "object" + ], + "properties": { + "n": { + "description": "Name of the APM agent capturing information.", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + }, + "ve": { + "description": "Version of the APM agent capturing information.", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + } + } + }, + "n": { + "description": "Name of the monitored service.", + "type": [ + "null", + "string" + ], + "maxLength": 1024, + "pattern": "^[a-zA-Z0-9 _-]+$" + } + } } - }, - "h": { - "description": "HTTP contains contextual information when the span concerns an HTTP request.", - "type": [ - "null", - "object" - ], + } + }, + "d": { + "description": "Duration of the span in milliseconds", + "type": "number", + "minimum": 0 + }, + "id": { + "description": "ID holds the hex encoded 64 random bits ID of the event.", + "type": "string", + "maxLength": 1024 + }, + "n": { + "description": "Name is the generic designation of a span in the scope of a transaction.", + "type": "string", + "maxLength": 1024 + }, + "o": { + "description": "Outcome of the span: success, failure, or unknown. Outcome may be one of a limited set of permitted values describing the success or failure of the span. It can be used for calculating error rates for outgoing requests.", + "type": [ + "null", + "string" + ], + "enum": [ + "success", + "failure", + "unknown", + null + ] + }, + "pi": { + "description": "ParentIndex is the index of the parent span in the list. 
Absent when the parent is a transaction.", + "type": [ + "null", + "integer" + ] + }, + "s": { + "description": "Start is the offset relative to the transaction's timestamp identifying the start of the span, in milliseconds.", + "type": "number" + }, + "sr": { + "description": "SampleRate applied to the monitored service at the time where this span was recorded.", + "type": [ + "null", + "number" + ] + }, + "st": { + "description": "Stacktrace connected to this span event.", + "type": [ + "null", + "array" + ], + "items": { + "type": "object", "properties": { - "mt": { - "description": "Method holds information about the method of the HTTP request.", + "ap": { + "description": "AbsPath is the absolute path of the frame's file.", "type": [ "null", "string" - ], - "maxLength": 1024 + ] }, - "r": { - "description": "Response describes the HTTP response information in case the event was created as a result of an HTTP request.", + "cli": { + "description": "ContextLine is the line from the frame's file.", "type": [ "null", - "object" - ], - "properties": { - "dbs": { - "description": "DecodedBodySize holds the size of the decoded payload.", - "type": [ - "null", - "number" - ] - }, - "ebs": { - "description": "EncodedBodySize holds the size of the encoded payload.", - "type": [ - "null", - "number" - ] - }, - "ts": { - "description": "TransferSize holds the total size of the payload.", - "type": [ - "null", - "number" - ] - } - } + "string" + ] + }, + "cn": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] }, - "sc": { - "description": "Deprecated: Use Response.StatusCode instead. 
StatusCode sent in the http response.", + "co": { + "description": "ColumnNumber of the frame.", "type": [ "null", "integer" ] }, - "url": { - "description": "URL is the raw url of the correlating HTTP request.", + "f": { + "description": "Filename is the relative name of the frame's file.", + "type": "string" + }, + "fn": { + "description": "Function represented by the frame.", "type": [ "null", "string" ] - } - } - }, - "se": { - "description": "Service related information can be sent per span. Information provided here will override the more generic information retrieved from metadata, missing service fields will be retrieved from the metadata information.", - "type": [ - "null", - "object" - ], - "properties": { - "a": { - "description": "Agent holds information about the APM agent capturing the event.", + }, + "li": { + "description": "LineNumber of the frame.", "type": [ "null", - "object" - ], - "properties": { - "n": { - "description": "Name of the APM agent capturing information.", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - }, - "ve": { - "description": "Version of the APM agent capturing information.", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - } - } + "integer" + ] }, - "n": { - "description": "Name of the monitored service.", + "mo": { + "description": "Module to which the frame belongs to.", "type": [ "null", "string" + ] + }, + "poc": { + "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + }, + "minItems": 0 + }, + "prc": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" ], - "maxLength": 1024, - "pattern": "^[a-zA-Z0-9 _-]+$" + "items": { + "type": "string" + }, + "minItems": 0 } - } - } - } - }, - "d": { - "description": "Duration of the span in milliseconds", - "type": "number", - "minimum": 0 - }, - 
"id": { - "description": "ID holds the hex encoded 64 random bits ID of the event.", - "type": "string", - "maxLength": 1024 - }, - "n": { - "description": "Name is the generic designation of a span in the scope of a transaction.", - "type": "string", - "maxLength": 1024 - }, - "o": { - "description": "Outcome of the span: success, failure, or unknown. Outcome may be one of a limited set of permitted values describing the success or failure of the span. It can be used for calculating error rates for outgoing requests.", - "type": [ - "null", - "string" - ], - "enum": [ - "success", - "failure", - "unknown", - null - ] - }, - "pi": { - "description": "ParentIndex is the index of the parent span in the list. Absent when the parent is a transaction.", - "type": [ - "null", - "integer" - ] - }, - "s": { - "description": "Start is the offset relative to the transaction's timestamp identifying the start of the span, in milliseconds.", - "type": "number" - }, - "sr": { - "description": "SampleRate applied to the monitored service at the time where this span was recorded.", - "type": [ - "null", - "number" - ] - }, - "st": { - "description": "Stacktrace connected to this span event.", - "type": [ - "null", - "array" - ], - "properties": { - "ap": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cli": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "cn": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "co": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "f": { - "description": "Filename is the relative name of the frame's file.", - "type": "string" - }, - "fn": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "li": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - 
"mo": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "poc": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" }, - "minItems": 0 + "required": [ + "f" + ] }, - "prc": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" - }, - "minItems": 0 - } + "minItems": 0 }, - "required": [ - "f" - ], - "minItems": 0 - }, - "su": { - "description": "Subtype is a further sub-division of the type (e.g. postgresql, elasticsearch)", - "type": [ - "null", - "string" - ], - "maxLength": 1024 - }, - "sy": { - "description": "Sync indicates whether the span was executed synchronously or asynchronously.", - "type": [ - "null", - "boolean" - ] + "su": { + "description": "Subtype is a further sub-division of the type (e.g. 
postgresql, elasticsearch)", + "type": [ + "null", + "string" + ], + "maxLength": 1024 + }, + "sy": { + "description": "Sync indicates whether the span was executed synchronously or asynchronously.", + "type": [ + "null", + "boolean" + ] + }, + "t": { + "description": "Type holds the span's type, and can have specific keywords within the service's domain (eg: 'request', 'backgroundjob', etc)", + "type": "string", + "maxLength": 1024 + } }, - "t": { - "description": "Type holds the span's type, and can have specific keywords within the service's domain (eg: 'request', 'backgroundjob', etc)", - "type": "string", - "maxLength": 1024 - } + "required": [ + "d", + "id", + "n", + "s", + "t" + ] }, - "required": [ - "d", - "id", - "n", - "s", - "t" - ], "minItems": 0 }, "yc": { diff --git a/docs/spec/v2/error.json b/docs/spec/v2/error.json index e02f2fbb41c..bfb24161f84 100644 --- a/docs/spec/v2/error.json +++ b/docs/spec/v2/error.json @@ -587,10 +587,7 @@ "array" ], "items": { - "type": [ - "null", - "object" - ] + "type": "object" }, "minItems": 0 }, @@ -631,123 +628,126 @@ "null", "array" ], - "properties": { - "abs_path": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "classname": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "colno": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "context_line": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "filename": { - "description": "Filename is the relative name of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "function": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "library_frame": { - "description": "LibraryFrame indicates whether the frame is from a third party library.", - "type": [ - "null", - "boolean" - ] - }, - "lineno": { - 
"description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "module": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "post_context": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "abs_path": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "pre_context": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "classname": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "vars": { - "description": "Vars is a flat mapping of local variables of the frame.", - "type": [ - "null", - "object" - ] - } - }, - "minItems": 0, - "anyOf": [ - { - "properties": { - "classname": { + "colno": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "context_line": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "filename": { + "description": "Filename is the relative name of the frame's file.", + "type": [ + "null", + "string" + ] + }, + "function": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "library_frame": { + "description": "LibraryFrame indicates whether the frame is from a third party library.", + "type": [ + "null", + "boolean" + ] + }, + "lineno": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "module": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "post_context": { + "description": 
"PostContext is a slice of code lines immediately before the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "classname" - ] - }, - { - "properties": { - "filename": { + "pre_context": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "filename" - ] - } - ] + "vars": { + "description": "Vars is a flat mapping of local variables of the frame.", + "type": [ + "null", + "object" + ] + } + }, + "anyOf": [ + { + "properties": { + "classname": { + "type": "string" + } + }, + "required": [ + "classname" + ] + }, + { + "properties": { + "filename": { + "type": "string" + } + }, + "required": [ + "filename" + ] + } + ] + }, + "minItems": 0 }, "type": { "description": "Type of the exception.", @@ -827,123 +827,126 @@ "null", "array" ], - "properties": { - "abs_path": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "classname": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "colno": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "context_line": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "filename": { - "description": "Filename is the relative name of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "function": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "library_frame": { - "description": "LibraryFrame indicates whether the frame is from a third party library.", - "type": [ - "null", - "boolean" - ] - }, - "lineno": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "module": { - 
"description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "post_context": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "abs_path": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "pre_context": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "classname": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "vars": { - "description": "Vars is a flat mapping of local variables of the frame.", - "type": [ - "null", - "object" - ] - } - }, - "minItems": 0, - "anyOf": [ - { - "properties": { - "classname": { + "colno": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "context_line": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "filename": { + "description": "Filename is the relative name of the frame's file.", + "type": [ + "null", + "string" + ] + }, + "function": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "library_frame": { + "description": "LibraryFrame indicates whether the frame is from a third party library.", + "type": [ + "null", + "boolean" + ] + }, + "lineno": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "module": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "post_context": { + "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", + "type": [ + 
"null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "classname" - ] - }, - { - "properties": { - "filename": { + "pre_context": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "filename" - ] - } - ] + "vars": { + "description": "Vars is a flat mapping of local variables of the frame.", + "type": [ + "null", + "object" + ] + } + }, + "anyOf": [ + { + "properties": { + "classname": { + "type": "string" + } + }, + "required": [ + "classname" + ] + }, + { + "properties": { + "filename": { + "type": "string" + } + }, + "required": [ + "filename" + ] + } + ] + }, + "minItems": 0 } }, "required": [ diff --git a/docs/spec/v2/span.json b/docs/spec/v2/span.json index 77c6b57d6a6..24a1e5c4dbd 100644 --- a/docs/spec/v2/span.json +++ b/docs/spec/v2/span.json @@ -517,123 +517,126 @@ "null", "array" ], - "properties": { - "abs_path": { - "description": "AbsPath is the absolute path of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "classname": { - "description": "Classname of the frame.", - "type": [ - "null", - "string" - ] - }, - "colno": { - "description": "ColumnNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, - "context_line": { - "description": "ContextLine is the line from the frame's file.", - "type": [ - "null", - "string" - ] - }, - "filename": { - "description": "Filename is the relative name of the frame's file.", - "type": [ - "null", - "string" - ] - }, - "function": { - "description": "Function represented by the frame.", - "type": [ - "null", - "string" - ] - }, - "library_frame": { - "description": "LibraryFrame indicates whether the frame is from a third party library.", - "type": [ - "null", - "boolean" - ] - }, - "lineno": { - "description": "LineNumber of the frame.", - "type": [ - "null", - "integer" - ] - }, 
- "module": { - "description": "Module to which the frame belongs to.", - "type": [ - "null", - "string" - ] - }, - "post_context": { - "description": "PostContext is a slice of code lines immediately before the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "items": { + "type": "object", + "properties": { + "abs_path": { + "description": "AbsPath is the absolute path of the frame's file.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "pre_context": { - "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", - "type": [ - "null", - "array" - ], - "items": { - "type": "string" + "classname": { + "description": "Classname of the frame.", + "type": [ + "null", + "string" + ] }, - "minItems": 0 - }, - "vars": { - "description": "Vars is a flat mapping of local variables of the frame.", - "type": [ - "null", - "object" - ] - } - }, - "minItems": 0, - "anyOf": [ - { - "properties": { - "classname": { + "colno": { + "description": "ColumnNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "context_line": { + "description": "ContextLine is the line from the frame's file.", + "type": [ + "null", + "string" + ] + }, + "filename": { + "description": "Filename is the relative name of the frame's file.", + "type": [ + "null", + "string" + ] + }, + "function": { + "description": "Function represented by the frame.", + "type": [ + "null", + "string" + ] + }, + "library_frame": { + "description": "LibraryFrame indicates whether the frame is from a third party library.", + "type": [ + "null", + "boolean" + ] + }, + "lineno": { + "description": "LineNumber of the frame.", + "type": [ + "null", + "integer" + ] + }, + "module": { + "description": "Module to which the frame belongs to.", + "type": [ + "null", + "string" + ] + }, + "post_context": { + "description": "PostContext is a slice of code lines immediately before the line from the frame's 
file.", + "type": [ + "null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "classname" - ] - }, - { - "properties": { - "filename": { + "pre_context": { + "description": "PreContext is a slice of code lines immediately after the line from the frame's file.", + "type": [ + "null", + "array" + ], + "items": { "type": "string" - } + }, + "minItems": 0 }, - "required": [ - "filename" - ] - } - ] + "vars": { + "description": "Vars is a flat mapping of local variables of the frame.", + "type": [ + "null", + "object" + ] + } + }, + "anyOf": [ + { + "properties": { + "classname": { + "type": "string" + } + }, + "required": [ + "classname" + ] + }, + { + "properties": { + "filename": { + "type": "string" + } + }, + "required": [ + "filename" + ] + } + ] + }, + "minItems": 0 }, "start": { "description": "Start is the offset relative to the transaction's timestamp identifying the start of the span, in milliseconds.", diff --git a/model/error.go b/model/error.go index 11cffa61551..3c10dfda6b8 100644 --- a/model/error.go +++ b/model/error.go @@ -48,6 +48,7 @@ var ( const ( errorProcessorName = "error" errorDocType = "error" + ErrorsDataset = "apm.error" ) type Error struct { @@ -119,9 +120,8 @@ func (e *Error) Transform(ctx context.Context, cfg *transform.Config) []beat.Eve // Errors are stored in an APM errors-specific "logs" data stream, per service. // By storing errors in a "logs" data stream, they can be viewed in the Logs app // in Kibana. 
- dataset := fmt.Sprintf("apm.error.%s", datastreams.NormalizeServiceName(e.Metadata.Service.Name)) fields[datastreams.TypeField] = datastreams.LogsType - fields[datastreams.DatasetField] = dataset + fields[datastreams.DatasetField] = ErrorsDataset } // first set the generic metadata (order is relevant) diff --git a/model/error_test.go b/model/error_test.go index cf99375dbf0..3b9fa17cb8b 100644 --- a/model/error_test.go +++ b/model/error_test.go @@ -308,7 +308,7 @@ func TestEvents(t *testing.T) { Transformable: &Error{Timestamp: timestamp, Metadata: md}, Output: common.MapStr{ "data_stream.type": "logs", - "data_stream.dataset": "apm.error.myservice", + "data_stream.dataset": "apm.error", "agent": common.MapStr{"name": "go", "version": "1.0"}, "service": common.MapStr{"name": "myservice", "version": "1.0"}, "error": common.MapStr{ @@ -323,7 +323,7 @@ func TestEvents(t *testing.T) { Transformable: &Error{Timestamp: timestamp, Metadata: md, TransactionSampled: &sampledFalse}, Output: common.MapStr{ "data_stream.type": "logs", - "data_stream.dataset": "apm.error.myservice", + "data_stream.dataset": "apm.error", "transaction": common.MapStr{"sampled": false}, "agent": common.MapStr{"name": "go", "version": "1.0"}, "service": common.MapStr{"name": "myservice", "version": "1.0"}, @@ -339,7 +339,7 @@ func TestEvents(t *testing.T) { Transformable: &Error{Timestamp: timestamp, Metadata: md, TransactionType: &transactionType}, Output: common.MapStr{ "data_stream.type": "logs", - "data_stream.dataset": "apm.error.myservice", + "data_stream.dataset": "apm.error", "transaction": common.MapStr{"type": "request"}, "error": common.MapStr{ "grouping_key": "d41d8cd98f00b204e9800998ecf8427e", @@ -370,7 +370,7 @@ func TestEvents(t *testing.T) { Output: common.MapStr{ "data_stream.type": "logs", - "data_stream.dataset": "apm.error.myservice", + "data_stream.dataset": "apm.error", "labels": common.MapStr{"key": true, "label": 101}, "service": common.MapStr{"name": "myservice", 
"version": "1.0"}, "agent": common.MapStr{"name": "go", "version": "1.0"}, diff --git a/model/metricset.go b/model/metricset.go index 00443bccfdc..2eaa773f33d 100644 --- a/model/metricset.go +++ b/model/metricset.go @@ -38,6 +38,8 @@ const ( metricsetEventKey = "event" metricsetTransactionKey = "transaction" metricsetSpanKey = "span" + AppMetricsDataset = "apm" + InternalMetricsDataset = "apm.internal" ) var ( @@ -186,16 +188,15 @@ func (me *Metricset) Transform(ctx context.Context, cfg *transform.Config) []bea if cfg.DataStreams { // Metrics are stored in "metrics" data streams. - dataset := "apm." if isInternal { // Metrics that include well-defined transaction/span fields // (i.e. breakdown metrics, transaction and span metrics) will // be stored separately from application and runtime metrics. - dataset = "apm.internal." + fields[datastreams.DatasetField] = InternalMetricsDataset + } else { + fields[datastreams.DatasetField] = AppMetricsDataset } - dataset += datastreams.NormalizeServiceName(me.Metadata.Service.Name) fields[datastreams.TypeField] = datastreams.MetricsType - fields[datastreams.DatasetField] = dataset } return []beat.Event{{ diff --git a/model/metricset_test.go b/model/metricset_test.go index 3a95eb46a4a..32e7ce7bba1 100644 --- a/model/metricset_test.go +++ b/model/metricset_test.go @@ -63,7 +63,7 @@ func TestTransform(t *testing.T) { Output: []common.MapStr{ { "data_stream.type": "metrics", - "data_stream.dataset": "apm.myservice", + "data_stream.dataset": "apm", "processor": common.MapStr{"event": "metric", "name": "metric"}, "service": common.MapStr{ "name": "myservice", @@ -91,7 +91,7 @@ func TestTransform(t *testing.T) { Output: []common.MapStr{ { "data_stream.type": "metrics", - "data_stream.dataset": "apm.myservice", + "data_stream.dataset": "apm", "processor": common.MapStr{"event": "metric", "name": "metric"}, "service": common.MapStr{"name": "myservice"}, "labels": common.MapStr{"a_b": "a.b.value"}, @@ -116,7 +116,7 @@ func 
TestTransform(t *testing.T) { Output: []common.MapStr{ { "data_stream.type": "metrics", - "data_stream.dataset": "apm.internal.myservice", + "data_stream.dataset": "apm.internal", "processor": common.MapStr{"event": "metric", "name": "metric"}, "service": common.MapStr{"name": "myservice"}, "transaction": common.MapStr{"type": trType, "name": trName}, @@ -154,7 +154,7 @@ func TestTransform(t *testing.T) { Output: []common.MapStr{ { "data_stream.type": "metrics", - "data_stream.dataset": "apm.internal.myservice", + "data_stream.dataset": "apm.internal", "processor": common.MapStr{"event": "metric", "name": "metric"}, "service": common.MapStr{"name": "myservice"}, "event": common.MapStr{"outcome": eventOutcome}, @@ -196,7 +196,7 @@ func TestTransform(t *testing.T) { Output: []common.MapStr{ { "data_stream.type": "metrics", - "data_stream.dataset": "apm.internal.myservice", + "data_stream.dataset": "apm.internal", "processor": common.MapStr{"event": "metric", "name": "metric"}, "service": common.MapStr{"name": "myservice"}, "span": common.MapStr{"type": spType, "subtype": spSubtype, diff --git a/model/modeldecoder/generator/jsonschema.go b/model/modeldecoder/generator/jsonschema.go index 65e25ea8aa2..a1aca2619f2 100644 --- a/model/modeldecoder/generator/jsonschema.go +++ b/model/modeldecoder/generator/jsonschema.go @@ -136,12 +136,15 @@ func (g *JSONSchemaGenerator) generate(st structType, key string, prop *property if !ok { break } + childProp.Items = &property{ + Type: &propertyType{names: []propertyTypeName{TypeNameObject}, required: true}, + Properties: make(map[string]*property), + } if child.name == st.name { - // if recursive reference to struct itself, set object type and do not call generate function - childProp.Items = &property{Type: &propertyType{names: []propertyTypeName{TypeNameObject}}} + // if recursive reference to struct itself do not call generate function break } - err = g.generate(child, flattenedName, &childProp) + err = g.generate(child, 
flattenedName, childProp.Items) case *types.Struct: if err = generateJSONPropertyStruct(&info, prop, &childProp); err != nil { break diff --git a/model/profile.go b/model/profile.go index 4c6c3a9f494..1c3be53bfa2 100644 --- a/model/profile.go +++ b/model/profile.go @@ -36,6 +36,7 @@ import ( const ( profileProcessorName = "profile" profileDocType = "profile" + ProfilesDataset = "apm.profiling" ) var profileProcessorEntry = common.MapStr{ @@ -70,10 +71,6 @@ func (pp PprofProfile) Transform(ctx context.Context, cfg *transform.Config) []b // Profiles are stored in their own "metrics" data stream, with a data // set per service. This enables managing retention of profiling data // per-service, and indepedently of lower volume metrics. - var dataset string - if cfg.DataStreams { - dataset = fmt.Sprintf("apm.profiling.%s", datastreams.NormalizeServiceName(pp.Metadata.Service.Name)) - } samples := make([]beat.Event, len(pp.Profile.Sample)) for i, sample := range pp.Profile.Sample { @@ -131,7 +128,7 @@ func (pp PprofProfile) Transform(ctx context.Context, cfg *transform.Config) []b } if cfg.DataStreams { event.Fields[datastreams.TypeField] = datastreams.MetricsType - event.Fields[datastreams.DatasetField] = dataset + event.Fields[datastreams.DatasetField] = ProfilesDataset } var profileLabels common.MapStr if len(sample.Label) > 0 { diff --git a/model/profile_test.go b/model/profile_test.go index f5c1d520fae..f4a0b038330 100644 --- a/model/profile_test.go +++ b/model/profile_test.go @@ -98,7 +98,7 @@ func TestPprofProfileTransform(t *testing.T) { Timestamp: timestamp, Fields: common.MapStr{ "data_stream.type": "metrics", - "data_stream.dataset": "apm.profiling.myservice", + "data_stream.dataset": "apm.profiling", "processor": common.MapStr{"event": "profile", "name": "profile"}, "service": common.MapStr{ "name": "myService", diff --git a/model/span.go b/model/span.go index 024e7e2f256..2e3c0377f00 100644 --- a/model/span.go +++ b/model/span.go @@ -19,7 +19,6 @@ package 
model import ( "context" - "fmt" "net" "time" @@ -198,9 +197,8 @@ func (e *Span) Transform(ctx context.Context, cfg *transform.Config) []beat.Even if cfg.DataStreams { // Spans are stored in a "traces" data stream along with transactions. - dataset := fmt.Sprintf("apm.%s", datastreams.NormalizeServiceName(e.Metadata.Service.Name)) fields[datastreams.TypeField] = datastreams.TracesType - fields[datastreams.DatasetField] = dataset + fields[datastreams.DatasetField] = TracesDataset } // first set the generic metadata diff --git a/model/span_test.go b/model/span_test.go index 64789fac59c..6e6ef285cf1 100644 --- a/model/span_test.go +++ b/model/span_test.go @@ -59,7 +59,7 @@ func TestSpanTransform(t *testing.T) { Span: Span{Timestamp: timestamp, Metadata: metadata}, Output: common.MapStr{ "data_stream.type": "traces", - "data_stream.dataset": "apm.myservice", + "data_stream.dataset": "apm", "processor": common.MapStr{"event": "span", "name": "transaction"}, "service": common.MapStr{"name": serviceName, "environment": env, "version": serviceVersion}, "span": common.MapStr{ @@ -77,7 +77,7 @@ func TestSpanTransform(t *testing.T) { Span: Span{Timestamp: timestamp, Metadata: metadata, Outcome: "success"}, Output: common.MapStr{ "data_stream.type": "traces", - "data_stream.dataset": "apm.myservice", + "data_stream.dataset": "apm", "processor": common.MapStr{"event": "span", "name": "transaction"}, "service": common.MapStr{"name": serviceName, "environment": env, "version": serviceVersion}, "span": common.MapStr{ @@ -126,7 +126,7 @@ func TestSpanTransform(t *testing.T) { }, Output: common.MapStr{ "data_stream.type": "traces", - "data_stream.dataset": "apm.myservice", + "data_stream.dataset": "apm", "span": common.MapStr{ "id": hexID, "duration": common.MapStr{"us": 1200}, diff --git a/model/transaction.go b/model/transaction.go index 0c46e2c128c..182e9a8dbe3 100644 --- a/model/transaction.go +++ b/model/transaction.go @@ -19,7 +19,6 @@ package model import ( "context" - "fmt" 
"time" "github.com/elastic/beats/v7/libbeat/beat" @@ -34,6 +33,7 @@ import ( const ( transactionProcessorName = "transaction" transactionDocType = "transaction" + TracesDataset = "apm" ) var ( @@ -120,9 +120,8 @@ func (e *Transaction) Transform(_ context.Context, cfg *transform.Config) []beat if cfg.DataStreams { // Transactions are stored in a "traces" data stream along with spans. - dataset := fmt.Sprintf("apm.%s", datastreams.NormalizeServiceName(e.Metadata.Service.Name)) fields[datastreams.TypeField] = datastreams.TracesType - fields[datastreams.DatasetField] = dataset + fields[datastreams.DatasetField] = TracesDataset } // first set generic metadata (order is relevant) diff --git a/model/transaction_test.go b/model/transaction_test.go index cef19b0a8ec..563ca2b15f1 100644 --- a/model/transaction_test.go +++ b/model/transaction_test.go @@ -182,7 +182,7 @@ func TestEventsTransformWithMetadata(t *testing.T) { require.Len(t, events, 1) assert.Equal(t, events[0].Fields, common.MapStr{ "data_stream.type": "traces", - "data_stream.dataset": "apm." 
+ serviceName, + "data_stream.dataset": "apm", "user": common.MapStr{"id": "123", "name": "jane"}, "client": common.MapStr{"ip": ip}, "source": common.MapStr{"ip": ip}, diff --git a/processor/otel/test_approved/consume_span.approved.json b/processor/otel/test_approved/consume_span.approved.json index 38d72b66e0b..eea09e4f70d 100644 --- a/processor/otel/test_approved/consume_span.approved.json +++ b/processor/otel/test_approved/consume_span.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -40,7 +40,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/otel/test_approved/jaeger_sampling_rate.approved.json b/processor/otel/test_approved/jaeger_sampling_rate.approved.json index cb858f95734..95ffa7ac15c 100644 --- a/processor/otel/test_approved/jaeger_sampling_rate.approved.json +++ b/processor/otel/test_approved/jaeger_sampling_rate.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -47,7 +47,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/otel/test_approved/metadata_jaeger-no-language.approved.json b/processor/otel/test_approved/metadata_jaeger-no-language.approved.json index 7947c0590d6..07902047cdc 100644 --- a/processor/otel/test_approved/metadata_jaeger-no-language.approved.json +++ b/processor/otel/test_approved/metadata_jaeger-no-language.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "3.4.12" }, - "data_stream.dataset": "apm.unknown", + 
"data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/metadata_jaeger-version.approved.json b/processor/otel/test_approved/metadata_jaeger-version.approved.json index 1968c9e3e6c..34fcfb1bd2d 100644 --- a/processor/otel/test_approved/metadata_jaeger-version.approved.json +++ b/processor/otel/test_approved/metadata_jaeger-version.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger/PHP", "version": "3.4.12" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/metadata_jaeger.approved.json b/processor/otel/test_approved/metadata_jaeger.approved.json index 12045a12e69..05692908e79 100644 --- a/processor/otel/test_approved/metadata_jaeger.approved.json +++ b/processor/otel/test_approved/metadata_jaeger.approved.json @@ -7,7 +7,7 @@ "name": "Jaeger/C++", "version": "3.2.1" }, - "data_stream.dataset": "apm.foo", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/metadata_jaeger_full-traceid.approved.json b/processor/otel/test_approved/metadata_jaeger_full-traceid.approved.json index f08c8b749b5..faf4cd9c2de 100644 --- a/processor/otel/test_approved/metadata_jaeger_full-traceid.approved.json +++ b/processor/otel/test_approved/metadata_jaeger_full-traceid.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/metadata_jaeger_minimal.approved.json b/processor/otel/test_approved/metadata_jaeger_minimal.approved.json index 2ea014e11d3..423a3674c6a 100644 --- a/processor/otel/test_approved/metadata_jaeger_minimal.approved.json +++ b/processor/otel/test_approved/metadata_jaeger_minimal.approved.json @@ -6,7 
+6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/metadata_minimal.approved.json b/processor/otel/test_approved/metadata_minimal.approved.json index 34008f72ff6..7588296aa08 100644 --- a/processor/otel/test_approved/metadata_minimal.approved.json +++ b/processor/otel/test_approved/metadata_minimal.approved.json @@ -6,7 +6,7 @@ "name": "Foo", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/span_jaeger_custom.approved.json b/processor/otel/test_approved/span_jaeger_custom.approved.json index 1ac4cb50267..ab0c26d1939 100644 --- a/processor/otel/test_approved/span_jaeger_custom.approved.json +++ b/processor/otel/test_approved/span_jaeger_custom.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/otel/test_approved/span_jaeger_db.approved.json b/processor/otel/test_approved/span_jaeger_db.approved.json index af989813dc9..2fd7416a916 100644 --- a/processor/otel/test_approved/span_jaeger_db.approved.json +++ b/processor/otel/test_approved/span_jaeger_db.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "db", diff --git a/processor/otel/test_approved/span_jaeger_http.approved.json b/processor/otel/test_approved/span_jaeger_http.approved.json index 8bc36446961..7628148e5aa 100644 --- a/processor/otel/test_approved/span_jaeger_http.approved.json +++ b/processor/otel/test_approved/span_jaeger_http.approved.json @@ -6,7 +6,7 @@ 
"name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "foo.bar.com", @@ -81,7 +81,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -142,7 +142,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "23b7ac1bdf1ca957f9f581cfadee467c", @@ -198,7 +198,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -256,7 +256,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -314,7 +314,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -372,7 +372,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "c9221918248f05433f6b81c46a666aee", diff --git a/processor/otel/test_approved/span_jaeger_http_status_code.approved.json b/processor/otel/test_approved/span_jaeger_http_status_code.approved.json index 7ee56b57b25..7c13ba7dc46 100644 --- a/processor/otel/test_approved/span_jaeger_http_status_code.approved.json +++ b/processor/otel/test_approved/span_jaeger_http_status_code.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": 
"foo.bar.com", diff --git a/processor/otel/test_approved/span_jaeger_https_default_port.approved.json b/processor/otel/test_approved/span_jaeger_https_default_port.approved.json index af475472371..0c349033049 100644 --- a/processor/otel/test_approved/span_jaeger_https_default_port.approved.json +++ b/processor/otel/test_approved/span_jaeger_https_default_port.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "foo.bar.com", diff --git a/processor/otel/test_approved/span_jaeger_messaging.approved.json b/processor/otel/test_approved/span_jaeger_messaging.approved.json index 032f4516de9..799ae7483a1 100644 --- a/processor/otel/test_approved/span_jaeger_messaging.approved.json +++ b/processor/otel/test_approved/span_jaeger_messaging.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "mq", diff --git a/processor/otel/test_approved/transaction_jaeger_custom.approved.json b/processor/otel/test_approved/transaction_jaeger_custom.approved.json index cf301f83203..31d795e1e78 100644 --- a/processor/otel/test_approved/transaction_jaeger_custom.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_custom.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/transaction_jaeger_full.approved.json b/processor/otel/test_approved/transaction_jaeger_full.approved.json index 7ffece71652..070a671f2dc 100644 --- a/processor/otel/test_approved/transaction_jaeger_full.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_full.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", 
"version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -76,7 +76,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -143,7 +143,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "23b7ac1bdf1ca957f9f581cfadee467c", @@ -205,7 +205,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -269,7 +269,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -333,7 +333,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -397,7 +397,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.error.unknown", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "c9221918248f05433f6b81c46a666aee", diff --git a/processor/otel/test_approved/transaction_jaeger_no_attrs.approved.json b/processor/otel/test_approved/transaction_jaeger_no_attrs.approved.json index 81eb60c050b..79a8732a45f 100644 --- a/processor/otel/test_approved/transaction_jaeger_no_attrs.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_no_attrs.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "failure" diff --git 
a/processor/otel/test_approved/transaction_jaeger_type_component.approved.json b/processor/otel/test_approved/transaction_jaeger_type_component.approved.json index b414c1171e4..9ae85c5d296 100644 --- a/processor/otel/test_approved/transaction_jaeger_type_component.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_type_component.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/transaction_jaeger_type_messaging.approved.json b/processor/otel/test_approved/transaction_jaeger_type_messaging.approved.json index 3ca0d234b8f..d9ca4da167d 100644 --- a/processor/otel/test_approved/transaction_jaeger_type_messaging.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_type_messaging.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/otel/test_approved/transaction_jaeger_type_request.approved.json b/processor/otel/test_approved/transaction_jaeger_type_request.approved.json index 9f10e0ec06f..b8ea71c8f77 100644 --- a/processor/otel/test_approved/transaction_jaeger_type_request.approved.json +++ b/processor/otel/test_approved/transaction_jaeger_type_request.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "failure" diff --git a/processor/otel/test_approved/transaction_jaeger_type_request_result.approved.json b/processor/otel/test_approved/transaction_jaeger_type_request_result.approved.json index 679228c4af0..cc9d4b28eed 100644 --- a/processor/otel/test_approved/transaction_jaeger_type_request_result.approved.json +++ 
b/processor/otel/test_approved/transaction_jaeger_type_request_result.approved.json @@ -6,7 +6,7 @@ "name": "Jaeger", "version": "unknown" }, - "data_stream.dataset": "apm.unknown", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationErrors.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationErrors.approved.json index 0389ee8613c..96a9a1e3e91 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationErrors.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationErrors.approved.json @@ -33,7 +33,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.error.service1", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "culprit": "my.module.function_name", @@ -377,7 +377,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.error.1234_service_12a3", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "dc8dd667f7036ec5f0bae87bf2188243", @@ -483,7 +483,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.error.1234_service_12a3", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -585,7 +585,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.error.service1", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "exception": [ @@ -693,7 +693,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.error.1234_service_12a3", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "d6b3f958dfea98dc9ed2b57d5f0c48bb", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationEvents.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationEvents.approved.json index 
d44e28bd1f4..b8bdea2433b 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationEvents.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationEvents.approved.json @@ -13,7 +13,7 @@ "container": { "id": "8ec7ceb990749e79b37f6dc6cd3628633618d6ce412553a552a0fa6b69419ad4" }, - "data_stream.dataset": "apm.experimental_java", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -196,7 +196,7 @@ "container": { "id": "8ec7ceb990749e79b37f6dc6cd3628633618d6ce412553a552a0fa6b69419ad4" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -343,7 +343,7 @@ "container": { "id": "8ec7ceb990749e79b37f6dc6cd3628633618d6ce412553a552a0fa6b69419ad4" }, - "data_stream.dataset": "apm.internal.1234_service_12a3", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "dotted": { "float": { @@ -469,7 +469,7 @@ "container": { "id": "8ec7ceb990749e79b37f6dc6cd3628633618d6ce412553a552a0fa6b69419ad4" }, - "data_stream.dataset": "apm.error.service1", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "culprit": "opbeans.controllers.DTInterceptor.preHandle(DTInterceptor.java:73)", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidEvent.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidEvent.approved.json index f659fa936a9..3c0be4e4581 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidEvent.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidEvent.approved.json @@ -6,7 +6,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git 
a/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidJSONEvent.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidJSONEvent.approved.json index f659fa936a9..3c0be4e4581 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidJSONEvent.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationInvalidJSONEvent.approved.json @@ -6,7 +6,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationMetadataNullValues.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationMetadataNullValues.approved.json index 28a23968ebf..c15059bb4b6 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationMetadataNullValues.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationMetadataNullValues.approved.json @@ -6,7 +6,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.error.1234_service_12a3", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "d6b3f958dfea98dc9ed2b57d5f0c48bb", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationMetricsets.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationMetricsets.approved.json index 7290188947c..3f63c1aea3b 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationMetricsets.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationMetricsets.approved.json @@ -7,7 +7,7 @@ "version": "3.14.0" }, "byte_counter": 1, - "data_stream.dataset": "apm.internal.1234_service_12a3", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "dotted": { "float": { @@ -99,7 
+99,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "metrics", "go": { "memstats": { @@ -145,7 +145,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "metrics", "host": { "ip": "192.0.0.1" diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationMinimalService.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationMinimalService.approved.json index 640713b5f48..8e288c89924 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationMinimalService.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationMinimalService.approved.json @@ -6,7 +6,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "metrics", "go": { "memstats": { @@ -37,7 +37,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.error.1234_service_12a3", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "grouping_key": "d6b3f958dfea98dc9ed2b57d5f0c48bb", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationOptionalTimestamps.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationOptionalTimestamps.approved.json index 00525a34146..1de26b78e7b 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationOptionalTimestamps.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationOptionalTimestamps.approved.json @@ -6,7 +6,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -83,7 +83,7 @@ "name": "elastic-node", "version": 
"3.14.0" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -165,7 +165,7 @@ "name": "elastic-node", "version": "3.14.0" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "metrics", "host": { "architecture": "x64", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationRumErrors.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationRumErrors.approved.json index 943b273fc39..502f1b0ba89 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationRumErrors.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationRumErrors.approved.json @@ -9,7 +9,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.error.apm_agent_js", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "culprit": "test/e2e/general-usecase/bundle.js.map", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationRumTransactions.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationRumTransactions.approved.json index 79051b7e7da..e7c7de01031 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationRumTransactions.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationRumTransactions.approved.json @@ -9,7 +9,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_agent_js", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -77,7 +77,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_agent_js", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationSpans.approved.json 
b/processor/stream/test_approved_es_documents/testIntakeIntegrationSpans.approved.json index f822471e299..f31821ca061 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationSpans.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationSpans.approved.json @@ -34,7 +34,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -151,7 +151,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -269,7 +269,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -391,7 +391,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -510,7 +510,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "0:0::0:1", @@ -711,7 +711,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.backendspans", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "0:0::0:1", diff --git a/processor/stream/test_approved_es_documents/testIntakeIntegrationTransactions.approved.json b/processor/stream/test_approved_es_documents/testIntakeIntegrationTransactions.approved.json index 40942881c59..b45ae418722 100644 --- a/processor/stream/test_approved_es_documents/testIntakeIntegrationTransactions.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeIntegrationTransactions.approved.json @@ -29,7 +29,7 @@ "container": { "id": "container-id" }, 
- "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -148,7 +148,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -358,7 +358,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.service1", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -500,7 +500,7 @@ "container": { "id": "container-id" }, - "data_stream.dataset": "apm.1234_service_12a3", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" diff --git a/processor/stream/test_approved_es_documents/testIntakeRUMV3Errors.approved.json b/processor/stream/test_approved_es_documents/testIntakeRUMV3Errors.approved.json index 9d4b28ce273..e58bbaba770 100644 --- a/processor/stream/test_approved_es_documents/testIntakeRUMV3Errors.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeRUMV3Errors.approved.json @@ -9,7 +9,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.error.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm.error", "data_stream.type": "logs", "error": { "culprit": "test/e2e/general-usecase/app.e2e-bundle.min.js?token=secret", diff --git a/processor/stream/test_approved_es_documents/testIntakeRUMV3Events.approved.json b/processor/stream/test_approved_es_documents/testIntakeRUMV3Events.approved.json index 6cf3af3b8c7..4711169427a 100644 --- a/processor/stream/test_approved_es_documents/testIntakeRUMV3Events.approved.json +++ b/processor/stream/test_approved_es_documents/testIntakeRUMV3Events.approved.json @@ -9,7 +9,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ 
-157,7 +157,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -228,7 +228,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -299,7 +299,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "localhost", @@ -391,7 +391,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "unknown" @@ -461,7 +461,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "localhost", @@ -553,7 +553,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "localhost", @@ -645,7 +645,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "destination": { "address": "localhost", @@ -738,7 +738,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm", "data_stream.type": "traces", "event": { "outcome": "success" @@ -831,7 +831,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.internal.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "labels": { "testTagKey": "testTagValue" @@ -888,7 
+888,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.internal.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "labels": { "testTagKey": "testTagValue" @@ -945,7 +945,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.internal.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "labels": { "testTagKey": "testTagValue" @@ -1002,7 +1002,7 @@ "client": { "ip": "192.0.0.1" }, - "data_stream.dataset": "apm.internal.apm_a_rum_test_e2e_general_usecase", + "data_stream.dataset": "apm.internal", "data_stream.type": "metrics", "labels": { "tag1": "value1", diff --git a/systemtest/apikeycmd_test.go b/systemtest/apikeycmd_test.go index 4c59762ee33..1d08f5681da 100644 --- a/systemtest/apikeycmd_test.go +++ b/systemtest/apikeycmd_test.go @@ -37,6 +37,10 @@ import ( func apiKeyCommand(subcommand string, args ...string) *apmservertest.ServerCmd { cfg := apmservertest.DefaultConfig() + return apiKeyCommandConfig(cfg, subcommand, args...) 
+} + +func apiKeyCommandConfig(cfg apmservertest.Config, subcommand string, args ...string) *apmservertest.ServerCmd { cfgargs, err := cfg.Args() if err != nil { panic(err) @@ -86,6 +90,22 @@ func TestAPIKeyCreateExpiration(t *testing.T) { assert.Contains(t, attrs, "expiration") } +func TestAPIKeyCreateInvalidUser(t *testing.T) { + // heartbeat_user lacks cluster privileges, and cannot create keys + // beats_user has cluster privileges, but not APM application privileges + for _, username := range []string{"heartbeat_user", "beats_user"} { + cfg := apmservertest.DefaultConfig() + cfg.Output.Elasticsearch.Username = username + cfg.Output.Elasticsearch.Password = "changeme" + + cmd := apiKeyCommandConfig(cfg, "create", "--name", t.Name(), "--json") + out, err := cmd.CombinedOutput() + require.Error(t, err) + attrs := decodeJSONMap(t, bytes.NewReader(out)) + assert.Regexp(t, username+` is missing the following requested privilege\(s\): .*`, attrs["error"]) + } +} + func TestAPIKeyInvalidateName(t *testing.T) { systemtest.InvalidateAPIKeys(t) defer systemtest.InvalidateAPIKeys(t) diff --git a/systemtest/apmservertest/config.go b/systemtest/apmservertest/config.go index 42c05e34f2e..9f5e63f8bd7 100644 --- a/systemtest/apmservertest/config.go +++ b/systemtest/apmservertest/config.go @@ -21,6 +21,7 @@ import ( "encoding/json" "fmt" "net" + "net/http" "net/url" "os" "sort" @@ -48,6 +49,10 @@ type Config struct { Sampling *SamplingConfig `json:"apm-server.sampling,omitempty"` RUM *RUMConfig `json:"apm-server.rum,omitempty"` DataStreams *DataStreamsConfig `json:"apm-server.data_streams,omitempty"` + APIKey *APIKeyConfig `json:"apm-server.api_key,omitempty"` + + // ResponseHeaders holds headers to add to all APM Server HTTP responses. + ResponseHeaders http.Header `json:"apm-server.response_headers,omitempty"` // Instrumentation holds configuration for libbeat and apm-server instrumentation. 
Instrumentation *InstrumentationConfig `json:"instrumentation,omitempty"` @@ -121,12 +126,12 @@ func (t *TailSamplingConfig) MarshalJSON() ([]byte, error) { // Convert time.Durations to durations, to encode as duration strings. type config struct { Enabled bool `json:"enabled"` - Interval duration `json:"interval"` + Interval string `json:"interval"` Policies []TailSamplingPolicy `json:"policies,omitempty"` } return json.Marshal(config{ Enabled: t.Enabled, - Interval: duration(t.Interval), + Interval: durationString(t.Interval), Policies: t.Policies, }) } @@ -143,6 +148,9 @@ type TailSamplingPolicy struct { // RUMConfig holds APM Server RUM configuration. type RUMConfig struct { Enabled bool `json:"enabled"` + + // ResponseHeaders holds headers to add to all APM Server RUM HTTP responses. + ResponseHeaders http.Header `json:"response_headers,omitempty"` } // DataStreamsConfig holds APM Server data streams configuration. @@ -150,9 +158,66 @@ type DataStreamsConfig struct { Enabled bool `json:"enabled"` } +// APIKeyConfig holds APM Server API Key auth configuration. +type APIKeyConfig struct { + Enabled bool `json:"enabled"` +} + // InstrumentationConfig holds APM Server instrumentation configuration. type InstrumentationConfig struct { - Enabled bool `json:"enabled"` + Enabled bool `json:"enabled"` + Profiling *ProfilingConfig `json:"profiling,omitempty"` + + Hosts []string `json:"hosts,omitempty"` + APIKey string `json:"api_key,omitempty"` + SecretToken string `json:"secret_token,omitempty"` +} + +// ProfilingConfig holds APM Server profiling configuration. +type ProfilingConfig struct { + CPU *CPUProfilingConfig `json:"cpu,omitempty"` + Heap *HeapProfilingConfig `json:"heap,omitempty"` +} + +// CPUProfilingConfig holds APM Server profiling configuration. 
+type CPUProfilingConfig struct { + Enabled bool `json:"enabled"` + Interval time.Duration `json:"interval,omitempty"` + Duration time.Duration `json:"duration,omitempty"` +} + +func (c *CPUProfilingConfig) MarshalJSON() ([]byte, error) { + // time.Duration is encoded as int64. + // Convert time.Durations to durations, to encode as duration strings. + type config struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval,omitempty"` + Duration string `json:"duration,omitempty"` + } + return json.Marshal(config{ + Enabled: c.Enabled, + Interval: durationString(c.Interval), + Duration: durationString(c.Duration), + }) +} + +// HeapProfilingConfig holds APM Server profiling configuration. +type HeapProfilingConfig struct { + Enabled bool `json:"enabled"` + Interval time.Duration `json:"interval,omitempty"` +} + +func (c *HeapProfilingConfig) MarshalJSON() ([]byte, error) { + // time.Duration is encoded as int64. + // Convert time.Durations to durations, to encode as duration strings. + type config struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval,omitempty"` + } + return json.Marshal(config{ + Enabled: c.Enabled, + Interval: durationString(c.Interval), + }) } // OutputConfig holds APM Server libbeat output configuration. @@ -196,14 +261,14 @@ func (m *MemoryQueueConfig) MarshalJSON() ([]byte, error) { // time.Duration is encoded as int64. // Convert time.Durations to durations, to encode as duration strings. 
type config struct { - Events int `json:"events"` - FlushMinEvents int `json:"flush.min_events"` - FlushTimeout duration `json:"flush.timeout"` + Events int `json:"events"` + FlushMinEvents int `json:"flush.min_events"` + FlushTimeout string `json:"flush.timeout,omitempty"` } return json.Marshal(config{ Events: m.Events, FlushMinEvents: m.FlushMinEvents, - FlushTimeout: duration(m.FlushTimeout), + FlushTimeout: durationString(m.FlushTimeout), }) } @@ -221,14 +286,14 @@ func (m *MonitoringConfig) MarshalJSON() ([]byte, error) { type config struct { Enabled bool `json:"enabled"` Elasticsearch *ElasticsearchOutputConfig `json:"elasticsearch,omitempty"` - MetricsPeriod duration `json:"elasticsearch.metrics.period,omitempty"` - StatePeriod duration `json:"elasticsearch.state.period,omitempty"` + MetricsPeriod string `json:"elasticsearch.metrics.period,omitempty"` + StatePeriod string `json:"elasticsearch.state.period,omitempty"` } return json.Marshal(config{ Enabled: m.Enabled, Elasticsearch: m.Elasticsearch, - MetricsPeriod: duration(m.MetricsPeriod), - StatePeriod: duration(m.StatePeriod), + MetricsPeriod: durationString(m.MetricsPeriod), + StatePeriod: durationString(m.StatePeriod), }) } @@ -248,12 +313,12 @@ func (m *TransactionAggregationConfig) MarshalJSON() ([]byte, error) { // time.Duration is encoded as int64. // Convert time.Durations to durations, to encode as duration strings. type config struct { - Enabled bool `json:"enabled"` - Interval duration `json:"interval,omitempty"` + Enabled bool `json:"enabled"` + Interval string `json:"interval,omitempty"` } return json.Marshal(config{ Enabled: m.Enabled, - Interval: duration(m.Interval), + Interval: durationString(m.Interval), }) } @@ -267,19 +332,20 @@ func (s *ServiceDestinationAggregationConfig) MarshalJSON() ([]byte, error) { // time.Duration is encoded as int64. // Convert time.Durations to durations, to encode as duration strings. 
type config struct { - Enabled bool `json:"enabled"` - Interval duration `json:"interval,omitempty"` + Enabled bool `json:"enabled"` + Interval string `json:"interval,omitempty"` } return json.Marshal(config{ Enabled: s.Enabled, - Interval: duration(s.Interval), + Interval: durationString(s.Interval), }) } -type duration time.Duration - -func (d duration) MarshalText() (text []byte, err error) { - return []byte(time.Duration(d).String()), nil +func durationString(d time.Duration) string { + if d == 0 { + return "" + } + return d.String() } func configArgs(cfg Config, extra map[string]interface{}) ([]string, error) { diff --git a/systemtest/approvals/TestDataStreamsEnabled/true.approved.json b/systemtest/approvals/TestDataStreamsEnabled/true.approved.json index c648c1bfbc5..00f6eb78089 100644 --- a/systemtest/approvals/TestDataStreamsEnabled/true.approved.json +++ b/systemtest/approvals/TestDataStreamsEnabled/true.approved.json @@ -6,7 +6,7 @@ "name": "go", "version": "0.0.0" }, - "data_stream.dataset": "apm.systemtest", + "data_stream.dataset": "apm", "data_stream.namespace": "", "data_stream.type": "traces", "ecs": { diff --git a/systemtest/datastreams_test.go b/systemtest/datastreams_test.go index 5f7bfa058a2..9ece776264b 100644 --- a/systemtest/datastreams_test.go +++ b/systemtest/datastreams_test.go @@ -98,7 +98,7 @@ func TestDataStreamsEnabled(t *testing.T) { tx.End() tracer.Flush(nil) - result := systemtest.Elasticsearch.ExpectDocs(t, "apm-*,traces-apm.*", estest.TermQuery{ + result := systemtest.Elasticsearch.ExpectDocs(t, "apm-*,traces-apm*", estest.TermQuery{ Field: "processor.event", Value: "transaction", }) systemtest.ApproveEvents( diff --git a/systemtest/headers_test.go b/systemtest/headers_test.go new file mode 100644 index 00000000000..73474777a1a --- /dev/null +++ b/systemtest/headers_test.go @@ -0,0 +1,56 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package systemtest_test + +import ( + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/apm-server/systemtest" + "github.com/elastic/apm-server/systemtest/apmservertest" +) + +func TestResponseHeaders(t *testing.T) { + systemtest.CleanupElasticsearch(t) + srv := apmservertest.NewUnstartedServer(t) + srv.Config.ResponseHeaders = http.Header{} + srv.Config.ResponseHeaders.Set("both", "all_value") + srv.Config.RUM = &apmservertest.RUMConfig{Enabled: true, ResponseHeaders: http.Header{}} + srv.Config.RUM.ResponseHeaders.Set("only_rum", "rum_value") + srv.Config.RUM.ResponseHeaders.Set("both", "rum_value") + err := srv.Start() + require.NoError(t, err) + + // Non-RUM response headers are added to responses of non-RUM specific routes. + resp, err := http.Get(srv.URL) + require.NoError(t, err) + resp.Body.Close() + assert.Equal(t, []string{"all_value"}, resp.Header.Values("both")) + assert.Nil(t, resp.Header.Values("only_rum")) + + // Both RUM and non-RUM response headers are added to responses of RUM-specific routes. + // If the same key is defined in both, then the values are concatenated. 
+ resp, err = http.Get(srv.URL + "/config/v1/rum/agents") + require.NoError(t, err) + resp.Body.Close() + assert.Equal(t, []string{"all_value", "rum_value"}, resp.Header.Values("both")) + assert.Equal(t, []string{"rum_value"}, resp.Header.Values("only_rum")) +} diff --git a/systemtest/instrumentation_test.go b/systemtest/instrumentation_test.go index d7c6d51e4b3..52d4a65a90a 100644 --- a/systemtest/instrumentation_test.go +++ b/systemtest/instrumentation_test.go @@ -18,8 +18,16 @@ package systemtest_test import ( + "bytes" "encoding/json" + "net/http" + "net/http/httptest" + "net/http/httputil" + "net/url" + "sort" + "sync" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -83,3 +91,161 @@ func TestAPMServerInstrumentation(t *testing.T) { } t.Fatal("failed to identify log message with matching trace IDs") } + +func TestAPMServerInstrumentationAuth(t *testing.T) { + test := func(t *testing.T, external, useSecretToken, useAPIKey bool) { + systemtest.CleanupElasticsearch(t) + srv := apmservertest.NewUnstartedServer(t) + srv.Config.SecretToken = "hunter2" + srv.Config.APIKey = &apmservertest.APIKeyConfig{Enabled: true} + srv.Config.Instrumentation = &apmservertest.InstrumentationConfig{Enabled: true} + + serverURLChan := make(chan string, 1) + if external { + // The server URL is not known ahead of time, so we run + // a reverse proxy which waits for the server URL. 
+ var serverURL string + var serverURLOnce sync.Once + proxy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + serverURLOnce.Do(func() { + select { + case <-r.Context().Done(): + case serverURL = <-serverURLChan: + } + }) + u, err := url.Parse(serverURL) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + rp := httputil.NewSingleHostReverseProxy(u) + rp.ServeHTTP(w, r) + })) + defer proxy.Close() + srv.Config.Instrumentation.Hosts = []string{proxy.URL} + } + if useSecretToken { + srv.Config.Instrumentation.SecretToken = srv.Config.SecretToken + } + if useAPIKey { + systemtest.InvalidateAPIKeys(t) + defer systemtest.InvalidateAPIKeys(t) + + cmd := apiKeyCommand("create", "--name", t.Name(), "--json") + out, err := cmd.CombinedOutput() + require.NoError(t, err) + attrs := decodeJSONMap(t, bytes.NewReader(out)) + srv.Config.Instrumentation.APIKey = attrs["credentials"].(string) + } + + err := srv.Start() + require.NoError(t, err) + serverURLChan <- srv.URL + + // Send a transaction to the server, causing the server to + // trace the request from the agent. 
+ tracer := srv.Tracer() + tracer.StartTransaction("name", "type").End() + tracer.Flush(nil) + + systemtest.Elasticsearch.ExpectDocs(t, "apm-*", estest.BoolQuery{ + Filter: []interface{}{ + estest.TermQuery{ + Field: "processor.event", + Value: "transaction", + }, + estest.TermQuery{ + Field: "service.name", + Value: "apm-server", + }, + estest.TermQuery{ + Field: "transaction.type", + Value: "request", + }, + }, + }) + } + t.Run("self_no_auth", func(t *testing.T) { + // sending data to self, no auth specified + test(t, false, false, false) + }) + t.Run("external_secret_token", func(t *testing.T) { + // sending data to external server, secret token specified + test(t, true, true, false) + }) + t.Run("external_api_key", func(t *testing.T) { + // sending data to external server, API Key specified + test(t, true, false, true) + }) +} + +func TestAPMServerProfiling(t *testing.T) { + test := func(t *testing.T, profilingConfig *apmservertest.ProfilingConfig, expectedMetrics []string) { + systemtest.CleanupElasticsearch(t) + srv := apmservertest.NewUnstartedServer(t) + srv.Config.Instrumentation = &apmservertest.InstrumentationConfig{ + Enabled: true, + Profiling: profilingConfig, + } + err := srv.Start() + require.NoError(t, err) + + // Generate some load to cause the server to consume resources. 
+ tracer := srv.Tracer() + for i := 0; i < 1000; i++ { + tracer.StartTransaction("name", "type").End() + } + tracer.Flush(nil) + + result := systemtest.Elasticsearch.ExpectDocs(t, "apm-*", estest.TermQuery{ + Field: "processor.event", + Value: "profile", + }) + assert.Equal(t, expectedMetrics, profileMetricNames(result)) + } + t.Run("cpu", func(t *testing.T) { + test(t, &apmservertest.ProfilingConfig{ + CPU: &apmservertest.CPUProfilingConfig{ + Enabled: true, + Interval: time.Second, + Duration: time.Second, + }, + }, []string{"cpu.ns", "duration", "samples.count"}) + }) + t.Run("heap", func(t *testing.T) { + test(t, &apmservertest.ProfilingConfig{ + Heap: &apmservertest.HeapProfilingConfig{ + Enabled: true, + Interval: time.Second, + }, + }, []string{ + "alloc_objects.count", + "alloc_space.bytes", + "inuse_objects.count", + "inuse_space.bytes", + }) + }) +} + +func profileMetricNames(result estest.SearchResult) []string { + unique := make(map[string]struct{}) + var metricNames []string + for _, hit := range result.Hits.Hits { + profileField, ok := hit.Source["profile"].(map[string]interface{}) + if !ok { + continue + } + for k, v := range profileField { + if _, ok := v.(float64); !ok { + continue + } + if _, ok := unique[k]; ok { + continue + } + unique[k] = struct{}{} + metricNames = append(metricNames, k) + } + } + sort.Strings(metricNames) + return metricNames +} diff --git a/systemtest/sampling_test.go b/systemtest/sampling_test.go index f407a2d5629..401c2f9047e 100644 --- a/systemtest/sampling_test.go +++ b/systemtest/sampling_test.go @@ -68,6 +68,23 @@ func TestKeepUnsampled(t *testing.T) { } } +func TestKeepUnsampledWarning(t *testing.T) { + systemtest.CleanupElasticsearch(t) + srv := apmservertest.NewUnstartedServer(t) + srv.Config.Sampling = &apmservertest.SamplingConfig{KeepUnsampled: false} + require.NoError(t, srv.Start()) + require.NoError(t, srv.Close()) + + var messages []string + for _, log := range srv.Logs.All() { + messages = append(messages, 
log.Message) + } + assert.Contains(t, messages, ""+ + "apm-server.sampling.keep_unsampled and apm-server.aggregation.transactions.enabled are both false, "+ + "which will lead to incorrect metrics being reported in the APM UI", + ) +} + func TestTailSampling(t *testing.T) { systemtest.CleanupElasticsearch(t) diff --git a/tests/system/config/apm-server.yml.j2 b/tests/system/config/apm-server.yml.j2 index 8594d183aef..8b075e8e97f 100644 --- a/tests/system/config/apm-server.yml.j2 +++ b/tests/system/config/apm-server.yml.j2 @@ -109,34 +109,6 @@ apm-server: register.ingest.pipeline.overwrite: {{ register_pipeline_overwrite }} {% endif %} - {% if instrumentation_enabled %} - instrumentation.enabled: {{ instrumentation_enabled }} - {% endif %} - {% if instrumentation_host %} - instrumentation.hosts: [{{ instrumentation_host }}] - {% endif %} - {% if instrumentation_api_key %} - instrumentation.api_key: {{ instrumentation_api_key }} - {% endif %} - {% if instrumentation_secret_token %} - instrumentation.secret_token: {{ instrumentation_secret_token }} - {% endif %} - {% if profiling_cpu_enabled %} - instrumentation.profiling.cpu.enabled: {{ profiling_cpu_enabled }} - {% endif %} - {% if profiling_cpu_interval %} - instrumentation.profiling.cpu.interval: {{ profiling_cpu_interval }} - {% endif %} - {% if profiling_cpu_duration %} - instrumentation.profiling.cpu.duration: {{ profiling_cpu_duration }} - {% endif %} - {% if profiling_heap_enabled %} - instrumentation.profiling.heap.enabled: {{ profiling_heap_enabled }} - {% endif %} - {% if profiling_heap_interval %} - instrumentation.profiling.heap.interval: {{ profiling_heap_interval }} - {% endif %} - {% if aggregation_enabled %} aggregation.transactions.enabled: {{ aggregation_enabled }} {% endif %} @@ -207,37 +179,6 @@ apm-server: {% endif %} {% if acm_cache_expiration is not none %} agent.config.cache.expiration: {{ acm_cache_expiration }}{% endif %} - -################### Libbeat instrumentation 
############################### -{% if libbeat_instrumentation_enabled %} -instrumentation.enabled: {{ libbeat_instrumentation_enabled }} -{% endif %} -{% if libbeat_instrumentation_host %} -instrumentation.hosts: [{{ libbeat_instrumentation_host }}] -{% endif %} -{% if libbeat_instrumentation_api_key %} -instrumentation.api_key: {{ libbeat_instrumentation_api_key }} -{% endif %} -{% if libbeat_instrumentation_secret_token %} -instrumentation.secret_token: {{ libbeat_instrumentation_secret_token }} -{% endif %} -{% if libbeat_profiling_cpu_enabled %} -instrumentation.profiling.cpu.enabled: {{ libbeat_profiling_cpu_enabled }} -{% endif %} -{% if libbeat_profiling_cpu_interval %} -instrumentation.profiling.cpu.interval: {{ libbeat_profiling_cpu_interval }} -{% endif %} -{% if libbeat_profiling_cpu_duration %} -instrumentation.profiling.cpu.duration: {{ libbeat_profiling_cpu_duration }} -{% endif %} -{% if libbeat_profiling_heap_enabled %} -instrumentation.profiling.heap.enabled: {{ libbeat_profiling_heap_enabled }} -{% endif %} -{% if libbeat_profiling_heap_interval %} -instrumentation.profiling.heap.interval: {{ libbeat_profiling_heap_interval }} -{% endif %} - - ############################# Setup ########################################## {% if override_template %} diff --git a/tests/system/test_apikey_cmd.py b/tests/system/test_apikey_cmd.py index edcac357b82..ae8530a6daa 100644 --- a/tests/system/test_apikey_cmd.py +++ b/tests/system/test_apikey_cmd.py @@ -186,38 +186,3 @@ def test_verify_each(self): apikey = self.create("--agent-config") result = self.subcommand_output("verify", "--credentials={}".format(apikey["credentials"])) assert result == {'event:write': False, 'config_agent:read': True, 'sourcemap:write': False}, result - - -@integration_test -class APIKeyCommandBadUserTest(APIKeyCommandBaseTest): - - def config(self): - return { - "elasticsearch_host": self.get_elasticsearch_url(user="heartbeat_user", password="changeme"), - "file_enabled": "false", - 
"kibana_enabled": "false", - } - - def test_create_bad_user(self): - """heartbeat_user doesn't have required cluster privileges, so it can't create keys""" - result = self.subcommand_output("create", "--name", self.apikey_name, exit_code=1) - assert result.get("error") is not None - - -@integration_test -class APIKeyCommandBadUser2Test(APIKeyCommandBaseTest): - - def config(self): - return { - "elasticsearch_host": self.get_elasticsearch_url(user="beats_user", password="changeme"), - "file_enabled": "false", - "kibana_enabled": "false", - } - - def test_create_bad_user(self): - """beats_user does have required cluster privileges, but not APM application privileges, - so it can't create keys - """ - result = self.subcommand_output("create", "--name", self.apikey_name, exit_code=1) - assert result.get("error") is not None, result - assert "beats_user is missing the following requested privilege(s):" in result.get("error"), result diff --git a/tests/system/test_instrumentation.py b/tests/system/test_instrumentation.py deleted file mode 100644 index 42bd821ffdc..00000000000 --- a/tests/system/test_instrumentation.py +++ /dev/null @@ -1,177 +0,0 @@ -from datetime import datetime, timedelta -import os -import time -import requests - -from apmserver import integration_test -from apmserver import ElasticTest -from test_auth import APIKeyBaseTest -from helper import wait_until -from es_helper import index_profile, index_transaction - -# Set ELASTIC_APM_API_REQUEST_TIME to a short duration -# to speed up the time taken for self-tracing events -# to be ingested. 
-os.environ["ELASTIC_APM_API_REQUEST_TIME"] = "1s" - - -# Exercises the DEPRECATED apm-server.instrumentation.* config -# When updating this file, consider test_libbeat_instrumentation.py -# Remove in 8.0 - -def get_instrumentation_event(es, index): - query = {"term": {"service.name": "apm-server"}} - return es.count(index=index, body={"query": query})['count'] > 0 - - -@integration_test -class TestInMemoryTracingAPIKey(APIKeyBaseTest): - def config(self): - cfg = super(TestInMemoryTracingAPIKey, self).config() - cfg.update({ - "api_key_enabled": True, - "instrumentation_enabled": "true", - }) - return cfg - - def test_api_key_auth(self): - """Self-instrumentation using in-memory listener without configuring an APIKey""" - - # Send a POST request to the intake API URL. Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's in-memory tracing, - # and test that the in-memory tracer works without having an api_key configured - r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have in-memory instrumentation documents without api_key') - - -@integration_test -class TestExternalTracingAPIKey(APIKeyBaseTest): - def config(self): - cfg = super(TestExternalTracingAPIKey, self).config() - api_key = self.create_apm_api_key([self.privilege_event], self.resource_any) - cfg.update({ - "api_key_enabled": True, - "instrumentation_enabled": "true", - "instrumentation_api_key": api_key, - # Set instrumentation.hosts to the same APM Server. - # - # Explicitly specifying hosts configures the tracer to - # behave as if it's sending to an external server, rather - # than using the in-memory transport that bypasses auth. - "instrumentation_host": APIKeyBaseTest.host, - }) - return cfg - - def test_api_key_auth(self): - # Send a POST request to the intake API URL. 
Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's tracing. - r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have external server instrumentation documents with api_key') - - -@integration_test -class TestExternalTracingSecretToken(ElasticTest): - def config(self): - cfg = super(TestExternalTracingSecretToken, self).config() - secret_token = "abc123" - cfg.update({ - "secret_token": secret_token, - "instrumentation_enabled": "true", - "instrumentation_secret_token": secret_token, - # Set instrumentation.hosts to the same APM Server. - # - # Explicitly specifying hosts configures the tracer to - # behave as if it's sending to an external server, rather - # than using the in-memory transport that bypasses auth. - "instrumentation_host": ElasticTest.host, - }) - return cfg - - def test_secret_token_auth(self): - # Send a POST request to the intake API URL. Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's tracing. 
- r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have external server instrumentation documents with secret_token') - - -class ProfilingTest(ElasticTest): - def metric_fields(self): - metric_fields = set() - rs = self.es.search(index=index_profile) - for hit in rs["hits"]["hits"]: - profile = hit["_source"]["profile"] - metric_fields.update((k for (k, v) in profile.items() if type(v) is int)) - return metric_fields - - def wait_for_profile(self): - def cond(): - response = self.es.count(index=index_profile, body={"query": {"term": {"processor.name": "profile"}}}) - return response['count'] != 0 - wait_until(cond, max_timeout=10, name="waiting for profile") - - -@integration_test -class TestCPUProfiling(ProfilingTest): - config_overrides = { - "instrumentation_enabled": "true", - "profiling_cpu_enabled": "true", - "profiling_cpu_interval": "1s", - "profiling_cpu_duration": "5s", - } - - def test_self_profiling(self): - """CPU profiling enabled""" - - def create_load(): - payload_path = self.get_payload_path("transactions_spans.ndjson") - with open(payload_path) as f: - requests.post(self.intake_url, data=f, headers={'content-type': 'application/x-ndjson'}) - - # Wait for profiling to begin, and then start sending data - # to the server to create some CPU load. 
- - time.sleep(1) - start = datetime.now() - while datetime.now()-start < timedelta(seconds=5): - create_load() - self.wait_for_profile() - - expected_metric_fields = set([u"cpu.ns", u"samples.count", u"duration"]) - metric_fields = self.metric_fields() - self.assertEqual(metric_fields, expected_metric_fields) - - -@integration_test -class TestHeapProfiling(ProfilingTest): - config_overrides = { - "instrumentation_enabled": "true", - "profiling_heap_enabled": "true", - "profiling_heap_interval": "1s", - } - - def test_self_profiling(self): - """Heap profiling enabled""" - - time.sleep(1) - self.wait_for_profile() - - expected_metric_fields = set([ - u"alloc_objects.count", - u"inuse_objects.count", - u"alloc_space.bytes", - u"inuse_space.bytes", - ]) - metric_fields = self.metric_fields() - self.assertEqual(metric_fields, expected_metric_fields) diff --git a/tests/system/test_libbeat_instrumentation.py b/tests/system/test_libbeat_instrumentation.py deleted file mode 100644 index d813f6d8545..00000000000 --- a/tests/system/test_libbeat_instrumentation.py +++ /dev/null @@ -1,176 +0,0 @@ -from datetime import datetime, timedelta -import os -import time -import requests - -from apmserver import integration_test -from apmserver import ElasticTest -from test_auth import APIKeyBaseTest -from helper import wait_until -from es_helper import index_profile, index_transaction - -# Set ELASTIC_APM_API_REQUEST_TIME to a short duration -# to speed up the time taken for self-tracing events -# to be ingested. 
-os.environ["ELASTIC_APM_API_REQUEST_TIME"] = "1s" - -# This exercises the instrumentation.* config -# When updating this file, consider test_instrumentation.py - - -def get_instrumentation_event(es, index): - query = {"term": {"service.name": "apm-server"}} - return es.count(index=index, body={"query": query})['count'] > 0 - - -@integration_test -class TestInMemoryTracingAPIKey(APIKeyBaseTest): - def config(self): - cfg = super(TestInMemoryTracingAPIKey, self).config() - cfg.update({ - "api_key_enabled": True, - "libbeat_instrumentation_enabled": "true", - }) - return cfg - - def test_api_key_auth(self): - """Self-instrumentation using in-memory listener without configuring an APIKey""" - - # Send a POST request to the intake API URL. Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's in-memory tracing, - # and test that the in-memory tracer works without having an api_key configured - r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have in-memory instrumentation documents without api_key') - - -@integration_test -class TestExternalTracingAPIKey(APIKeyBaseTest): - def config(self): - cfg = super(TestExternalTracingAPIKey, self).config() - api_key = self.create_apm_api_key([self.privilege_event], self.resource_any) - cfg.update({ - "api_key_enabled": True, - "libbeat_instrumentation_enabled": "true", - "libbeat_instrumentation_api_key": api_key, - # Set instrumentation.hosts to the same APM Server. - # - # Explicitly specifying hosts configures the tracer to - # behave as if it's sending to an external server, rather - # than using the in-memory transport that bypasses auth. - "libbeat_instrumentation_host": APIKeyBaseTest.host, - }) - return cfg - - def test_api_key_auth(self): - # Send a POST request to the intake API URL. 
Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's tracing. - r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have external server instrumentation documents with api_key') - - -@integration_test -class TestExternalTracingSecretToken(ElasticTest): - def config(self): - cfg = super(TestExternalTracingSecretToken, self).config() - secret_token = "abc123" - cfg.update({ - "secret_token": secret_token, - "libbeat_instrumentation_enabled": "true", - "libbeat_instrumentation_secret_token": secret_token, - # Set instrumentation.hosts to the same APM Server. - # - # Explicitly specifying hosts configures the tracer to - # behave as if it's sending to an external server, rather - # than using the in-memory transport that bypasses auth. - "libbeat_instrumentation_host": ElasticTest.host, - }) - return cfg - - def test_secret_token_auth(self): - # Send a POST request to the intake API URL. Doesn't matter what the - # request body contents are, as the request will fail due to lack of - # authorization. We just want to trigger the server's tracing. 
- r = requests.post(self.intake_url, data="invalid") - self.assertEqual(401, r.status_code) - - wait_until(lambda: get_instrumentation_event(self.es, index_transaction), - name='have external server instrumentation documents with secret_token') - - -class ProfilingTest(ElasticTest): - def metric_fields(self): - metric_fields = set() - rs = self.es.search(index=index_profile) - for hit in rs["hits"]["hits"]: - profile = hit["_source"]["profile"] - metric_fields.update((k for (k, v) in profile.items() if type(v) is int)) - return metric_fields - - def wait_for_profile(self): - def cond(): - response = self.es.count(index=index_profile, body={"query": {"term": {"processor.name": "profile"}}}) - return response['count'] != 0 - wait_until(cond, max_timeout=10, name="waiting for profile") - - -@integration_test -class TestCPUProfiling(ProfilingTest): - config_overrides = { - "libbeat_instrumentation_enabled": "true", - "libbeat_profiling_cpu_enabled": "true", - "libbeat_profiling_cpu_interval": "1s", - "libbeat_profiling_cpu_duration": "5s", - } - - def test_self_profiling(self): - """CPU profiling enabled""" - - def create_load(): - payload_path = self.get_payload_path("transactions_spans.ndjson") - with open(payload_path, 'rb') as f: - requests.post(self.intake_url, data=f, headers={'content-type': 'application/x-ndjson'}) - - # Wait for profiling to begin, and then start sending data - # to the server to create some CPU load. 
- - time.sleep(1) - start = datetime.now() - while datetime.now()-start < timedelta(seconds=5): - create_load() - self.wait_for_profile() - - expected_metric_fields = set([u"cpu.ns", u"samples.count", u"duration"]) - metric_fields = self.metric_fields() - self.assertEqual(metric_fields, expected_metric_fields) - - -@integration_test -class TestHeapProfiling(ProfilingTest): - config_overrides = { - "libbeat_instrumentation_enabled": "true", - "libbeat_profiling_heap_enabled": "true", - "libbeat_profiling_heap_interval": "1s", - } - - def test_self_profiling(self): - """Heap profiling enabled""" - - time.sleep(1) - self.wait_for_profile() - - expected_metric_fields = set([ - u"alloc_objects.count", - u"inuse_objects.count", - u"alloc_space.bytes", - u"inuse_space.bytes", - ]) - metric_fields = self.metric_fields() - self.assertEqual(metric_fields, expected_metric_fields) diff --git a/tests/system/test_sampling.py b/tests/system/test_sampling.py deleted file mode 100644 index 52c6255b4a8..00000000000 --- a/tests/system/test_sampling.py +++ /dev/null @@ -1,56 +0,0 @@ -import time - -from apmserver import integration_test -from apmserver import ClientSideElasticTest, ElasticTest, ExpvarBaseTest, ProcStartupFailureTest -from helper import wait_until -from es_helper import index_smap, index_metric, index_transaction - - -@integration_test -class TestKeepUnsampled(ElasticTest): - def config(self): - cfg = super(TestKeepUnsampled, self).config() - cfg.update({"sampling_keep_unsampled": True}) - return cfg - - def test(self): - self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"), - self.intake_url, 'transaction', 9) - self.assert_no_logged_warnings() - docs = self.wait_for_events('transaction', 4, index=index_transaction) - self.approve_docs('keep_unsampled_transactions', docs) - - -@integration_test -class TestDropUnsampled(ElasticTest): - def config(self): - cfg = super(TestDropUnsampled, self).config() - cfg.update({ - 
"sampling_keep_unsampled": False, - # Enable aggregation to avoid a warning. - "aggregation_enabled": True, - }) - return cfg - - def test(self): - self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"), - self.intake_url, 'transaction', 8) - self.assert_no_logged_warnings() - docs = self.wait_for_events('transaction', 3, index=index_transaction) - self.approve_docs('drop_unsampled_transactions', docs) - - -@integration_test -class TestConfigWarning(ElasticTest): - def config(self): - cfg = super(TestConfigWarning, self).config() - cfg.update({ - "sampling_keep_unsampled": False, - # Disable aggregation to force a warning. - "aggregation_enabled": False, - }) - return cfg - - def test(self): - expected = "apm-server.sampling.keep_unsampled and apm-server.aggregation.transactions.enabled are both false, which will lead to incorrect metrics being reported in the APM UI" - self.assertIn(expected, self.get_log())