diff --git a/go.mod b/go.mod
index 9149961adb..42ca26a766 100644
--- a/go.mod
+++ b/go.mod
@@ -15,74 +15,74 @@ require (
 	github.com/hashicorp/vault/api v1.9.2
 	github.com/jaegertracing/jaeger v1.41.0
 	github.com/knadh/koanf v1.5.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.81.1-0.20230725205232-a877923c3c3d
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.81.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.82.0
 	github.com/prometheus/client_model v0.4.0
 	github.com/prometheus/common v0.44.0
 	github.com/prometheus/prometheus v0.46.0
@@ -97,26 +97,26 @@ require (
 	github.com/stretchr/testify v1.8.4
 	go.etcd.io/bbolt v1.3.7
 	go.etcd.io/etcd/client/v2 v2.305.9
-	go.opentelemetry.io/collector v0.81.0
-	go.opentelemetry.io/collector/config/confighttp v0.81.0
-	go.opentelemetry.io/collector/config/configtelemetry v0.81.0
-	go.opentelemetry.io/collector/config/configtls v0.81.0
-	go.opentelemetry.io/collector/confmap v0.81.0
-	go.opentelemetry.io/collector/connector v0.81.0
-	go.opentelemetry.io/collector/connector/forwardconnector v0.81.0
-	go.opentelemetry.io/collector/exporter v0.81.0
-	go.opentelemetry.io/collector/exporter/loggingexporter v0.81.0
-	go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0
-	go.opentelemetry.io/collector/exporter/otlphttpexporter v0.81.0
-	go.opentelemetry.io/collector/extension v0.81.0
-	go.opentelemetry.io/collector/extension/ballastextension v0.81.0
-	go.opentelemetry.io/collector/extension/zpagesextension v0.81.0
-	go.opentelemetry.io/collector/pdata v1.0.0-rcv0013
-	go.opentelemetry.io/collector/processor v0.81.0
-	go.opentelemetry.io/collector/processor/batchprocessor v0.81.0
-	go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.81.0
-	go.opentelemetry.io/collector/receiver v0.81.0
-	go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0
+	go.opentelemetry.io/collector v0.82.0
+	go.opentelemetry.io/collector/config/confighttp v0.82.0
+	go.opentelemetry.io/collector/config/configtelemetry v0.82.0
+	go.opentelemetry.io/collector/config/configtls v0.82.0
+	go.opentelemetry.io/collector/confmap v0.82.0
+	go.opentelemetry.io/collector/connector v0.82.0
+	go.opentelemetry.io/collector/connector/forwardconnector v0.82.0
+	go.opentelemetry.io/collector/exporter v0.82.0
+	go.opentelemetry.io/collector/exporter/loggingexporter v0.82.0
+	go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0
+	go.opentelemetry.io/collector/exporter/otlphttpexporter v0.82.0
+	go.opentelemetry.io/collector/extension v0.82.0
+	go.opentelemetry.io/collector/extension/ballastextension v0.82.0
+	go.opentelemetry.io/collector/extension/zpagesextension v0.82.0
+	go.opentelemetry.io/collector/pdata v1.0.0-rcv0014
+	go.opentelemetry.io/collector/processor v0.82.0
+	go.opentelemetry.io/collector/processor/batchprocessor v0.82.0
+	go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.82.0
+	go.opentelemetry.io/collector/receiver v0.82.0
+	go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0
 	go.opentelemetry.io/otel/metric v1.16.0
 	go.opentelemetry.io/otel/trace v1.16.0
 	go.uber.org/atomic v1.11.0
@@ -155,10 +155,10 @@ require (
 	github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
 	github.com/moby/patternmatcher v0.5.0 // indirect
 	github.com/moby/sys/sequential v0.5.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.81.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.82.0 // indirect
 	github.com/ovh/go-ovh v1.4.1 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/prometheus/client_golang v1.16.0 // indirect
@@ -171,17 +171,24 @@ require (
 	github.com/tg123/go-htpasswd v1.2.1 // indirect
 	github.com/tilinna/clock v1.1.0 // indirect
 	github.com/zeebo/xxh3 v1.0.2 // indirect
-	go.opentelemetry.io/collector/config/configauth v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configcompression v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configgrpc v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/confignet v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configopaque v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/internal v0.81.0 // indirect
-	go.opentelemetry.io/collector/extension/auth v0.81.0 // indirect
+	go.opentelemetry.io/collector/config/configauth v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configcompression v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configgrpc v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/confignet v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configopaque v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/internal v0.82.0 // indirect
+	go.opentelemetry.io/collector/extension/auth v0.82.0 // indirect
 	go.opentelemetry.io/otel/bridge/opencensus v0.39.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 // indirect
 	go.opentelemetry.io/otel/exporters/prometheus v0.39.0 // indirect
+	go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 // indirect
+	go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 // indirect
+	go.opentelemetry.io/proto/otlp v1.0.0 // indirect
 	google.golang.org/genproto/googleapis/api v0.0.0-20230717213848-3f92550aa753 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20230717213848-3f92550aa753 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20230720185612-659f7aaaa771 // indirect
 )
 
 require (
@@ -207,7 +214,7 @@ require (
 	github.com/Azure/go-autorest/logger v0.2.1 // indirect
 	github.com/Azure/go-autorest/tracing v0.6.0 // indirect
 	github.com/DataDog/zstd v1.5.0 // indirect
-	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.17.0 // indirect
+	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.18.0 // indirect
 	github.com/Microsoft/go-winio v0.6.1 // indirect
 	github.com/SAP/go-hdb v1.3.10 // indirect
 	github.com/Sectorbob/mlab-ns2 v0.0.0-20171030222938-d3aa0c295a8a // indirect
@@ -220,16 +227,16 @@ require (
 	github.com/armon/go-metrics v0.4.1 // indirect
 	github.com/armon/go-radix v1.0.0 // indirect
 	github.com/aws/aws-sdk-go v1.44.312 // indirect
-	github.com/aws/aws-sdk-go-v2 v1.18.1 // indirect
+	github.com/aws/aws-sdk-go-v2 v1.19.0 // indirect
 	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
-	github.com/aws/aws-sdk-go-v2/credentials v1.13.26 // indirect
+	github.com/aws/aws-sdk-go-v2/credentials v1.13.27 // indirect
 	github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.34 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.28 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 // indirect
 	github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect
-	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.28 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect
 	github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 // indirect
 	github.com/aws/smithy-go v1.13.5 // indirect
@@ -306,7 +313,7 @@ require (
 	github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd // indirect
 	github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect
 	github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
-	github.com/hashicorp/consul/api v1.22.0 // indirect
+	github.com/hashicorp/consul/api v1.23.0 // indirect
 	github.com/hashicorp/cronexpr v1.1.2 // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
 	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -397,25 +404,24 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/mwielbut/pointy v1.1.0 // indirect
 	github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
-	github.com/observiq/ctimefmt v1.0.0 // indirect
 	github.com/oklog/run v1.1.0 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.81.1-0.20230725205232-a877923c3c3d // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.81.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.82.0 // indirect
 	github.com/opencontainers/go-digest v1.0.0 // indirect
 	github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b // indirect
 	github.com/opencontainers/runc v1.1.6 // indirect
@@ -443,7 +449,7 @@ require (
 	github.com/signalfx/sapm-proto v0.13.0 // indirect
 	github.com/signalfx/signalfx-agent/pkg/apm v0.0.0-20230222185249-54e5d1064c5b // indirect
 	github.com/signalfx/signalfx-go v1.33.0 // indirect
-	github.com/sijms/go-ora/v2 v2.7.8 // indirect
+	github.com/sijms/go-ora/v2 v2.7.9 // indirect
 	github.com/sirupsen/logrus v1.9.3 // indirect
 	github.com/snowflakedb/gosnowflake v1.6.23 // indirect
 	github.com/soniah/gosnmp v0.0.0-20190220004421-68e8beac0db9 // indirect
@@ -470,12 +476,12 @@ require (
 	github.com/yusufpapurcu/wmi v1.2.3 // indirect
 	go.etcd.io/etcd/api/v3 v3.5.9 // indirect
 	go.etcd.io/etcd/client/pkg/v3 v3.5.9 // indirect
-	go.mongodb.org/atlas v0.30.0 // indirect
+	go.mongodb.org/atlas v0.31.0 // indirect
 	go.opencensus.io v0.24.0
-	go.opentelemetry.io/collector/component v0.81.0
-	go.opentelemetry.io/collector/consumer v0.81.0
-	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect
-	go.opentelemetry.io/collector/semconv v0.81.0
+	go.opentelemetry.io/collector/component v0.82.0
+	go.opentelemetry.io/collector/consumer v0.82.0
+	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect
+	go.opentelemetry.io/collector/semconv v0.82.0
 	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 // indirect
 	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect
 	go.opentelemetry.io/contrib/propagators/b3 v1.17.0 // indirect
@@ -496,10 +502,10 @@ require (
 	golang.org/x/tools v0.11.0 // indirect
 	golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
 	gonum.org/v1/gonum v0.13.0 // indirect
-	google.golang.org/api v0.132.0 // indirect
+	google.golang.org/api v0.134.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/genproto v0.0.0-20230717213848-3f92550aa753 // indirect
-	google.golang.org/grpc v1.56.2 // indirect
+	google.golang.org/grpc v1.57.0 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/fsnotify.v1 v1.4.7 // indirect
 	gopkg.in/go-playground/validator.v9 v9.31.0 // indirect
@@ -514,7 +520,7 @@ require (
 	k8s.io/client-go v0.27.4 // indirect
 	k8s.io/klog/v2 v2.100.1 // indirect
 	k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515 // indirect
-	k8s.io/kubelet v0.27.3 // indirect
+	k8s.io/kubelet v0.27.4 // indirect
 	k8s.io/utils v0.0.0-20230711102312-30195339c3c7 // indirect
 	sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect
 	sigs.k8s.io/structured-merge-diff/v4 v4.3.0 // indirect
diff --git a/go.sum b/go.sum
index 8e8f84c54f..880d3d4e1d 100644
--- a/go.sum
+++ b/go.sum
@@ -123,8 +123,8 @@ github.com/DataDog/zstd v1.5.0 h1:+K/VEwIAaPcHiMtQvpLD4lqW7f0Gk3xdYZmI1hD+CXo=
 github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
 github.com/GehirnInc/crypt v0.0.0-20200316065508-bb7000b8a962 h1:KeNholpO2xKjgaaSyd+DyQRrsQjhbSeS7qe4nEw8aQw=
 github.com/GehirnInc/crypt v0.0.0-20200316065508-bb7000b8a962/go.mod h1:kC29dT1vFpj7py2OvG1khBdQpo3kInWP+6QipLbdngo=
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.17.0 h1:lRSgPuLYhzZEwjNVSkmSDkhQfg4gxGuXL8453lg/PwY=
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.17.0/go.mod h1:Xx0VKh7GJ4si3rmElbh19Mejxz68ibWg/J30ZOMrqzU=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.18.0 h1:ugYJK/neZQtQeh2jc5xNoDFiMQojlAkoqJMRb7vTu1U=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.18.0/go.mod h1:Xx0VKh7GJ4si3rmElbh19Mejxz68ibWg/J30ZOMrqzU=
 github.com/HdrHistogram/hdrhistogram-go v1.1.0/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo=
 github.com/HdrHistogram/hdrhistogram-go v1.1.2 h1:5IcZpTvzydCQeHzK4Ef/D5rrSqwxob0t8PQPMybUNFM=
 github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo=
@@ -141,7 +141,6 @@ github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59
 github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
 github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
 github.com/Microsoft/hcsshim v0.9.6 h1:VwnDOgLeoi2du6dAznfmspNqTiwczvjv4K7NxuY9jsY=
-github.com/Mottl/ctimefmt v0.0.0-20190803144728-fd2ac23a585a/go.mod h1:eyj2WSIdoPMPs2eNTLpSmM6Nzqo4V80/d6jHpnJ1SAI=
 github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
 github.com/ProtonMail/go-crypto v0.0.0-20220824120805-4b6e5c587895 h1:NsReiLpErIPzRrnogAXYwSoU7txA977LjDGrbkewJbg=
@@ -209,8 +208,8 @@ github.com/aws/aws-sdk-go v1.44.312/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8
 github.com/aws/aws-sdk-go-v2 v1.9.1/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
 github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
 github.com/aws/aws-sdk-go-v2 v1.17.7/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
-github.com/aws/aws-sdk-go-v2 v1.18.1 h1:+tefE750oAb7ZQGzla6bLkOwfcQCEtC5y2RqoqCeqKo=
-github.com/aws/aws-sdk-go-v2 v1.18.1/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
+github.com/aws/aws-sdk-go-v2 v1.19.0 h1:klAT+y3pGFBU/qVf1uzwttpBbiuozJYWzNLHioyDJ+k=
+github.com/aws/aws-sdk-go-v2 v1.19.0/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno=
 github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw=
@@ -218,20 +217,20 @@ github.com/aws/aws-sdk-go-v2/config v1.18.19 h1:AqFK6zFNtq4i1EYu+eC7lcKHYnZagMn6
 github.com/aws/aws-sdk-go-v2/config v1.18.19/go.mod h1:XvTmGMY8d52ougvakOv1RpiTLPz9dlG/OQHsKU/cMmY=
 github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ=
 github.com/aws/aws-sdk-go-v2/credentials v1.13.18/go.mod h1:vnwlwjIe+3XJPBYKu1et30ZPABG3VaXJYr8ryohpIyM=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.26 h1:qmU+yhKmOCyujmuPY7tf5MxR/RKyZrOPO3V4DobiTUk=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.26/go.mod h1:GoXt2YC8jHUBbA4jr+W3JiemnIbkXOfxSXcisUsZ3os=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.27 h1:dz0yr/yR1jweAnsCx+BmjerUILVPQ6FS5AwF/OyG1kA=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.27/go.mod h1:syOqAek45ZXZp29HlnRS/BNgMIW6uiRmeuQsz4Qh2UE=
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM=
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1/go.mod h1:lfUx8puBRdM5lVVMQlwt2v+ofiG/X6Ms+dy0UkG/kXw=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.4 h1:LxK/bitrAr4lnh9LnIS6i7zWbCOdMsfzKFBI6LUCS0I=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.4/go.mod h1:E1hLXN/BL2e6YizK1zFlYd8vsfi2GTjbjBazinMmeaM=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 h1:kP3Me6Fy3vdi+9uHd7YLr6ewPxRL+PU6y15urfTaamU=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5/go.mod h1:Gj7tm95r+QsDoN2Fhuz/3npQvcZbkEf5mL70n3Xfluc=
 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 h1:E3Y+OfzOK1+rmRo/K2G0ml8Vs+Xqk0kOnf4nS0kUtBc=
 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59/go.mod h1:1M4PLSBUVfBI0aP+C9XI7SM6kZPCGYyI6izWz0TGprE=
 github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31/go.mod h1:QT0BqUvX1Bh2ABdTGnjqEjvjzrCfIniM9Sc8zn9Yndo=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.34 h1:A5UqQEmPaCFpedKouS4v+dHCTUo2sKqhoKO9U5kxyWo=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.34/go.mod h1:wZpTEecJe0Btj3IYnDx/VlUzor9wm3fJHyvLpQF0VwY=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 h1:hMUCiE3Zi5AHrRNGf5j985u0WyqI6r2NULhUfo0N/No=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35/go.mod h1:ipR5PvpSPqIqL5Mi82BxLnfMkHVbmco8kUwO2xrCi0M=
 github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25/go.mod h1:zBHOPwhBc3FlQjQJE/D3IfPWiWaQmT06Vq9aNukDo0k=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.28 h1:srIVS45eQuewqz6fKKu6ZGXaq6FuFg5NzgQBAM6g8Y4=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.28/go.mod h1:7VRpKQQedkfIEXb4k52I7swUnZP0wohVajJMRn3vsUw=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 h1:yOpYx+FTBdpk/g+sBU6Cb1H0U/TLEcYYp66mYqsPpcc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29/go.mod h1:M/eUABlDbw2uVrdAn+UsI6M727qp2fxkp8K0ejcBDUY=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 h1:p5luUImdIqywn6JpQsW3tq5GNOxKmOnEpybzPx+d1lk=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32/go.mod h1:XGhIBZDEgfqmFIugclZ6FU7v75nHhBDtzuB4xB/tEi4=
@@ -245,23 +244,23 @@ github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 h1:CeuSeq/8FnYpPt
 github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26/go.mod h1:2UqAAwMUXKeRkAHIlDJqvMVgOWkUi/AUXPk/YIe+Dg4=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25/go.mod h1:/95IA+0lMnzW6XzqYJRpjjsAbKEORVeO0anQqjd2CNU=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.28 h1:bkRyG4a929RCnpVSTvLM2j/T4ls015ZhhYApbmYs15s=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.28/go.mod h1:jj7znCIg05jXlaGBlFMGP8+7UN3VtCkRBG2spnmRQkU=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 h1:IiDolu/eLmuB18DRZibj77n1hHQT7z12jnGO7Ze3pLc=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29/go.mod h1:fDbkK4o7fpPXWn8YAPmTieAMuB9mk/VgvW64uaUqxd4=
 github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 h1:e2ooMhpYGhDnBfSvIyusvAwX7KexuZaHbQY2Dyei7VU=
 github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0/go.mod h1:bh2E0CXKZsQN+faiKVqC40vfNMAWheoULBCnEgO9K+8=
 github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 h1:B1G2pSPvbAtQjilPq+Y7jLIzCOwKzuVEl+aBBaNG0AQ=
 github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0/go.mod h1:ncltU6n4Nof5uJttDtcNQ537uNuwYqsZZQcpkd2/GUQ=
 github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk=
 github.com/aws/aws-sdk-go-v2/service/sso v1.12.6/go.mod h1:Y1VOmit/Fn6Tz1uFAeCO6Q7M2fmfXSCLeL5INVYsLuY=
-github.com/aws/aws-sdk-go-v2/service/sso v1.12.12 h1:nneMBM2p79PGWBQovYO/6Xnc2ryRMw3InnDJq1FHkSY=
-github.com/aws/aws-sdk-go-v2/service/sso v1.12.12/go.mod h1:HuCOxYsF21eKrerARYO6HapNeh9GBNq7fius2AcwodY=
+github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 h1:sWDv7cMITPcZ21QdreULwxOOAmE05JjEsT6fCDtDA9k=
+github.com/aws/aws-sdk-go-v2/service/sso v1.12.13/go.mod h1:DfX0sWuT46KpcqbMhJ9QWtxAIP1VozkDWf8VAkByjYY=
 github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6/go.mod h1:Lh/bc9XUf8CfOY6Jp5aIkQtN+j1mc+nExc+KXj9jx2s=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.12 h1:2qTR7IFk7/0IN/adSFhYu9Xthr0zVFTgBrmPldILn80=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.12/go.mod h1:E4VrHCPzmVB/KFXtqBGKb3c8zpbNBgKe3fisDNLAW5w=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 h1:BFubHS/xN5bjl818QaroN6mQdjneYQ+AOx44KNXlyH4=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13/go.mod h1:BzqsVVFduubEmzrVtUFQQIQdFqvUItF8XUq2EnS8Wog=
 github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g=
 github.com/aws/aws-sdk-go-v2/service/sts v1.18.7/go.mod h1:JuTnSoeePXmMVe9G8NcjjwgOKEfZ4cOjMuT2IBT/2eI=
-github.com/aws/aws-sdk-go-v2/service/sts v1.19.2 h1:XFJ2Z6sNUUcAz9poj+245DMkrHE4h2j5I9/xD50RHfE=
-github.com/aws/aws-sdk-go-v2/service/sts v1.19.2/go.mod h1:dp0yLPsLBOi++WTxzCjA/oZqi6NPIhoR+uF7GeMU9eg=
+github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 h1:e5mnydVdCVWxP+5rPAGi2PYxC7u2OZgH1ypC114H04U=
+github.com/aws/aws-sdk-go-v2/service/sts v1.19.3/go.mod h1:yVGZA1CPkmUhBdA039jXNJJG7/6t+G+EBWmFq23xqnY=
 github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
 github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
 github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
@@ -673,8 +672,8 @@ github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NM
 github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw=
 github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
 github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ=
-github.com/hashicorp/consul/api v1.22.0 h1:ydEvDooB/A0c/xpsBd8GSt7P2/zYPBui4KrNip0xGjE=
-github.com/hashicorp/consul/api v1.22.0/go.mod h1:zHpYgZ7TeYqS6zaszjwSt128OwESRpnhU9aGa6ue3Eg=
+github.com/hashicorp/consul/api v1.23.0 h1:L6e4v1AfoumqAHq/Rrsmuulev+nd7vltM3k8H329tyI=
+github.com/hashicorp/consul/api v1.23.0/go.mod h1:SfvUIT74b0EplDuNgAJQ/FVqSO6KyK2ia80UI39/Ye8=
 github.com/hashicorp/consul/sdk v0.14.0 h1:Hly+BMNMssVzoWddbBnBFi3W+Fzytvm0haSkihhj3GU=
 github.com/hashicorp/consul/sdk v0.14.0/go.mod h1:gHYeuDa0+0qRAD6Wwr6yznMBvBwHKoxSBoW5l73+saE=
 github.com/hashicorp/cronexpr v1.1.2 h1:wG/ZYIKT+RT3QkOdgYc+xsKWVRgnxJ1OJtjjy84fJ9A=
@@ -1100,8 +1099,6 @@ github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH
 github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
 github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
 github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
-github.com/observiq/ctimefmt v1.0.0 h1:r7vTJ+Slkrt9fZ67mkf+mA6zAdR5nGIJRMTzkUyvilk=
-github.com/observiq/ctimefmt v1.0.0/go.mod h1:mxi62//WbSpG/roCO1c6MqZ7zQTvjVtYheqHN3eOjvc=
 github.com/observiq/nanojack v0.0.0-20201106172433-343928847ebc h1:49ewVBwLcy+eYqI4R0ICilCI4dPjddpFXWv3liXzUxM=
 github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
 github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA=
@@ -1127,186 +1124,186 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y
 github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY=
 github.com/onsi/gomega v1.27.9 h1:qIyVWbOsvQEye2QCqLsNSeH/5L1RS9vS382erEWfT3o=
 github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
-github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.81.0 h1:dU5Sy+YrObfpq9XIX/c9wmRJ3DxbD1mna9pCsqU9ckw=
-github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.81.0/go.mod h1:e8IViKiskEknHmSRdIgsTDCOUv4KsOSi797mqLMxxJA=
-github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.81.0 h1:KWpQLwATMlby+fxTEF/bxESXgZnyHpY3dERnwEfgNCs=
-github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.81.0/go.mod h1:HgdAJofem+cKZcNcem1ByOaCuN+p/ez8dbQoyH2zsEs=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.81.0 h1:Uej91kyfvfXnclwY0fDfWutM7MhFYpSHhr2/WsDEEpc=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.81.0/go.mod h1:GmTJKqMN9B2h7JonMwjeKM4U1dAlefuBHDo4DPrK19k=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.81.0 h1:Y5qa2u8+t4Om7ThbKww7N6HpPJX4ZZvHpem/l5gGWCU=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.81.0/go.mod h1:cBidQZOaH9DQwI8O58M3Fer7EqfYyAndW67LY60Md3c=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/prometheusremotewriteexporter v0.81.0 h1:ts0afKsnRpVQrX9w9dphbmrWI4hTbOt0P3p6Un9oDIk=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.81.0 h1:I320Tp2Bxs46yfQN63J6lOq8NNLLu94yDVyUMVMngo4=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.81.0/go.mod h1:K7IkeryB3UBqpEg4ZEQX8OkSH7l0NEsargQWT7kT4l0=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.81.0 h1:WgHJwa4Dl+rMr9v9dHuhRrImKkwAQNHtjJgTO9LpiMo=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.81.0/go.mod h1:WAQrgajoWC4x9BkCaiRSortkUK4OKjbN8aw4coXsuJM=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.81.0 h1:xsRmOPWyAWNFnEnuK2d3JtD+RaO0JR6/BIKw0eSjNy8=
-github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.81.0/go.mod h1:jlILI9syx/7cbTRKvP0pzr2I7wv5vnz1q+OYM76escw=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.81.0 h1:5rnbwxwn9+PF6hMpRYoz8fI8n/Oyj6uFTi+nCqXcITo=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.81.0/go.mod h1:RFbrR6/CmOHkuJIlGoi3xfUpkSZ15EubW0qcpAAHUC8=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.81.0 h1:ZqX6htxXlPr5GrAj/zpGjYGvhBIRkosyzV4VjGYLvKg=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.81.0/go.mod h1:Kd2693zrJXP/BoRA59N/obF1Fjx+LPSB7L1nmR6pSz4=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.81.0 h1:52o4B6JzTn3lghhCPZk0/4rzUaCEMjLVEMKhck7Gb6c=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.81.0/go.mod h1:XSfqdZLfBdi4PdGoCYI6aMjGEbpygXg/e8ezzzn1Uko=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.81.0 h1:KvBuf0k2gCxXwWVziKfc5LIXDkS7J04cwQfOiGgns8c=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.81.0/go.mod h1:kJcZzVAOGYW9AcQOF1nC1ZsZCeCSShmB6i8/xART2LA=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.81.0 h1:cJcHEZYVEONPfef+lEw62A5QQh0ptatyQFLWPdw6qdw=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.81.0/go.mod h1:nQ3Wdib2yeODYHU+9FJV1zckkj+fYDQrqQi+898fsbE=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.81.0 h1:38yXFb61m57NNr2t0JuAiXDszbLF5qgLYmeFy+Aw5qE=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.81.0/go.mod h1:YnBxgeXdIB0T7RhQwrt1E2G+aVoDNk00n+SOctdN/ps=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.81.0 h1:wZc+A88dVWfAy4ZSD+mX0HmJEIo0B/liCV59Jjs+/gY=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.81.0/go.mod h1:3xf0yiP5VBR43GaWUOHI2V2OLWJTj8nxcxhiMiTzjAw=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.81.0 h1:f279AsRQdzbg8I1JIGhsNpn4k6L7YkuWN8XZHj+onIU=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.81.0/go.mod h1:9eSMHZunl05fLTITHa8hiSkFm2SWI28MZcCtgvgGUmg=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.81.0 h1:+rU0H2/MHpdeQR+jnAbXIhx9xzdXFEU3yWtEWUkRagY=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.81.0/go.mod h1:WVBKYbTBa+UjTFIkxnMuRZMTLXAsrORnd6knVaa3VxU=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.81.0 h1:HQHt9loCLoETcq3h43UYmONBuG8FUoWhGIGATDG6TGk=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.81.0/go.mod h1:2grO1VS4qYBtey1+ChDF7gKMGCCDN65QMdIuOVPzYGA=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.81.0 h1:btaPMj8wYYogfmK0cKcd6VxrXnJkVN8zQvdc2vbq4tM=
-github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.81.0/go.mod h1:eS5jvx0z1B/TTpVFQaWpDB33UxrCrtSKLayxyqO0rTY=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.81.0 h1:WN6hNUDTQcmODuHeK9XUh5LavKr0JFrpR6+gIMtbHlw=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.81.0/go.mod h1:4KJ5vtmaPwZECM2MEseFkz0y02f2aipnYWRtrHpAwJ8=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.81.0 h1:EIbmD7EzonXaKDyq2MrCpfpar1VEKjVFJ50rzfTsWpc=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.81.0/go.mod h1:pJHh21NYSJNpnkzaDSy0xCZ/Jg4ETd24l54/XHVkr3s=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0 h1:sPjCHuqjn5UYDJOai4FulMCfLP+7AbspjHfv0jAtmD0=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0/go.mod h1:moQ6krtZ8dyziij2P+9eao5+gBfCJjiNDwN7n2MZZs4=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.81.0 h1:h6LwCbYiZBNdKM9aDTu8QvOq3SFcfL8HBztYb4vMjTg=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.81.0/go.mod h1:WeaX0JaWaTFdBDEqZijuyy/AO1SNMhg5NaCiybM8qrw=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.81.0 h1:WddoIEOfszVY9fN1/MocdB3E/4VhpR0XCvzM4rI/Zo4=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.81.0/go.mod h1:gkymmEWoAYS3IAJizCVWHsnLlO2srV6jTlauy3ew8Vg=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.81.0 h1:msu2VMtc1vuGT5vqWdMLiLigGIOxLQTb9p6YkkJ79lY=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.81.0/go.mod h1:wnpEQ9JRrLAhnqYSxpa6f/S40xGri7DHG92qhZdTNS4=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8stest v0.81.0 h1:RakPwL13EbYBSIszk4ahd09IJrWlCEdWIb453rOr8lo=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.81.0 h1:Lzq4iYipnrt7ioOFlZRQcLwpchuK2bpcdFK/iFmZsoQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.81.0/go.mod h1:bKa1dpIjQ5v+FX1ReDlvCfC9DlOtAezIcnr+rCXTQLM=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.81.1-0.20230725205232-a877923c3c3d h1:nDeX35mAp9Kpd9z38e8KccxBmWvdUbbhZUrhXjjzbeo=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.81.1-0.20230725205232-a877923c3c3d/go.mod h1:IUS/I2ZsQlkLipYGz05HFbjybG06EvmWQ8zgP2yYf7A=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.81.0 h1:SMUb1r6uBrXO7R7ylZMHnlPN2VrdS4GzXgYWuJghPJg=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.81.0/go.mod h1:Gfcl9pg+6klCvdemOCmuYv+3EHZJt8gqyJ5S7rkr1Ks=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.81.0 h1:sjx/pl6c+Uu3UNegnnSg3mDn4Yb2Nrbmwx4xreGTVH4=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.81.0/go.mod h1:HNOp2p6/gqZDu91IinicZQX5CkB5oGLXyh9MWRCzZdU=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.81.0 h1:Ye3XFnb1hNpB6MOZKI/qqf6wRHjHAuRGrm9k1gVeBIU=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.81.0/go.mod h1:On9ZefTz99s4Cxo8cO005g/qjDx7mrBtb4toWr2nOis=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0 h1:O7D2OVWhVgR5JzLoM+Q7/1Pbt+zpVrNsFzJt+/5TonM=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0/go.mod h1:jTNj31j5LsaNnYZnaNsEtOfXaYgjdZPeLf0cS3Fjg5w=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.81.0 h1:Rb8e1O31dgjTEn6823RsPs2RaOwl7fVuFWz2qK9DRpY=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.81.0/go.mod h1:tnyFHqiWxeNUqAAaGuKDD7XDL0KwBMSqvRB9PsKCzng=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0 h1:mPkMu2Dx9QrGmZxnfwcSSvAyUZzBtaeYIdvmFSrC0KA=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0/go.mod h1:HGW+MymIh+h0Gc9TBCsh/R7X+wauRpNtV34sqNd4YG0=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0 h1:qUNZEYelezsSH6KrbE4u1TrzXCggSFPZqFI1m29gJFQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0/go.mod h1:OUF0FMVFQQnlqc+QUMyQScszuBnqO9pO7xL7/kK1PM0=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/resourcetotelemetry v0.81.0 h1:Ic7Jg3q2TU1t7cPRcG6wNncQS4r85TvYU8N2iPVv0j0=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.81.0 h1:IeAco3Q34Aduu8g7/6kKzavZojQTj18G+uXJypyEa3c=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.81.0/go.mod h1:O6eCO9lo7HPJ9EXSBmfN0Su/f25HJQE00zK0SurhJiQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.81.0 h1:UCF4zWe24m1+yQeYGEQjzq5c9yNAf4d1lknbR7PAoH4=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.81.0/go.mod h1:/HK52N8ufi1rKuShEanBZVLVpOygLoIT9zqs0azKQ/s=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.81.0 h1:+rYKV1vjMFfTyf+RemKNLP0ChKki4habOG2flUdfPdw=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.81.0/go.mod h1:rBevrbOUnW5jic2PxtNj1pcS2muiFVl1boQdMMJX2MY=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.81.0 h1:syUuhc/z2yIsTmL9jt2gmXkYUM/jgsQZoH6NAE6i54Q=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.81.0/go.mod h1:LRzHoUpI8xZN5KXulEglZnbkYBHyiqHgQXGudIqpTtk=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheusremotewrite v0.81.0 h1:0Wzd9nh8WFfcY769RKfJU1qnEVSwy7g6+nfyyuFNCeE=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.81.0 h1:i/kiWIdAKxFalTGlvLgeZGSEx7F4Mj5rnAxiQK03j0c=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.81.0/go.mod h1:+gWTUZfWY7pSoArUW/7OUSiR2PByaCoyuBv4Ln3p7pU=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0 h1:/2cI6UppJgjmc9voPGDUWWv3Bhd0N4LgbZ+qFfrLeI4=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0/go.mod h1:BzoMZIy0wKuBbdUv0FJaWPY2xWZeaOC8tgrxjUp07Zg=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.81.0 h1:1ksl6ZqVzLmjOqLLhCGFwijaGQIxAtiClmWuL2im+lo=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.81.0/go.mod h1:j69tUmtJ/8CMnn+OYljZZ+2N/w4jISk1MCSG43r9/jk=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.81.0 h1:Vy/Cdd+6n0dADHqOpRTzogYYCDQcZV+/Vtev7hKzXjk=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.81.0/go.mod h1:0lkF8ouVsG5bsZ17zSIRz8y7uo53dp+KP44/oD7xV14=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.81.0 h1:AoZYnk7AOzxDuLcZek4sRQgv6fsMp9ufxplVCcPKY/U=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.81.0/go.mod h1:SCXzjDN/HNBffKyoJmAUkoAcGScmRkSFkiVY2PVlOUY=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.81.0 h1:MBF10bAMzWddNiivgiWwNl0NqFtwIwWhQTpsNhhgIwo=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.81.0/go.mod h1:wCZn4P2NaOQn9xzVLRRZBCED2zcxilJvrXJFeFiAMLk=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.81.0 h1:5opgAzbubdgZ3Xu6O4TMBuo68IXXLahlyI50BvhIciY=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.81.0/go.mod h1:4RJwmx/TyCF1RtbwZFjDHWHzuEOi2sBH0/n05na0cNc=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.81.0 h1:D4fXLya9L+RWaiatBGCwwRRHr3QLcTC03cYn4WKHvtc=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.81.0/go.mod h1:vDgxLWxQZMYHbqcCR1j5J9VoSs/h+dFh51YyhuKjrE8=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.81.0 h1:ogNySeBs0zqWuf4cDGvBzIZwZ86MznAcv/EBtrj9s9w=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.81.0/go.mod h1:Z0lAQbH2a3kOewras2isLHT+hfGw+wPhXXZu1NUtXqY=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.81.0 h1:aw3yb/WRZg6u0qUswZdYjOz8ciFHCgDsFsu7/8YD2bM=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.81.0/go.mod h1:LbNkwwM/xEoGXhF3UF4/QB0wuwl7bkWhAKEVIJyo3QQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.81.1-0.20230725205232-a877923c3c3d h1:ngUOKpOokO9TGxyIfZGO7V70HjRhUnzkMXa9fvLgtIk=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.81.1-0.20230725205232-a877923c3c3d/go.mod h1:anrVzFRSmSglc1p/FM8PRYLqtPsrQ/5jzfgrYEcdI7s=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.81.0 h1:1ct0JB0jCi4cyBvl4Ektq3G47dg7pAbiXsHlOqSK/aI=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.81.0/go.mod h1:tXgkAkD5/uIOvsooOrNmd5FayKcvfasVoTV89SSihME=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.81.0 h1:bizsQKqFDXZOvFt8oUa0IweEiHsxCMk4888JyOlkmI0=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.81.0/go.mod h1:UiY2ehllPEaNJNXd/lCFAiTCor6KlTLD/d9kF+2biSs=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.81.0 h1:bsCZYnbIM15crCijPdFGnjwMEtWfItvEcWZsfrPyQl0=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.81.0/go.mod h1:/nsxcV4GVoT4NI0G0WXS/Z/1saU2/n3s9r0TBWrYyn0=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.81.0 h1:Yc5iGF0mhGM0vdLJXO2GmMDBFw6G7tWsNGuAZwxq6fU=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.81.0/go.mod h1:YXTpXxJz3e/O/hycRV3Oq6VAipY3aFbTcyinNUMtBAk=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.81.0 h1:YAb4rAwIC2LvSTfrlXzcbexLy1u0eP2WsMAL045vRaw=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.81.0/go.mod h1:r2M1vxBks5gXX1I29QALOzzb7gwcUvwxn4scvCbDySY=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.81.0 h1:sVmU1X/9txOvdQX0VbnqwetrKThnsPXlJO52dmaFng8=
-github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.81.0/go.mod h1:x71DNfrdvoQSN3AiovAsoSi4aG74Rary8oJATXzzkMw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.81.0 h1:hr5PqVqOMERoQu5sMxB+MMOj4tLOqiGN3kb0E90MzNY=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.81.0/go.mod h1:Ri8wGSgPbbjQ6vPab0mOGbhTSIGt9awUXMFx5ZzTVmQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.81.0 h1:VWI3XBQqopMd4QMN0/sAIA4NQgNZrwfJEyJ6UURqivc=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.81.0/go.mod h1:xzynAsJHFWk28/0iOQicsg31Y0gLBl1RuMHELiS+3mQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.81.0 h1:uJrSvS7gHks3HJE8+AH+z+T3/UpzYAXEtthwfy88Fxw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.81.0/go.mod h1:g9LtkctSHzgmHejYcwz+j9IlJFGC+1k0mUlbptvFr5M=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.81.0 h1:hXtoprYw4r7H/jEeCw/uUgGr9bKCtAZV72EbibpZA1g=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.81.0/go.mod h1:V8T/oyAIo/hKDetJkxi9eHvd92cUZwOaIA6uh6wetFw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.81.0 h1:hAhbuCBvFCQc/TRcBb/7l8MtyUdB1FFVS+80ffQlGg4=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.81.0/go.mod h1:mc4DUjy7KQEqTVf1pFMABTwU0a7jmYlFMRGwWtscSXQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.81.0 h1:QjAGLAVupmdJVysJlULs35X7vPTiC3XLL7oLPlEWYMI=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.81.0/go.mod h1:vIG9Fq1CjYZE6ZOfgSydRf3wq5TnKvn7Gze1veBwuwE=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.81.0 h1:v07X0ChTvpFZCubBD9t+4puQEVGKHN8KsC1HAsd6ZM4=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.81.0/go.mod h1:pWTNb7aQhdzVqIOnyrh+GKsLJqDfpAP/acO1W6ZbJMI=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.81.0 h1:oTnYJKHy7gRs6o5R6vZLIa2SwcXjc8LAztKkcm2ALOI=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.81.0/go.mod h1:FMbLDLBEWMFODFGxQNuStF0IHn3i0SvcHHKEw+q8Gxs=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.81.0 h1:bbqlX6/nmy1r8xQiNpizqyl9qWZKk1+IeRTZvMN/+io=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.81.0/go.mod h1:HlTytDlom6CPfjveV+YkI+bOxucFHUBS40uf76oUVVw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.81.0 h1:l+7JSL6qfQ2b28bRHGUl02s+Qw1uchANS5oTIBcb4Kw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.81.0/go.mod h1:SnoZRoQR0mX9gt3PkVYuPhB/uOLlLxNVu9IZz1OJTIE=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.81.0 h1:eeNz79kASjlz2SDjRKmeyYwnUhXBKi8W8PPJZ42RU+c=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.81.0/go.mod h1:Ndz+lfGMpOROJ4qT5EXDgHbJFAeGb8Bk6DiexlOS8yI=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.81.0 h1:V4RFwmrBtsOlh3U6KvXRZW2wVkApe8BhgM1oiLhyf/Y=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.81.0/go.mod h1:AiwCghx51jNJKtaAggtCdBwrFt5OL4a+77S0X6GBc50=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.81.0 h1:oVnTA9SMCfXS2Sii9ENrM/ifG8THRZ3o2xwB5Bg+ufU=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.81.0/go.mod h1:0kY0TfuwXsW9GL2/Ace0uncXZriDONmWgQro8H+H5MU=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.81.0 h1:ST5seqrbY1RmsCk9uYBg3w9jLPgv902tHUWibVXjNgs=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.81.0/go.mod h1:6gbUZv6S9/nCCZQpBwmQY1sV+BspqYvgnlFJz3jve6U=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.81.0 h1:VYn9ZCS0wA1bZF09zTD9yvPpCYCGxZm2UEGl5nc8z7o=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.81.0/go.mod h1:GlBEfi6NuVjcsB1tWxwXRAtTotgrX1qPZVBNx0epxbQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.81.0 h1:1xVNbdf+FQx/FP7uVf6zVD/vuMRR45HmcXFebuEbNlQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.81.0/go.mod h1:GYD/+H3PKJoCGWjtfhNj05OCV48cSPPbWkcitgFezu0=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.81.0 h1:3yOMHLTDWcVAqnLaJlUXbv1F8C6sDzJYxcALM2RcI4g=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.81.0/go.mod h1:KrPGDI6BBnZcGKMhVTC7NuFsKtB8Js2SIggEJLN6M5s=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.81.0 h1:CiwugSYFWOaOfzJ290vbMgfDQTXkluSCU4sShBGWpjc=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.81.0/go.mod h1:FgTnI90mNeBfaUIq7DBkFVD/qV9XCnMO6YCUo5FUHQE=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.81.0 h1:BnEtBDHi2rQFECkqSST6QGF6UaLi3l5EB33rAdC9ciE=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.81.0/go.mod h1:7ZFC8bJ1/6busLvJ/+mmP9gDYw5lCri4KYrt3H1lnv0=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.81.0 h1:8WQ9n7zRGd40f/77bXWBfKbVt60AZhFR/VdcaMEKtgI=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.81.0/go.mod h1:2P2vW/PqtlNiauCAmr5DDLZqJcXR/6JWzOg0QwsEAlg=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.81.0 h1:cjXm3oW/PJv4/HPh8s45J/6EIkyje7kKE+2EQP6rRbg=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.81.0/go.mod h1:Nw4MxOT8HL3BFNVdRaTnQkSBJusY4Z8hjcSrbcVp72Q=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.81.0 h1:msKTM3dQ2ARvOva11CPovWvccV1JUact5uBtFlwpEWs=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.81.0/go.mod h1:A5HLPGzX9YlOpZeEi+B365F6YAH741V/dDlemAFuf5s=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.81.0 h1:rkAcYCTPUD3xzLR0Af6eQMsKplJ16iZ9R6RYNUJUxVU=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.81.0/go.mod h1:YheIYX/GCV8SeCLW/Wzh5PchhVG3v1uEReeTVXhp3U8=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.81.0 h1:WzGRSESuhED304EL/yTPzCIwfWQHOBQYR5v/FSgODAg=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.81.0/go.mod h1:kigjV9V6QpCs/n3VXyBC8GVw4CZvA/clPh0lEtG6mfw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.81.0 h1:i7edEZW/kC0tIbqCrLdnHfVON59DFZOSPq6mzLcIYzw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.81.0/go.mod h1:EJC1OVcA0gbOyM6/yo43KaJ8r3bW48g8nandg8OyGq4=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.81.0 h1:112grW0/9nnmf4GAm6Yj7LTj7T9RGGSshPWDs5nK23g=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.81.0/go.mod h1:WsjGM0XqcCIpToGg7wYubTXAB9xQCRx1AY9Zd9/b0gw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.81.0 h1:509SVfkRMTDXxPqKxektsouAIosWNvnEQssV5UxqlyU=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.81.0/go.mod h1:Lkh9L4s1lmAd9Mrh03VXqIiGhQCv3O9pxi9posh1Esk=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.81.0 h1:3T72+6ahGCn51LwVgeJdVIGT/QkYYOXyhB8pZ0oKFto=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.81.0/go.mod h1:N0MVWtVJ2GMxue7MB4ulh3eMQ5/fHoATtGkcZyAwYRQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.81.0 h1:DuX7wfHoRLj0HvwqY/DCalPtEeXgewu4Jbo4Ux7q4h4=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.81.0/go.mod h1:Wuy9uKxhdd2C5Tr2PB+iVt1ZqfVFQRMkpyGmL/xZ7dM=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.81.0 h1:j/sEQm8KIMswxAz//l9jAgHJU/TN/+NHex0dLf4Y23g=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.81.0/go.mod h1:Rc94THKBFUypMG6HDP84mFhl/TL5IHIm4OFNeDidtmw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.81.0 h1:4UYN4wevn8IKOgyfbI0nr8peHQ+C32BEWnbskWK2O30=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.81.0/go.mod h1:golluxPKP+Q9kXYR0nXzZwX6eo423S2EaVf2IFcr6Z8=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.81.0 h1:9r6nVfTEWRSbr6hHAnpUV3UPnkyfuPqwFxf4hJf4h2w=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.81.0/go.mod h1:qYLya7ADhp1UllpbMhYA/FAN0zZq8mBVniSHBRFj+bo=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.81.0 h1:Byipcb594xQ5yfFSwEeuvI7bjEJ+7IBgYc8qSJBZ9ws=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.81.0/go.mod h1:EbFpP8S5ZXftsGeP4BSfS6ggLxIn2cZMfPpvnWn2ArE=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.81.0 h1:P15Tw/ah3glxB9PC6Llf70SxOWT2H7A+hzAc6BPthzw=
-github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.81.0/go.mod h1:RayWl/RSnnO39YA5gzc1XPWlDwkwJ6EEyWTfPeFtU5c=
+github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.82.0 h1:aRYvJaw3rdy+f8EpvYJ3lVOI3A8njAIBMpLV795TQss=
+github.com/open-telemetry/opentelemetry-collector-contrib/connector/countconnector v0.82.0/go.mod h1:m4jnKGZQdorxZhCDPurovJ2HsPHih3VnIcvojQBMiq0=
+github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.82.0 h1:WMjRjnbz7wJ4T0E+yTp6GRDjdkJ3kYYjhRuIhLhl7Gc=
+github.com/open-telemetry/opentelemetry-collector-contrib/connector/spanmetricsconnector v0.82.0/go.mod h1:bcaAmq0FkJYIK1/tww+4vyiqUI+Iq2o/s3QxUzCbmEY=
+github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.82.0 h1:1PLJXk12CueltS9JA3X2BNzMKbYD3B/VEKTJBUR5Iy0=
+github.com/open-telemetry/opentelemetry-collector-contrib/exporter/fileexporter v0.82.0/go.mod h1:5m7wwTnnssL2m9zzWwaK40t6VZLUlC7Tbx3hsSyrWQM=
+github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.82.0 h1:6Zj2gzJl5sefJFQlCvL46kLyoozBstUQiB6wsPkPU2E=
+github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter v0.82.0/go.mod h1:1G20L9KOMqKQdqtNI0X4zfkMQ151bk4z3YUbx1Mcjh4= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/prometheusremotewriteexporter v0.82.0 h1:Pbdhyt4aVha6QgscPYRwD70/QlHJJpaGBJkHbH0D57s= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.82.0 h1:L+A8ZQzzsV+Rg02ZJS1ugdzEuiNtJr0uONVD0NvgrSs= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/sapmexporter v0.82.0/go.mod h1:tua7yWaFweDM9mzRDFP1DNhsNVMYbRz/Rt3k52JBexc= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.82.0 h1:i4LnK+f/XbJzGlCrYnLRg/EFbVYYXcmSF88pVN5loGI= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter v0.82.0/go.mod h1:4xBWenKUqvfXOwlBLX5nbr0YIMSsZne4FGdn+lTyajk= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.82.0 h1:5eJb+8ibFf/3CZpSnob9P+Yc+ru7UY3ypAbYJ879jpI= +github.com/open-telemetry/opentelemetry-collector-contrib/exporter/splunkhecexporter v0.82.0/go.mod h1:1SIRo2TJSu26wDvrVTMWAfXB8zXMUkOB6IZl5F6B8pE= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.82.0 h1:o4W5thUcOVlfAK9yJyyY0KbxfuTfPTlnXXxNYJgnZc0= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/basicauthextension v0.82.0/go.mod h1:EUKen+da19oVzO1Iq+iC0Lg5kf/VFem10Vu7xTyzisw= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.82.0 h1:xzz0A7cxxtvraU22ZleikEc32ghZ3GMCDiCevs+37Zw= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/healthcheckextension v0.82.0/go.mod h1:eaA8YuyzBP1HikJUGYlS+3cbxndrCIAEYBU/1yMQNQo= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.82.0 h1:9IYYC2fonyIQVCCgpGEGp6xlJUCaTJk6Tku9AJfEetY= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/httpforwarder v0.82.0/go.mod h1:Hk0CCPrA5/p4MtYryf0ZVXgc6VSFdycyG71OjpnYMy4= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.82.0 h1:kVj5h3bcU2ePeUsOnPyW5/ao7P5UvCVP6YcJcuo1RjA= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer v0.82.0/go.mod h1:kJcZzVAOGYW9AcQOF1nC1ZsZCeCSShmB6i8/xART2LA= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.82.0 h1:meEN5jCutq5SgsZ7cXc2aDfBASnjJ9STvtPqIwWG4xw= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/dockerobserver v0.82.0/go.mod h1:QjwB9dMlUkV7j7wfU2G8YgMNm+jtjhpDvhEEydKP/hw= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.82.0 h1:Qixnpq/0PQw9rKzGj7Eci4F3ucZRG46mLOL54a3u2Uo= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecsobserver v0.82.0/go.mod h1:9ne2U15XVRwTRlUn5fvSDBDeLU6RH9YVeHOdFrj6kyw= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.82.0 h1:PTDBDj+LqWa6aSYohR9dih44r5c9wm5Bs7Uhg2gmZyQ= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/ecstaskobserver v0.82.0/go.mod h1:kd3UQqX8ZgzQNd0RL5ppd9nZewGaFICUoW1FfzJ6CPM= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.82.0 h1:zcBhR+GSDYu92t+FD+VNgnapzhQl7x/0djEb2H08UlE= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/hostobserver v0.82.0/go.mod h1:ZhLe9FLklh1UPIMjCazi1rrQDeRw0twSxWXH+cy7cvk= 
+github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.82.0 h1:pk7yIUGCwapaKNfZzkFBMMl/5P5dtSUzIfG/YUkwqw8= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/observer/k8sobserver v0.82.0/go.mod h1:H81gSC8lBvMLl8kksDwYf0eaDFmfqAUdKEPVlMkVBdY= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.82.0 h1:RhqTW7tIcDSc6bWt1TTjlMsHfAYgIiXrGHbk+CYOcSg= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/pprofextension v0.82.0/go.mod h1:JvWloYbaiC45XUV9odYlMo/y19mN1nil+OwJetJu870= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.82.0 h1:HtE9X6xxgYg7p8S1f/BeyHZQJeB508otMYVibOcibWU= +github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.82.0/go.mod h1:fDxP9zbwqzDv20tstDvR0Fp0tlA0W/ZMqG6RHaqsk/Q= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.82.0 h1:9wJBkAfq0xgT++d0t6p79ZJKvGjjuy0ZlcXcl8lDEXU= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/aws/ecsutil v0.82.0/go.mod h1:YvDlJ+zVkFiiPMEY6QP3E4ILXQrcZbk23KMW4GPGrVg= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.82.0 h1:UQkYg2ksIEbUibN97SPMITF/1wSRynAn+aD7tC3mQKQ= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.82.0/go.mod h1:5VGtCES+3CySFeCO8hFzrxVgeMI8L6cINAlqr0vrv/A= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0 h1:0b6glbENAwPdasKKVOgpR/EaZG1sJhsUfXCRiwZ0drU= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0/go.mod h1:MKnM9GFqPz4HY4NQDDao+dIjZz4BvThAijuJuPC8NOI= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.82.0 h1:sw413Qe/67o0L35OeJEeySjzSAvVbY0jwhSkgNdTWmE= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/docker v0.82.0/go.mod h1:OcIhkcKRv+01fIqpjWROPqiF8JENa+Fe9CssdMklmjw= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.82.0 h1:czYBWuiriQyD/4UI61U/eAogi7qnhk9AGreZez20t0Q= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.82.0/go.mod h1:tiYWtXrv4+T9L+mo5hdzMiKN25rg7sB2tRIHUqyhF5U= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.82.0 h1:5aL7mnIupfnfyO6izVxwdOArKbJaAzNqH/W//2ux0kE= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8sconfig v0.82.0/go.mod h1:F9orevxYo4hFXEYEbhn85znnIUWv1y++MKkXHabZZ58= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/k8stest v0.82.0 h1:ENaPzCM8oHWzwzA1Fj6dl/1zGOh1UC9wb2f17jh45aA= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.82.0 h1:ms9AyP7vpW4CtsPKibY2kp/+kWr4mtBeF3TsgC66vVk= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/kubelet v0.82.0/go.mod h1:IEmBxonukahtUZUZLsRuUeFNQCsqIrtyWEFWU/CKyU8= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.82.0 h1:EFEWFZNTCTM2UVItheh1f/rAoZcVVNhTLk4xevSSbUg= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/metadataproviders v0.82.0/go.mod h1:xbuPPTg7fSGwlMaM9iEjPdob6MH7Nd6KzJ//Qd0xMFs= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.82.0 h1:0Hdh8jj6biwzWzdF21c6XYqVhx0YMKF2OVnWhusAoFs= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/sharedcomponent v0.82.0/go.mod h1:UEmb9zd7Jf9KKc6Tmrh7FXlh0IO9FzSumUWzmEW+ANs= 
+github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.82.0 h1:0AqiqVGbSnwz6n8CYy+/r0dJz95rqif/ctFlUr44FLw=
+github.com/open-telemetry/opentelemetry-collector-contrib/internal/splunk v0.82.0/go.mod h1:lFuju3wV7f/AZRVAyMYqA9XCtmQ66VuBR1XPC4mxVRg=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.82.0 h1:a0WFk4cpk7HuYi3CcKo44z/gY88XN3EMJTq7gW6Nj4A=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/batchperresourceattr v0.82.0/go.mod h1:aZdFtT+ay8aNiySaQw41KQvtRteKj2Eym63Cc0PWElQ=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0 h1:noxLHxoQqYt3WO3Z2HpUExyYG7l4fuqC0FyqRPYb+BY=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0/go.mod h1:umq1KOdkQa2+djdxtxHmLigyFtLVqM7QXGeP3/s3cHA=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.82.0 h1:2efL2SE/dndrTLPQcpFzrsIJpYw0i3bkFG0n40xnsQI=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.82.0/go.mod h1:tqP4R7pPk5M0v0j8nP5h2o1fUqofC2kSrirzkwQW7p0=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0 h1:wBX6PvwO5mopN+uuVU1pyfl54OdrrRT+VPRCyl22O1A=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0/go.mod h1:8bbFs0G0deA/M9oRGqUJ5n/+N1wejo/6CSWztEnz3Hc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0 h1:zSQ0EolsXY3F18kFwEpqAkLc5C2/DE0vbFS3QfMpsDc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0/go.mod h1:wbgo9BklRN8M4Mi+76mo9bMVQY2C5gL/rPKwePQL3l0=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/resourcetotelemetry v0.82.0 h1:3S5WRpygfai9pfgt66oE/ppOEziBlL4NhuIlhb2qXkk=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.82.0 h1:NbSJ/XsjeyiKSjn/f3eNp3HF3eKeXLfJLZvjbpV+P+s=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.82.0/go.mod h1:rFVSbwDHe9ZYcnTlnBp3vLIKoGbm89FqhvgNj03S1PI=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.82.0 h1:fKTXkXX+iMAAiTu4r1j1DbzKYvbd6CvFoWNWLhTOJjk=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.82.0/go.mod h1:1SM5fbDUmJHQUNO0T/lDzMVmGpn+z9UJHyjfGg6IQ0Q=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.82.0 h1:XW0HJBOWJLpzDsMAOoLxFL4qMmD/qI4qTpGfAA9afgU=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.82.0/go.mod h1:vX/Qm9YG+C4N3hZeLr/M1ndTqbOK37pPV7MZwhWY/S0=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.82.0 h1:kx5UQGy8/TiZRst78xblTHvIf3HBJLNKoXvx1GrUHPc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus v0.82.0/go.mod h1:A9Z3SrF0Ngir1kd5t7UzjpUFoy4mhZjpt3B+3d78/qo=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheusremotewrite v0.82.0 h1:8HOH8p6iFidN1VsZewH+ePdZM+w/89dCS6dW24Enhng=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.82.0 h1:0uuo5x+/RFtMhPqo7+CU/lYCmvpJ77fPLdpPxA92Kes=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/signalfx v0.82.0/go.mod h1:rfHBidHrLvlRi0E5NvQuljP3r4Uv7qtNTPWIFtt79z0=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0 h1:NC8LNVLj2UXfPZoW4vpAf9+NWqw1vzwuSjdA2xRPIvA=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0/go.mod h1:t2x45aFpeo5tc6oM2nNyKPLy5gBhT/R/uJNdPp7TBbc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.82.0 h1:tOtWaBL097YAnQqOl96tkhsoCTTCTKtzyDgD7XA/EUw=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/winperfcounters v0.82.0/go.mod h1:iZCxlax6orP+psplZ+aLUy2V224bT6ALjdGGC18OXNs=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.82.0 h1:R+UjA60N26I1gGGzlcxp1IeEYxg9kCktiqle3vEFRBU=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/attributesprocessor v0.82.0/go.mod h1:VoKLInODf1ZwhLHiYWJe/2Gte8BeSlvRdlrM8gO7VTQ=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.82.0 h1:h9FsCZ9ppDRL4cC3QeFSL8uh7auqFsa9BMLLeVx0qFs=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/filterprocessor v0.82.0/go.mod h1:2e36/jV+bd1rPXUJmS1Jk5g3Mrs3EN1Hve/eZrwYEYo=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.82.0 h1:Cg7wSQ8E6v9nDaBFFY/zJthhsPkF/aI7iBRZ8kIx9Wg=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor v0.82.0/go.mod h1:JftdA1vcWxduDvzRlfASeLkfl9dH39VREgHifaOLGvY=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.82.0 h1:UQVbkb/xvXhL5TU3w63cIfrV6Lhtk/shtvQDZDVx75I=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/k8sattributesprocessor v0.82.0/go.mod h1:efjxEAMpzeK27fnU9AxWFGDX7oJji6AYz/CqtvlIKHM=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.82.0 h1:4IJq3sPsgFXhFD/VWZjv+xrFgnktJbSXJHAspVhQg0c=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.82.0/go.mod h1:peSukYtLChww4nCdN1dChWpj99ZV6lirmPgt9he4BcE=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.82.0 h1:Ev7liq1TQyhDgcsfizXC/lO1lKWQX6sv7OAKVe679pI=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/metricstransformprocessor v0.82.0/go.mod h1:WksfpqoJ0O5IKYn57Qs3uoQRjKEoidlG1Axy/VFDc2U=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.82.0 h1:FMfmm1qOrd/UTFydSaLdDq27dz5/Z6vk9lf9WhLB0YM=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.82.0/go.mod h1:UTtjfRbvrkSypo7gHWkOO6l3LjmWaf3JxjtCbnCuAAE=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.82.0 h1:ToEAh/x/66CfvlNdMzAa9hh0zzZOa2hneCdnDBj+U4U=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourcedetectionprocessor v0.82.0/go.mod h1:SvJsVQdfVYVJ0/uG7jzesLDj97ej/8Pkq88MOLD4Ko8=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.82.0 h1:iIzRlJaR5YPuRLjtbeSALwn0IxdgdbOwlO8DEUPkDz4=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/resourceprocessor v0.82.0/go.mod h1:0fxeFp/yAbCRAbcaPztO1j9JH3HYwItPonmEz6OXlJg=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.82.0 h1:phyIAJGDN0kvch/XafEbZNNydDHxl6QuBE0vhxVsRmo=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/routingprocessor v0.82.0/go.mod h1:KOkqwByOcncJsxws0Q2qQ6jelzdpdkMlm50yT15a7ts=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.82.0 h1:lJsbHJTuU+uoHWbloOvwJMD0DwhkR9/LQEd2DqPvjkE=
+github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanmetricsprocessor v0.82.0/go.mod h1:XiKGtF9jCD2ujUcWwIwmyDsvmH1aKk2XhK8bA8tsEvQ= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.82.0 h1:PTjUqRSExzDY8AS1RA+fDNRIyP6ilh/4kJRLi1Gwz6g= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/spanprocessor v0.82.0/go.mod h1:agArAIGzj1fcKeS2J4voljJ1a2WetcV8IIjK7X+i48U= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.82.0 h1:1HuCZhKP4QrB1U0NRx9HaG5qCHXaYAubFMVPlhSg6Kk= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/tailsamplingprocessor v0.82.0/go.mod h1:GvmND1rGYv6k6fNHWyKbRi6yq8XlOTe1ymfER+BcDc0= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.82.0 h1:dXtEW53Rf09nSx3nc1pvUX1USsL3swTMzDw6rLA4cA0= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor v0.82.0/go.mod h1:Z95mPIlMbXnOXBYNOgu26isHr09bcO4/ZxIrOVFcpsM= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.82.0 h1:MxWC2JiZERpDCGDvGRLrtO5PHsBPWWI674Hh4yXZjqs= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/azureeventhubreceiver v0.82.0/go.mod h1:vPzXLICG8xxPhAmaCdiouTMxDyP1rxNERAkf4mqyHC4= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.82.0 h1:zh2dxXaKJIbYzF1/5ggdMbGOdi2j8jCMFP14/zCX25M= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver v0.82.0/go.mod h1:Uk/9MwFO9wVpoULrevcSlu61bHP2K9JSpNk1lbmyufM= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.82.0 h1:ZCRHMMimBwTxgsv6vfnrf3/OHJZZ6LElrKkWFcbW7Jk= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/cloudfoundryreceiver v0.82.0/go.mod h1:Gt/xs91rHmszZi4oSDjovLdtJMmzLXLBhyjCSbMvrV8= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.82.0 h1:7edeB08aK//wMysxCjBJZvsw/Q4AEZVXsDJ/dpfsQgg= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/collectdreceiver v0.82.0/go.mod h1:UJuFtfIslQf6XltwWyT7twSV9hZ3Bu6ECBPH0WRlyeY= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.82.0 h1:zYD9pYKlF7nLpwyrZ9Nvgj4K8TtdnMtX0PS2qSJvo8Y= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/filelogreceiver v0.82.0/go.mod h1:R2SI1aXgdLZJUwhEjv9nyA9gwhzWaWwS+Gouq9F8iF0= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.82.0 h1:0eU0t7UwdZcpGfdMDhxpGRPQm3dqrOvTBjYldI9EjuE= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/fluentforwardreceiver v0.82.0/go.mod h1:+Wedi3xCMH+5aiwXM+sbZtIn1TPGucajWM5sDIXxhRc= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.82.0 h1:J8jwjrVQU5l6EnsaDBY8qOsW32nrylhwlCuV5WbvZ2M= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver v0.82.0/go.mod h1:nn7inE3tI59x/LFVNF1e34HGBij1CsZzpzjmYu2DGEk= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.82.0 h1:JMJF6BiLMovfCW/+s+rTnndp7IY9qrVyZ7xZnZEtCtA= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jaegerreceiver v0.82.0/go.mod h1:UMwCyO7+q2f3S01fU40NYYXUNSWZy5X1RmnTtJR92fk= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.82.0 h1:8R+P7Fa/1uChVnm7Fm31VY5EXwTpik1r5vQF0ys4Sn4= 
+github.com/open-telemetry/opentelemetry-collector-contrib/receiver/jmxreceiver v0.82.0/go.mod h1:I3FpypkJJSoxWA2v7dO5DGGKdgYVIrgqycElGoY8v28= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.82.0 h1:fTucAdcwJb/4r7CLrVuhPa1tyPdkAG2gQKcz+gB/284= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/journaldreceiver v0.82.0/go.mod h1:bTjY+QlCCg20zHW5cuwEKrwjAlt7rpXxdngpBLTIpwA= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.82.0 h1:Y27ys+YcC+W3XJVjPwto293rul17UQL/UXptWG+2oGU= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver v0.82.0/go.mod h1:bZTM6H3CARaVBLekpZ5JyHdWJRdYN4ugUw9knFfral8= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.82.0 h1:/ZWmDnESWoNsoFo2u3CwRIlPnrOVOjNawY7gy/yWtQY= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8seventsreceiver v0.82.0/go.mod h1:4hXqwbQAlNxvD5CxlfkEeOFksjh6kd/ruEyUZaj/yQU= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.82.0 h1:eb74MvhR7v98LqS1fyGWYnmjnENTKio3mS6lFKIk/vI= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver v0.82.0/go.mod h1:Gzu/ChqQJGe/dW86tEu2b+QFxOMEpsBByBL5sPTRfAQ= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.82.0 h1:C53B3NXcBYxkY8mx1Bw5LsgYDolMf9AbyUQzzdzcHQk= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkametricsreceiver v0.82.0/go.mod h1:y6RsXHO6O/SGG8gPZqblLTY7JJy92WHoZXvi/AJZI4c= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.82.0 h1:ucTSzZJfzy4xav3bjGFZWAlf/rdxoIN620ZW+wW5u2Y= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver v0.82.0/go.mod h1:jep8pXp8tTHpr4Fe3w6PH9vHGHo/m+g1MukPZx9mzHI= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.82.0 h1:MfvRnLf0quBFDDDdUUjlGtPD2yXp8xKSZ+wxm6dyJrw= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kubeletstatsreceiver v0.82.0/go.mod h1:y9sXV241G+xyCIWE6ATduA5qGoZX8UyjrlYh28rDxpA= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.82.0 h1:KoCjdPxdzSgm9417p2mQjj+XDFjdQTxVZT4THQ1R1GU= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/mongodbatlasreceiver v0.82.0/go.mod h1:SM8EL5amDUKLMYGdfQVagHLJNvoElEJ2pA/PqpT/+8A= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.82.0 h1:O5Vpu/1bgNpn67cuw22m1xbBGWp9FJ8V2MS4L7uXGeM= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/oracledbreceiver v0.82.0/go.mod h1:lDijP8UdLmIYjg8a3lQ0X1hik/V4ZOWXr2QRNPoApyE= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.82.0 h1:H2WmEQcrDr4nhZtieuA97MZLNA8lcg8mkxjkq5trgiA= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/postgresqlreceiver v0.82.0/go.mod h1:aTUCEZJpTFIe6gdtAbzNm+ccVI0RuLZ0sSu/K6l54ZM= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.82.0 h1:vIAY6LFS+r+hZSqUeSLe96gHpjhDt3QvdoPMkEWWyPw= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver v0.82.0/go.mod h1:A6SBmIJDW+DnhVR3UHNhdRbIy28oWfzppgtUDTKbQsI= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.82.0 h1:oJ9H90C/qwP+3dSxEL+fAYXpyN/8GqNp6j0CODeI2yo= 
+github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver v0.82.0/go.mod h1:LdHYaAdHsKYhVNOwGJS2Egu+Rrb4xWEerC2JNpC4wrs= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.82.0 h1:pxJRmmalzD026bn3WbMTCTseDq6v/iekjSK2UQs75so= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/receivercreator v0.82.0/go.mod h1:kfVKq2ke9FITS9mFIDn3cUDy7DTxc80Z3thrgwUk1IM= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.82.0 h1:IFdRFxt4i9tv0r0uhKYSn1+V+tmVbIBhTgN+X3T+80Y= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/redisreceiver v0.82.0/go.mod h1:i46k5twSfXFqLPrcjhIrJWwzwPEowHKHeYYjvswmSRg= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.82.0 h1:7iYtLgzW7l8emYmSUFMt5DlHh1qwmU5smyHS4S3kxNw= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sapmreceiver v0.82.0/go.mod h1:tE3o9dhGQIBa71Go8nqRGjSLhNkIL9wsx15BOnTNGu0= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.82.0 h1:NI6AUgoQlwxsWjCELaIqeINcQLZLpWPE3KaowtULoYw= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/signalfxreceiver v0.82.0/go.mod h1:LiZGzVtx+dvcOzeX7R7n+zAV1fBldjal1HwQjewRSiQ= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.82.0 h1:R+q57cHUlTde6kfGWVd8lkvzidmatLU3D8LiGxzD8QY= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/simpleprometheusreceiver v0.82.0/go.mod h1:U21LqYvBO1V+8Hn76tjG1/U8bgKu4TuVRg1+EHEi9Tw= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.82.0 h1:IgwGzS1ftJjafCug0a38JHv/c7rKeAo1OMIsqt/WvE4= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/splunkhecreceiver v0.82.0/go.mod h1:EfG/z06CjGdh0WHKi41ZKi+vo51bJsvx3nSELzFxwMI= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.82.0 h1:N2ogrYKfkPwlyG9IYk2866xpZ1wZ65vPBJ2FliE9Sy4= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlqueryreceiver v0.82.0/go.mod h1:CjSoemmgzLD1ZMeCaVWbsitduNZLJ9lnVxl4J9YB+vY= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.82.0 h1:iZtAbhXHFR9ID2ZttCbR9wvS2J3Ivi/9nJQj0/7YuCI= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/statsdreceiver v0.82.0/go.mod h1:8DsPwUYp7YpJJALNimxVe0IXwtaRC3Aj79BWlmeG8N8= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.82.0 h1:kDI9TSx8ItyFd1DPmtjeN3XsKsF7tQYyoRuPBJRROU8= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/syslogreceiver v0.82.0/go.mod h1:3xdSzS5yg+Hlo2aykjWyMAbL6u/RtWkQ1GxdB5Kvjb0= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.82.0 h1:aEscF9RVb1wjwjBf1o+KcTKrfCyFiBOJ3czsEMnzUhE= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tcplogreceiver v0.82.0/go.mod h1:/pX5wzLnVmJRse8xOSBoCr9+ciaHdN4gMYfxLcU9F4U= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.82.0 h1:maT1kibf+n2H2ZI1BqLN4Xzz0OUVYQHjdVTZnogKJe8= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowseventlogreceiver v0.82.0/go.mod h1:lV4fRU6+Tvl2mi8F9U87Lw8FXeOKD8OflLDXEPqMKqs= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.82.0 h1:rWFisgl7QitRY+XiQAXECOcwhsHZXYXUUMLMRx2F12M= 
+github.com/open-telemetry/opentelemetry-collector-contrib/receiver/windowsperfcountersreceiver v0.82.0/go.mod h1:qOyQKOkh5WbtlG2iVrxFq42Pk6SrjZtRKrCMiJ6zetQ= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.82.0 h1:DAuk2nc0eCgZIVdrZ8OPIJ2w343zLN3e98vS9D+Osk4= +github.com/open-telemetry/opentelemetry-collector-contrib/receiver/zipkinreceiver v0.82.0/go.mod h1:8aOC0UNBTUuRt5Bmw77bxcFJYd1HXffvU8wlVo/1DXc= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= @@ -1482,8 +1479,8 @@ github.com/signalfx/signalfx-go v1.33.0 h1:+v1fa+is8rYSxGoN1W+9PiDj1dCF5sVjJx60d github.com/signalfx/signalfx-go v1.33.0/go.mod h1:IpGZLPvCKNFyspAXoS480jB02mocTpo0KYd8jbl6/T8= github.com/signalfx/telegraf v0.10.2-0.20210820123244-82265917ca87 h1:ayeUHxiUjcxzuEzjWVkXJxf42UYNw8UKmYmIQu9mAqo= github.com/signalfx/telegraf v0.10.2-0.20210820123244-82265917ca87/go.mod h1:1gnMOcwGO3lAxfoMq28M8gjooF2MqVwquPVEvgZ1its= -github.com/sijms/go-ora/v2 v2.7.8 h1:6G4a+L9atTJFRCIOENi05CEE6gMxCGYS9CFimhqPqlw= -github.com/sijms/go-ora/v2 v2.7.8/go.mod h1:EHxlY6x7y9HAsdfumurRfTd+v8NrEOTR3Xl4FWlH6xk= +github.com/sijms/go-ora/v2 v2.7.9 h1:FvPwsyNtAOywDKlgjrgCpGkL0s49ZA/ShTBgEAfYKE0= +github.com/sijms/go-ora/v2 v2.7.9/go.mod h1:EHxlY6x7y9HAsdfumurRfTd+v8NrEOTR3Xl4FWlH6xk= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -1620,8 +1617,8 @@ go.etcd.io/etcd/client/v2 v2.305.9 h1:YZ2OLi0OvR0H75AcgSUajjd5uqKDKocQUqROTG11jI go.etcd.io/etcd/client/v2 v2.305.9/go.mod h1:0NBdNx9wbxtEQLwAQtrDHwx58m02vXpDcgSYI2seohQ= go.etcd.io/etcd/client/v3 v3.5.0/go.mod h1:AIKXXVX/DQXtfTEqBryiLTUXwON+GuvO6Z7lLS/oTh0= go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= -go.mongodb.org/atlas v0.30.0 h1:0JshBG4ASDKPQKY5Mk3xP9jUfhbpqvFlG0aKu2DGaNc= -go.mongodb.org/atlas v0.30.0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg= +go.mongodb.org/atlas v0.31.0 h1:NgLqsNYm6wDYeDUO90etw1sl8T1U2DUKu36eUdnrFSI= +go.mongodb.org/atlas v0.31.0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg= go.mongodb.org/mongo-driver v1.12.0 h1:aPx33jmn/rQuJXPQLZQ8NtfPQG8CaqgLThFtqRb0PiE= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -1632,68 +1629,68 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/collector v0.81.0 h1:pF+sB8xNXlg/W0a0QTLz4mUWyool1a9toVj8LmLoFqg= -go.opentelemetry.io/collector v0.81.0/go.mod h1:thuOTBMusXwcTPTwLbs3zwwCOLaaQX2g+Hjf8OObc/w= -go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY= -go.opentelemetry.io/collector/component v0.81.0/go.mod h1:+m6/yPiJ7O7Oc/OLfmgUB2mrY1xoUqRj4BsoOtIVpGs= -go.opentelemetry.io/collector/config/configauth v0.81.0 h1:NIiJuIGOdblN0EIJv64R2mvGhthcYfWuvyCnjk8HRN4= 
-go.opentelemetry.io/collector/config/configauth v0.81.0/go.mod h1:2KscbmU+8fIzwiSU9Kku0Tf4b4A1plqFIJXR1DWSaTw=
-go.opentelemetry.io/collector/config/configcompression v0.81.0 h1:Q725pvVH7tR6BP3WK7Ro3pbqMeQdZEV3KeFVHchBxCc=
-go.opentelemetry.io/collector/config/configcompression v0.81.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0 h1:Q2xEE2SGbg79j3TdHT+781eUu/2uUIyrHVJAG9bLpVk=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0/go.mod h1:Frq/l2Ttbvm7cFH3hkxLdhl5TCNHcH6rnkpmi8U2kLY=
-go.opentelemetry.io/collector/config/confighttp v0.81.0 h1:vIdiepUT7P/WtJRdfh8mjzvSqJRVF8/vl9GWtUNQlHQ=
-go.opentelemetry.io/collector/config/confighttp v0.81.0/go.mod h1:I54THsffkpv//O7bUHw+0bXxjYdvyL6IHg5ksgYez8I=
-go.opentelemetry.io/collector/config/confignet v0.81.0 h1:Eu8m3eX8GaGhOUc//YXvV4i3cEivxUSxkLnV1U9ydhg=
-go.opentelemetry.io/collector/config/confignet v0.81.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
-go.opentelemetry.io/collector/config/configopaque v0.81.0 h1:MkCAGh0WydRWydETB9FLnuCj9hDPDiz2g4Wxnl53I0w=
-go.opentelemetry.io/collector/config/configopaque v0.81.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
-go.opentelemetry.io/collector/config/configtls v0.81.0 h1:2vt+yOZUvGq5ADqFAxL5ONm1ACuGXDSs87AWT54Ez4M=
-go.opentelemetry.io/collector/config/configtls v0.81.0/go.mod h1:HMHTYBMMgqBpTvnNAhQYmjO7XuoBMe2T4qRHcKluB4Q=
-go.opentelemetry.io/collector/config/internal v0.81.0 h1:wRV2PBnJygdmKpIdt/xfG7zdQvXvHz9L+z8MhGsOji4=
-go.opentelemetry.io/collector/config/internal v0.81.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
-go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4=
-go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY=
-go.opentelemetry.io/collector/connector v0.81.0 h1:5jYYjQwxxgJKFtVvvbFLd0+2QHsvS0z+lVDxzmRv8uk=
-go.opentelemetry.io/collector/connector v0.81.0/go.mod h1:rQsgBsEfxcBj0Wdp6a9z8E9NBxybolOfKheXBcosC2c=
-go.opentelemetry.io/collector/connector/forwardconnector v0.81.0 h1:GFgguVsz4BNJeaKE7sOh8qgSa5jGYa1PM+3kV6CyI1Y=
-go.opentelemetry.io/collector/connector/forwardconnector v0.81.0/go.mod h1:bBMKg77Or4Bh91bJwkrAiEqOYLc3g407Kz4RRdnePO0=
-go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas=
-go.opentelemetry.io/collector/consumer v0.81.0/go.mod h1:jS7+gAKdOx3lD3SnaBztBjUVpUYL3ee7fpoqI4p/gT8=
-go.opentelemetry.io/collector/exporter v0.81.0 h1:GLhB8WGrBx+zZSB1HIOx2ivFUMahGtAVO2CC5xbCUHQ=
-go.opentelemetry.io/collector/exporter v0.81.0/go.mod h1:Di4RTzI8uRooVNATIeApNUgmGdNt8XiikUTQLabmZaA=
-go.opentelemetry.io/collector/exporter/loggingexporter v0.81.0 h1:6cHx9fK58m3h/5IrkfgYarHQunBQkGQaUw150oHL1G0=
-go.opentelemetry.io/collector/exporter/loggingexporter v0.81.0/go.mod h1:uVVPQ8OkOrXkchTOS9cA4Yu8aB1DJnyC2+Y+IZY8Sys=
-go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0 h1:Ri5pj0slm+FUbbG81UIhQaQ992z2+PcT2++4JI32XGI=
-go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0/go.mod h1:u19TJEy/n35jjU/ie2YOlAL4K1s9rvRKSNaq9JDlBF8=
-go.opentelemetry.io/collector/exporter/otlphttpexporter v0.81.0 h1:KSE7wjy1J0I0izLTodTW4axRmJplpQgCRqYFbAzufZo=
-go.opentelemetry.io/collector/exporter/otlphttpexporter v0.81.0/go.mod h1:x/G0eEHSDvHPSoOzaqY8v6uSfwnTuVmKudAEYqAXJJ4=
-go.opentelemetry.io/collector/extension v0.81.0 h1:Ak7AzZzxTFJxGyVbEklsGzqHyOHW5USiifJilCcRyTU=
-go.opentelemetry.io/collector/extension v0.81.0/go.mod h1:DU2bX8qulS5+OCJZGfvqIwIT/q3sFnEjI2HjJ2LDI/s=
-go.opentelemetry.io/collector/extension/auth v0.81.0 h1:UzVQSG9naJh1hX7hh+HVcvB3n+rpCJXX2BBdUoL/Ybo=
-go.opentelemetry.io/collector/extension/auth v0.81.0/go.mod h1:PaBFcFrzXV+UgM4VZKp6Kn1IiRC/MbEYWxTfIalcIwk=
-go.opentelemetry.io/collector/extension/ballastextension v0.81.0 h1:4zA1pd8aSkvIk03HKCDz2z9fCkiDFQUZeJ6b5V7HF8o=
-go.opentelemetry.io/collector/extension/ballastextension v0.81.0/go.mod h1:hdvJ7ecQObgYg7SurOttViBwta4bBb5a1hYz1+HddFk=
-go.opentelemetry.io/collector/extension/zpagesextension v0.81.0 h1:ov3h5re95uJcF6N+vR/rLpjsEkGs6easxXSphH9UrPg=
-go.opentelemetry.io/collector/extension/zpagesextension v0.81.0/go.mod h1:oN9HkYCae/b2ftIJVzY/ATDEqcxS61DuTaC6aCxeJMo=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8=
-go.opentelemetry.io/collector/processor v0.81.0 h1:ypyNV5R0bnN3XGMAsH/q5eNARF5vXtFgSOK9rBWzsLc=
-go.opentelemetry.io/collector/processor v0.81.0/go.mod h1:ZDwO3DVg1VUSA92g0r/o0jYk+T7r9uxgZZ3LABJbC34=
-go.opentelemetry.io/collector/processor/batchprocessor v0.81.0 h1:qmFuxKR12Sq209MkXSelvuZDG4otMwxZW0yetHKIsj0=
-go.opentelemetry.io/collector/processor/batchprocessor v0.81.0/go.mod h1:Rb5jv7bbMxw72RCvZclh4QD64aoC91qQdFwyaa5HHV4=
-go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.81.0 h1:TsfznxCfgnM/SRNXaTRf0b8ealUmnF4fY2rHce3uj40=
-go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.81.0/go.mod h1:DYsauLyWsW8WqgSUz4xt5H5uDh2Q7IL9d9VRoTvg1G4=
-go.opentelemetry.io/collector/receiver v0.81.0 h1:0c+YtIV7fmd9ev+zmwS9qjx5ASi8cw+gSypu4I7Gugc=
-go.opentelemetry.io/collector/receiver v0.81.0/go.mod h1:q80JkMxVLnk0vWxoTRY2J7F4Qx9069Yy5yxDbZ4JVwk=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 h1:ewVbfATnAeQkwFK3r0dpFKCXcTb8HJKX4AixUioRt+c=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0/go.mod h1:LGuSMVdOq5Zq+CEHF9YBHMaOIUZrzqW7DQGqo9g0dJA=
-go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw=
-go.opentelemetry.io/collector/semconv v0.81.0/go.mod h1:TlYPtzvsXyHOgr5eATi43qEMqwSmIziivJB2uctKswo=
+go.opentelemetry.io/collector v0.82.0 h1:MaKqWT0R4GCdkZDhYWOQkLfoJj9V7GsMbk1gsAuogaw=
+go.opentelemetry.io/collector v0.82.0/go.mod h1:PMmDJkZzC1xpcViHlwMMEVeAnRRl3HYy3nXgD8KJwG0=
+go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4=
+go.opentelemetry.io/collector/component v0.82.0/go.mod h1:jSdGG4L1Ger6ob6lWpr8jmKC2qqC+XZ/gOgu7GUA5xs=
+go.opentelemetry.io/collector/config/configauth v0.82.0 h1:H5xrWyPMotSqajiiH/bay8bpVsT4aq6Vih4OuArXv4Q=
+go.opentelemetry.io/collector/config/configauth v0.82.0/go.mod h1:P0ukmBIUk+HP0O7yfUOKRmPmffneAQgmEL9/iTOo1CU=
+go.opentelemetry.io/collector/config/configcompression v0.82.0 h1:M6a7eiHdBUB8mIioDhWugJfNm7Sw85cvv/OXyTDhtY0=
+go.opentelemetry.io/collector/config/configcompression v0.82.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0 h1:taZWDbtVBm0OOcgnfpVA1X43pmU2oNhj39B2uV3COQk=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0/go.mod h1:NHXHRI40Q7TT/d38DKT30B7DOrVUkj7anEFOD59R9o8=
+go.opentelemetry.io/collector/config/confighttp v0.82.0 h1:2LhyqVTd+Bsr8SgsCq6+q731F81uddK9GwvGhwD/Co0=
+go.opentelemetry.io/collector/config/confighttp v0.82.0/go.mod h1:OHGx/aJqGJ9z2jaBXvaylwkAuiUwikg1/n+RRDpsfOo=
+go.opentelemetry.io/collector/config/confignet v0.82.0 h1:zN9JaFTn7Dth3u5ot6KZJcBZACTEzGqFWYyO5qAlYfo=
+go.opentelemetry.io/collector/config/confignet v0.82.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
+go.opentelemetry.io/collector/config/configopaque v0.82.0 h1:0Ma63QTr4AkODzEABZHtgiU5Dig8SItpHOuB28UnVSw=
+go.opentelemetry.io/collector/config/configopaque v0.82.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
+go.opentelemetry.io/collector/config/configtls v0.82.0 h1:eE/8muTszLlviOGLy5N08BaXLCcYqDW3mKIoKyDDa8o=
+go.opentelemetry.io/collector/config/configtls v0.82.0/go.mod h1:unBTmL1bdpkp9mYEDz7N+Ln4yEwh7Ug74I1HgZMplCk=
+go.opentelemetry.io/collector/config/internal v0.82.0 h1:JnnDARkXrC3OJDsMfQkBgfI0Np4s+18zvoDqZ4OH0+I=
+go.opentelemetry.io/collector/config/internal v0.82.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
+go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4=
+go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s=
+go.opentelemetry.io/collector/connector v0.82.0 h1:sCzfcROg0IbmmwoAeLzVfcAs1ZpwlA+UzLzc3xRjOr4=
+go.opentelemetry.io/collector/connector v0.82.0/go.mod h1:yXr1degja36+aAdY3qOv66jCXHs5QjiIeoerygLYC44=
+go.opentelemetry.io/collector/connector/forwardconnector v0.82.0 h1:NxjVTGWIkNtBiTiNEa2VZxLTmrDRylu9YjMJe+I7FeI=
+go.opentelemetry.io/collector/connector/forwardconnector v0.82.0/go.mod h1:xx9I0GXqLYWjv02TOBkz7pzxHYvO/mepEtmcFUoRXYg=
+go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE=
+go.opentelemetry.io/collector/consumer v0.82.0/go.mod h1:qrhd0i0Gp0RkihcEXb+7Rb584Kal2NmGH1eA4Zg6puA=
+go.opentelemetry.io/collector/exporter v0.82.0 h1:BWsx4rWfVwlV+qNuevSMm+2Cv6uGZYYZ9CEFqq0q+F4=
+go.opentelemetry.io/collector/exporter v0.82.0/go.mod h1:e3VPpLYVNRaF+G2HuKw6A5hTBMYZ4tgRYYzMusfwFJE=
+go.opentelemetry.io/collector/exporter/loggingexporter v0.82.0 h1:HlgFz6qqpjqk9ZmGbaLSdUJxOo6Q3jo3PiJHcuugpaA=
+go.opentelemetry.io/collector/exporter/loggingexporter v0.82.0/go.mod h1:jMMN2fKXx+RKDI3tpqIym5HK6uZnJ3X22hyFgK24cK4=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0 h1:tYCEUQpfyuS/NgrWg9Ulps6f0ffPSCBRTBdK6sXnSaw=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0/go.mod h1:CGeXJuRYxrzTtJUHlpLPHirzcmGq5qbcPff0ec+If14=
+go.opentelemetry.io/collector/exporter/otlphttpexporter v0.82.0 h1:GdnfmEgOY3/GHFereYRcfr8RcDTR0vlK9a3Qtyr0jCg=
+go.opentelemetry.io/collector/exporter/otlphttpexporter v0.82.0/go.mod h1:1a6is4De7GYERjFOa1K9dPbhRwsip5Zj7jt96taViY8=
+go.opentelemetry.io/collector/extension v0.82.0 h1:DH4tqrTOz0HmGDJ6FT/jRD2woQf3ugqC6QqSiQdH3wg=
+go.opentelemetry.io/collector/extension v0.82.0/go.mod h1:n7d0XTh7fdyorZWTc+gLpJh78FS7GjRqIjUiW1xdhe0=
+go.opentelemetry.io/collector/extension/auth v0.82.0 h1:iaxwFslRj6mfzs1wVzbnj+gDU2G98IeXW4tcrq78p5s=
+go.opentelemetry.io/collector/extension/auth v0.82.0/go.mod h1:O1xBcb06pKD8g3FadLDvMa1xKZwPGdHQp4CI8vW3RCM=
+go.opentelemetry.io/collector/extension/ballastextension v0.82.0 h1:GiNzI6Z3iX9DQwJ/fI44o3yWDtecfgAgxs5C8kptP0Q=
+go.opentelemetry.io/collector/extension/ballastextension v0.82.0/go.mod h1:s15/A21hPRjlXH7EelcHlvW2g7A8tEVfReO2T6Wz+C4=
+go.opentelemetry.io/collector/extension/zpagesextension v0.82.0 h1:rZN8OxNy+YBjaDXYGnFoGRPBDruET1lxjVL8hzzgH5k=
+go.opentelemetry.io/collector/extension/zpagesextension v0.82.0/go.mod h1:mUJk+sX47AdkdASvXu26cK/NXOh+5j+TtEdxJA6K+W4=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/processor v0.82.0 h1:DoqVrrnGYThu/h1sOr6E0hR1Fj5nQT4VT0ptFZcltRk=
+go.opentelemetry.io/collector/processor v0.82.0/go.mod h1:B0MtfLWCYNBJ+PXf9k77M2Yn08MKItNB2vuvwhqrtt0=
+go.opentelemetry.io/collector/processor/batchprocessor v0.82.0 h1:cUS+9wkzgp5+kgYB7ppSW1HRT+L5fzo3Wmjcm0W6Fho=
+go.opentelemetry.io/collector/processor/batchprocessor v0.82.0/go.mod h1:q/+ywtFMrB3yTSSfxw/rpEq07CcgpQeQoROJdi9JOm8=
+go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.82.0 h1:ACdNV8fO2LM1yw1gBIXN5ybydxZHqAHomkEf1WljPyc=
+go.opentelemetry.io/collector/processor/memorylimiterprocessor v0.82.0/go.mod h1:LbeXquV0D0yi+qIohuxSAvp4LBaJbIer9ZCP9+bGBtU=
+go.opentelemetry.io/collector/receiver v0.82.0 h1:bc6jc8jmSgc0/C9zqTqqWOGJFVx0AJ53jiToSmQs2SE=
+go.opentelemetry.io/collector/receiver v0.82.0/go.mod h1:Uh6BgcTmmrA1Bm/GpKGRY6WwQyPio4yEDsYkUo0A5Gk=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 h1:LzcmQ9d7NauTVEWfPNwRwqNd/NBQDi+JU0OHWearcEA=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0/go.mod h1:Qt9Ha/yWaU6ni0XwFslNCBX5zZBQHcnxma/sU1s7LH4=
+go.opentelemetry.io/collector/semconv v0.82.0 h1:WUeT2a+uZjI6kLvwcBaJnGvo7KSQ/9dIFRcxOQdXucc=
+go.opentelemetry.io/collector/semconv v0.82.0/go.mod h1:TlYPtzvsXyHOgr5eATi43qEMqwSmIziivJB2uctKswo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 h1:mdcNStUIXngF/mH3xxAo4nbR4g65IXqLL1SvYMjz7JQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17/go.mod h1:N2Nw/UmmvQn0yCnaUzvsWzTWIeffYIdFteg6mxqCWII=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM=
@@ -1706,8 +1703,20 @@ go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s=
go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4=
go.opentelemetry.io/otel/bridge/opencensus v0.39.0 h1:YHivttTaDhbZIHuPlg1sWsy2P5gj57vzqPfkHItgbwQ=
go.opentelemetry.io/otel/bridge/opencensus v0.39.0/go.mod h1:vZ4537pNjFDXEx//WldAR6Ro2LC8wwmFC76njAXwNPE=
+go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 h1:t4ZwRPU+emrcvM2e9DHd0Fsf0JTPVcbfa/BhTDF03d0=
+go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0/go.mod h1:vLarbg68dH2Wa77g71zmKQqlQ8+8Rq3GRG31uc0WcWI=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0 h1:f6BwB2OACc3FCbYVznctQ9V6KK7Vq6CjmYXJ7DeSs4E=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.39.0/go.mod h1:UqL5mZ3qs6XYhDnZaW1Ps4upD+PX6LipH40AoeuIlwU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0 h1:rm+Fizi7lTM2UefJ1TO347fSRcwmIsUAaZmYmIGBRAo= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.39.0/go.mod h1:sWFbI3jJ+6JdjOVepA5blpv/TJ20Hw+26561iMbWcwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0 h1:IZXpCEtI7BbX01DRQEWTGDkvjMB6hEhiEZXS+eg2YqY= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.39.0/go.mod h1:xY111jIZtWb+pUUgT4UiiSonAaY2cD2Ts5zvuKLki3o= go.opentelemetry.io/otel/exporters/prometheus v0.39.0 h1:whAaiHxOatgtKd+w0dOi//1KUxj3KoPINZdtDaDj3IA= go.opentelemetry.io/otel/exporters/prometheus v0.39.0/go.mod h1:4jo5Q4CROlCpSPsXLhymi+LYrDXd2ObU5wbKayfZs7Y= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0 h1:fl2WmyenEf6LYYlfHAtCUEDyGcpwJNqD4dHGO7PVm4w= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.39.0/go.mod h1:csyQxQ0UHHKVA8KApS7eUO/klMO5sd/av5CNZNU4O6w= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 h1:+XWJd3jf75RXJq29mxbuXhCXFDG3S3R4vBUeSI2P7tE= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0/go.mod h1:hqgzBPTf4yONMFgdZvL/bK42R/iinTyVQtiWihs3SZc= go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4= go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= @@ -1717,6 +1726,8 @@ go.opentelemetry.io/otel/sdk/metric v0.39.0/go.mod h1:piDIRgjcK7u0HCL5pCA4e74qpK go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -2066,8 +2077,8 @@ google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00 google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.132.0 h1:8t2/+qZ26kAOGSmOiHwVycqVaDg7q3JDILrNi/Z6rvc= -google.golang.org/api v0.132.0/go.mod h1:AeTBC6GpJnJSRJjktDcPX0QwtS8pGYZOV6MSuSCusw0= +google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw= +google.golang.org/api v0.134.0/go.mod h1:sjRL3UnjTx5UqNQS9EWr9N8p7xbHpy1k0XGRLCf3Spk= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -2139,8 +2150,8 @@ google.golang.org/genproto v0.0.0-20230717213848-3f92550aa753 h1:+VoAg+OKmWaommL google.golang.org/genproto v0.0.0-20230717213848-3f92550aa753/go.mod h1:iqkVr8IRpZ53gx1dEnWlCUIEwDWqWARWrbzpasaTNYM= google.golang.org/genproto/googleapis/api v0.0.0-20230717213848-3f92550aa753 h1:lCbbUxUDD+DiXx9Q6F/ttL0aAu7N2pz8XnmMm8ZW4NE= 
google.golang.org/genproto/googleapis/api v0.0.0-20230717213848-3f92550aa753/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230717213848-3f92550aa753 h1:XUODHrpzJEUeWmVo/jfNTLj0YyVveOo28oE6vkFbkO4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230717213848-3f92550aa753/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230720185612-659f7aaaa771 h1:Z8qdAF9GFsmcUuWQ5KVYIpP3PCKydn/YKORnghIalu4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230720185612-659f7aaaa771/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= @@ -2172,8 +2183,8 @@ google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= -google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= -google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= +google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -2259,8 +2270,8 @@ k8s.io/klog/v2 v2.100.1 h1:7WCHKK6K8fNhTqfBhISHQ97KrnJNFZMcQvKp7gP/tmg= k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515 h1:OmK1d0WrkD3IPfkskvroRykOulHVHf0s0ZIFRjyt+UI= k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515/go.mod h1:kzo02I3kQ4BTtEfVLaPbjvCkX97YqGve33wzlb3fofQ= -k8s.io/kubelet v0.27.3 h1:5WhTV1iiBu9q/rr+gvy65LQ+K/e7dmgcaYjys5ipLqY= -k8s.io/kubelet v0.27.3/go.mod h1:Mz42qgZZgWgPmOJEYaR5evmh+EoSwFzEvPBozA2y9mg= +k8s.io/kubelet v0.27.4 h1:P8+MoRx4ikcAc5eEa3k2A6kd8AXtoDRaoC8KX2HFZe4= +k8s.io/kubelet v0.27.4/go.mod h1:2y4peCA57vKEhBcDL6Q5EkPuGP7FFxj9U41NV9hk1ac= k8s.io/utils v0.0.0-20211116205334-6203023598ed/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20230711102312-30195339c3c7 h1:ZgnF1KZsYxWIifwSNZFZgNtWE89WI5yiP5WwlfDoIyc= k8s.io/utils v0.0.0-20230711102312-30195339c3c7/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= diff --git a/internal/receiver/databricksreceiver/README.md b/internal/receiver/databricksreceiver/README.md index c1f1321d41..cfe41231b9 100644 --- a/internal/receiver/databricksreceiver/README.md +++ b/internal/receiver/databricksreceiver/README.md @@ -5,6 +5,7 @@ | ------------- |-----------| | Stability | [development]: metrics | | Distributions | [] | +| Issues | [![Open 
issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aopen%20label%3Areceiver%2Fdatabricks%20&label=open&color=orange&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aopen+is%3Aissue+label%3Areceiver%2Fdatabricks) [![Closed issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aclosed%20label%3Areceiver%2Fdatabricks%20&label=closed&color=blue&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aclosed+is%3Aissue+label%3Areceiver%2Fdatabricks) | [development]: https://github.com/open-telemetry/opentelemetry-collector#development diff --git a/internal/receiver/databricksreceiver/factory.go b/internal/receiver/databricksreceiver/factory.go index 57be02e8ac..eb5b869fe0 100644 --- a/internal/receiver/databricksreceiver/factory.go +++ b/internal/receiver/databricksreceiver/factory.go @@ -58,6 +58,7 @@ func newReceiverFactory() receiver.CreateMetricsFunc { rmp: databricks.NewRunMetricsProvider(dbrsvc), dbrmp: databricks.MetricsProvider{Svc: dbrsvc}, metricsBuilder: metadata.NewMetricsBuilder(dbrcfg.MetricsBuilderConfig, settings), + resourceBuilder: metadata.NewResourceBuilder(dbrcfg.MetricsBuilderConfig.ResourceAttributes), scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc}, semb: spark.ExtraMetricsBuilder{ Ssvc: ssvc, diff --git a/internal/receiver/databricksreceiver/internal/metadata/generated_config_test.go b/internal/receiver/databricksreceiver/internal/metadata/generated_config_test.go index bc4d34b8e7..5fbe85830c 100644 --- a/internal/receiver/databricksreceiver/internal/metadata/generated_config_test.go +++ b/internal/receiver/databricksreceiver/internal/metadata/generated_config_test.go @@ -348,3 +348,53 @@ func loadMetricsBuilderConfig(t *testing.T, name string) MetricsBuilderConfig { require.NoError(t, component.UnmarshalConfig(sub, &cfg)) return cfg } + +func TestResourceAttributesConfig(t *testing.T) { + tests := []struct { + name string + want ResourceAttributesConfig + }{ + { + name: "default", + want: DefaultResourceAttributesConfig(), + }, + { + name: "all_set", + want: ResourceAttributesConfig{ + DatabricksInstanceName: ResourceAttributeConfig{Enabled: true}, + SparkAppID: ResourceAttributeConfig{Enabled: true}, + SparkClusterID: ResourceAttributeConfig{Enabled: true}, + SparkClusterName: ResourceAttributeConfig{Enabled: true}, + }, + }, + { + name: "none_set", + want: ResourceAttributesConfig{ + DatabricksInstanceName: ResourceAttributeConfig{Enabled: false}, + SparkAppID: ResourceAttributeConfig{Enabled: false}, + SparkClusterID: ResourceAttributeConfig{Enabled: false}, + SparkClusterName: ResourceAttributeConfig{Enabled: false}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cfg := loadResourceAttributesConfig(t, tt.name) + if diff := cmp.Diff(tt.want, cfg, cmpopts.IgnoreUnexported(ResourceAttributeConfig{})); diff != "" { + t.Errorf("Config mismatch (-expected +actual):\n%s", diff) + } + }) + } +} + +func loadResourceAttributesConfig(t *testing.T, name string) ResourceAttributesConfig { + cm, err := confmaptest.LoadConf(filepath.Join("testdata", "config.yaml")) + require.NoError(t, err) + sub, err := cm.Sub(name) + require.NoError(t, err) + sub, err = sub.Sub("resource_attributes") + require.NoError(t, err) + cfg := DefaultResourceAttributesConfig() + require.NoError(t, component.UnmarshalConfig(sub, &cfg)) + 
+	return cfg
+}
diff --git a/internal/receiver/databricksreceiver/internal/metadata/generated_metrics.go b/internal/receiver/databricksreceiver/internal/metadata/generated_metrics.go
index c19a22efba..4890e6d335 100644
--- a/internal/receiver/databricksreceiver/internal/metadata/generated_metrics.go
+++ b/internal/receiver/databricksreceiver/internal/metadata/generated_metrics.go
@@ -7675,16 +7675,15 @@ func newMetricDatabricksTasksScheduleStatus(cfg MetricConfig) metricDatabricksTa
 type MetricsBuilder struct {
 	metricsBuffer pmetric.Metrics
 	buildInfo component.BuildInfo
-	metricDatabricksSparkExecutorMetricsMappedPoolMemory metricDatabricksSparkExecutorMetricsMappedPoolMemory
-	metricDatabricksSparkBlockManagerMemoryOnHeapMax metricDatabricksSparkBlockManagerMemoryOnHeapMax
+	metricDatabricksJobsActiveTotal metricDatabricksJobsActiveTotal
+	metricDatabricksJobsRunDuration metricDatabricksJobsRunDuration
 	metricDatabricksJobsScheduleStatus metricDatabricksJobsScheduleStatus
-	metricDatabricksTasksScheduleStatus metricDatabricksTasksScheduleStatus
 	metricDatabricksJobsTotal metricDatabricksJobsTotal
 	metricDatabricksSparkBlockManagerMemoryDiskSpaceUsed metricDatabricksSparkBlockManagerMemoryDiskSpaceUsed
 	metricDatabricksSparkBlockManagerMemoryMax metricDatabricksSparkBlockManagerMemoryMax
 	metricDatabricksSparkBlockManagerMemoryOffHeapMax metricDatabricksSparkBlockManagerMemoryOffHeapMax
 	metricDatabricksSparkBlockManagerMemoryOffHeapUsed metricDatabricksSparkBlockManagerMemoryOffHeapUsed
-	metricDatabricksSparkExecutorMetricsMinorGcCount metricDatabricksSparkExecutorMetricsMinorGcCount
+	metricDatabricksSparkBlockManagerMemoryOnHeapMax metricDatabricksSparkBlockManagerMemoryOnHeapMax
 	metricDatabricksSparkBlockManagerMemoryOnHeapUsed metricDatabricksSparkBlockManagerMemoryOnHeapUsed
 	metricDatabricksSparkBlockManagerMemoryRemaining metricDatabricksSparkBlockManagerMemoryRemaining
 	metricDatabricksSparkBlockManagerMemoryRemainingOffHeap metricDatabricksSparkBlockManagerMemoryRemainingOffHeap
@@ -7728,7 +7727,7 @@ type MetricsBuilder struct {
 	metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferSuccessfulPreemptionIterationsCount metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferSuccessfulPreemptionIterationsCount
 	metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferTasksPreemptedCount metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferTasksPreemptedCount
 	metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferWastedTaskTime metricDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferWastedTaskTime
-	metricDatabricksSparkExecutorMetricsMinorGcTime metricDatabricksSparkExecutorMetricsMinorGcTime
+	metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCount metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCount
 	metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickDropCount metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickDropCount
 	metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickJumpCount metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickJumpCount
 	metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationSlotsReserved metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationSlotsReserved
@@ -7745,9 +7744,9 @@ type MetricsBuilder struct {
 	metricDatabricksSparkExecutorMetricsJvmOffHeapMemory metricDatabricksSparkExecutorMetricsJvmOffHeapMemory
 	metricDatabricksSparkExecutorMetricsMajorGcCount metricDatabricksSparkExecutorMetricsMajorGcCount
 	metricDatabricksSparkExecutorMetricsMajorGcTime metricDatabricksSparkExecutorMetricsMajorGcTime
-	metricDatabricksSparkLiveListenerBusQueueExecutormanagementSize metricDatabricksSparkLiveListenerBusQueueExecutormanagementSize
-	metricDatabricksJobsRunDuration metricDatabricksJobsRunDuration
-	metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCount metricDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCount
+	metricDatabricksSparkExecutorMetricsMappedPoolMemory metricDatabricksSparkExecutorMetricsMappedPoolMemory
+	metricDatabricksSparkExecutorMetricsMinorGcCount metricDatabricksSparkExecutorMetricsMinorGcCount
+	metricDatabricksSparkExecutorMetricsMinorGcTime metricDatabricksSparkExecutorMetricsMinorGcTime
 	metricDatabricksSparkExecutorMetricsOffHeapExecutionMemory metricDatabricksSparkExecutorMetricsOffHeapExecutionMemory
 	metricDatabricksSparkExecutorMetricsOffHeapStorageMemory metricDatabricksSparkExecutorMetricsOffHeapStorageMemory
 	metricDatabricksSparkExecutorMetricsOffHeapUnifiedMemory metricDatabricksSparkExecutorMetricsOffHeapUnifiedMemory
@@ -7779,7 +7778,7 @@ type MetricsBuilder struct {
 	metricDatabricksSparkLiveListenerBusQueueAppStatusDroppedEventsCount metricDatabricksSparkLiveListenerBusQueueAppStatusDroppedEventsCount
 	metricDatabricksSparkLiveListenerBusQueueAppstatusSize metricDatabricksSparkLiveListenerBusQueueAppstatusSize
 	metricDatabricksSparkLiveListenerBusQueueExecutorManagementDroppedEventsCount metricDatabricksSparkLiveListenerBusQueueExecutorManagementDroppedEventsCount
-	metricDatabricksJobsActiveTotal metricDatabricksJobsActiveTotal
+	metricDatabricksSparkLiveListenerBusQueueExecutormanagementSize metricDatabricksSparkLiveListenerBusQueueExecutormanagementSize
 	metricDatabricksSparkLiveListenerBusQueueSharedDroppedEventsCount metricDatabricksSparkLiveListenerBusQueueSharedDroppedEventsCount
 	metricDatabricksSparkLiveListenerBusQueueSharedSize metricDatabricksSparkLiveListenerBusQueueSharedSize
 	metricDatabricksSparkLiveListenerBusQueueStreamsDroppedEventsCount metricDatabricksSparkLiveListenerBusQueueStreamsDroppedEventsCount
@@ -7815,10 +7814,9 @@ type MetricsBuilder struct {
 	metricDatabricksSparkTimerLiveListenerBusQueueSharedListenerProcessingTime metricDatabricksSparkTimerLiveListenerBusQueueSharedListenerProcessingTime
 	metricDatabricksSparkTimerLiveListenerBusQueueStreamsListenerProcessingTime metricDatabricksSparkTimerLiveListenerBusQueueStreamsListenerProcessingTime
 	metricDatabricksTasksRunDuration metricDatabricksTasksRunDuration
+	metricDatabricksTasksScheduleStatus metricDatabricksTasksScheduleStatus
 	startTime pcommon.Timestamp
 	metricsCapacity int
-	resourceCapacity int
-	resourceAttributesConfig ResourceAttributesConfig
 }
 
 // metricBuilderOption applies changes to default metrics builder.
@@ -7836,7 +7834,6 @@ func NewMetricsBuilder(mbc MetricsBuilderConfig, settings receiver.CreateSetting
 		startTime: pcommon.NewTimestampFromTime(time.Now()),
 		metricsBuffer: pmetric.NewMetrics(),
 		buildInfo: settings.BuildInfo,
-		resourceAttributesConfig: mbc.ResourceAttributes,
 		metricDatabricksJobsActiveTotal: newMetricDatabricksJobsActiveTotal(mbc.Metrics.DatabricksJobsActiveTotal),
 		metricDatabricksJobsRunDuration: newMetricDatabricksJobsRunDuration(mbc.Metrics.DatabricksJobsRunDuration),
 		metricDatabricksJobsScheduleStatus: newMetricDatabricksJobsScheduleStatus(mbc.Metrics.DatabricksJobsScheduleStatus),
@@ -7989,54 +7986,23 @@ func (mb *MetricsBuilder) updateCapacity(rm pmetric.ResourceMetrics) {
 	if mb.metricsCapacity < rm.ScopeMetrics().At(0).Metrics().Len() {
 		mb.metricsCapacity = rm.ScopeMetrics().At(0).Metrics().Len()
 	}
-	if mb.resourceCapacity < rm.Resource().Attributes().Len() {
-		mb.resourceCapacity = rm.Resource().Attributes().Len()
-	}
 }
 
 // ResourceMetricsOption applies changes to provided resource metrics.
-type ResourceMetricsOption func(ResourceAttributesConfig, pmetric.ResourceMetrics)
-
-// WithDatabricksInstanceName sets provided value as "databricks.instance.name" attribute for current resource.
-func WithDatabricksInstanceName(val string) ResourceMetricsOption {
-	return func(rac ResourceAttributesConfig, rm pmetric.ResourceMetrics) {
-		if rac.DatabricksInstanceName.Enabled {
-			rm.Resource().Attributes().PutStr("databricks.instance.name", val)
-		}
-	}
-}
-
-// WithSparkAppID sets provided value as "spark.app.id" attribute for current resource.
-func WithSparkAppID(val string) ResourceMetricsOption {
-	return func(rac ResourceAttributesConfig, rm pmetric.ResourceMetrics) {
-		if rac.SparkAppID.Enabled {
-			rm.Resource().Attributes().PutStr("spark.app.id", val)
-		}
-	}
-}
-
-// WithSparkClusterID sets provided value as "spark.cluster.id" attribute for current resource.
-func WithSparkClusterID(val string) ResourceMetricsOption {
-	return func(rac ResourceAttributesConfig, rm pmetric.ResourceMetrics) {
-		if rac.SparkClusterID.Enabled {
-			rm.Resource().Attributes().PutStr("spark.cluster.id", val)
-		}
-	}
-}
+type ResourceMetricsOption func(pmetric.ResourceMetrics)
 
-// WithSparkClusterName sets provided value as "spark.cluster.name" attribute for current resource.
-func WithSparkClusterName(val string) ResourceMetricsOption {
-	return func(rac ResourceAttributesConfig, rm pmetric.ResourceMetrics) {
-		if rac.SparkClusterName.Enabled {
-			rm.Resource().Attributes().PutStr("spark.cluster.name", val)
-		}
+// WithResource sets the provided resource on the emitted ResourceMetrics.
+// It's recommended to use ResourceBuilder to create the resource.
+func WithResource(res pcommon.Resource) ResourceMetricsOption {
+	return func(rm pmetric.ResourceMetrics) {
+		res.CopyTo(rm.Resource())
 	}
 }
 
 // WithStartTimeOverride overrides start time for all the resource metrics data points.
 // This option should be only used if different start time has to be set on metrics coming from different resources.
 func WithStartTimeOverride(start pcommon.Timestamp) ResourceMetricsOption {
-	return func(_ ResourceAttributesConfig, rm pmetric.ResourceMetrics) {
+	return func(rm pmetric.ResourceMetrics) {
 		var dps pmetric.NumberDataPointSlice
 		metrics := rm.ScopeMetrics().At(0).Metrics()
 		for i := 0; i < metrics.Len(); i++ {
@@ -8060,7 +8026,6 @@ func WithStartTimeOverride(start pcommon.Timestamp) ResourceMetricsOption {
 // Resource attributes should be provided as ResourceMetricsOption arguments.
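[Editor's note] The hunks above replace the four per-attribute options (WithDatabricksInstanceName, WithSparkAppID, WithSparkClusterID, WithSparkClusterName) with a single WithResource option; resource attributes are now assembled by the generated ResourceBuilder that factory.go wires up via metadata.NewResourceBuilder. A minimal sketch of the new calling pattern, assuming the generated builder exposes Set* methods named after the ResourceAttributesConfig fields and an Emit() method returning a pcommon.Resource (setter names and Emit() are assumptions inferred from the config fields exercised in the tests; they are not shown in this diff):

	// Hypothetical migration sketch, not part of this diff.
	rb := metadata.NewResourceBuilder(dbrcfg.MetricsBuilderConfig.ResourceAttributes)
	rb.SetDatabricksInstanceName("my-workspace") // previously: WithDatabricksInstanceName("my-workspace")
	rb.SetSparkClusterID("cluster-123")          // previously: WithSparkClusterID("cluster-123")
	// The Enabled flags from resource_attributes config are assumed to be
	// honored inside the builder's setters rather than inside each option.
	metrics := mb.Emit(metadata.WithResource(rb.Emit()))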
func (mb *MetricsBuilder) EmitForResource(rmo ...ResourceMetricsOption) { rm := pmetric.NewResourceMetrics() - rm.Resource().Attributes().EnsureCapacity(mb.resourceCapacity) ils := rm.ScopeMetrics().AppendEmpty() ils.Scope().SetName("otelcol/databricksreceiver") ils.Scope().SetVersion(mb.buildInfo.Version) @@ -8207,7 +8172,7 @@ func (mb *MetricsBuilder) EmitForResource(rmo ...ResourceMetricsOption) { mb.metricDatabricksTasksScheduleStatus.emit(ils.Metrics()) for _, op := range rmo { - op(mb.resourceAttributesConfig, rm) + op(rm) } if ils.Metrics().Len() > 0 { mb.updateCapacity(rm) diff --git a/internal/receiver/databricksreceiver/internal/metadata/generated_metrics_test.go b/internal/receiver/databricksreceiver/internal/metadata/generated_metrics_test.go index 6758ee85b9..b09bbed45b 100644 --- a/internal/receiver/databricksreceiver/internal/metadata/generated_metrics_test.go +++ b/internal/receiver/databricksreceiver/internal/metadata/generated_metrics_test.go @@ -60,11 +60,11 @@ func TestMetricsBuilder(t *testing.T) { defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksJobsRunDurationDataPoint(ts, 1, 1) + mb.RecordDatabricksJobsRunDurationDataPoint(ts, 1, 6) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksJobsScheduleStatusDataPoint(ts, 1, 1) + mb.RecordDatabricksJobsScheduleStatusDataPoint(ts, 1, 6) defaultMetricsCount++ allMetricsCount++ @@ -72,549 +72,551 @@ func TestMetricsBuilder(t *testing.T) { defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryDiskSpaceUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryDiskSpaceUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryMaxDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryMaxDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryOffHeapMaxDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryOffHeapMaxDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryOffHeapUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryOffHeapUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryOnHeapMaxDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryOnHeapMaxDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryOnHeapUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryOnHeapUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryRemainingDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryRemainingDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 
"pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryRemainingOffHeapDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryRemainingOffHeapDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryRemainingOnHeapDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryRemainingOnHeapDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkBlockManagerMemoryUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkBlockManagerMemoryUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkCodeGeneratorCompilationTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkCodeGeneratorCompilationTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkCodeGeneratorGeneratedClassSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkCodeGeneratorGeneratedClassSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkCodeGeneratorGeneratedMethodSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkCodeGeneratorGeneratedMethodSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkCodeGeneratorSourcecodeSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkCodeGeneratorSourcecodeSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDagSchedulerJobsActiveDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDagSchedulerJobsActiveDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDagSchedulerJobsAllDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDagSchedulerJobsAllDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDagSchedulerStagesFailedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDagSchedulerStagesFailedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDagSchedulerStagesRunningDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDagSchedulerStagesRunningDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDagSchedulerStagesWaitingDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", 
"attr-val") + mb.RecordDatabricksSparkDagSchedulerStagesWaitingDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitAutoVacuumCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitAutoVacuumCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitDeletedFilesFilteredDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitDeletedFilesFilteredDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitFilterListingCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitFilterListingCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitJobCommitCompletedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitJobCommitCompletedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerReadErrorsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerReadErrorsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerRefreshCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerRefreshCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerRefreshErrorsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkerRefreshErrorsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkersReadDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitMarkersReadDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitRepeatedListCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitRepeatedListCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitUncommittedFilesFilteredDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitUncommittedFilesFilteredDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ 
allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitUntrackedFilesFoundDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitUntrackedFilesFoundDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitVacuumCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitVacuumCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksDirectoryCommitVacuumErrorsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksDirectoryCommitVacuumErrorsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionChecksCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionChecksCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionPoolsAutoexpiredCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionPoolsAutoexpiredCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionPoolstarvationTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionPoolstarvationTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionSchedulerOverheadTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionSchedulerOverheadTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionTaskWastedTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionTaskWastedTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksPreemptionTasksPreemptedCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksPreemptionTasksPreemptedCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesActivePoolsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesActivePoolsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesBypassLaneActivePoolsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesBypassLaneActivePoolsDataPoint(ts, 1, "cluster.id-val", 
"spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesFastLaneActivePoolsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesFastLaneActivePoolsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesFinishedQueriesTotalTaskTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesFinishedQueriesTotalTaskTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupMarkedPoolsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupMarkedPoolsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupTwoPhasePoolsCleanedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupTwoPhasePoolsCleanedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupZombiePoolsCleanedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesLaneCleanupZombiePoolsCleanedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferSuccessfulPreemptionIterationsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferSuccessfulPreemptionIterationsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferTasksPreemptedCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferTasksPreemptedCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferWastedTaskTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesPreemptionSlotTransferWastedTaskTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationGradualDecreaseCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - 
mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickDropCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickDropCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickJumpCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationQuickJumpCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationSlotsReservedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlotReservationSlotsReservedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlowLaneActivePoolsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesSlowLaneActivePoolsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesTotalquerygroupsfinishedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkDatabricksTaskSchedulingLanesTotalquerygroupsfinishedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorDiskUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorDiskUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMaxMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMaxMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMemoryUsedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMemoryUsedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorTotalInputBytesDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorTotalInputBytesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorTotalShuffleReadDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorTotalShuffleReadDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorTotalShuffleWriteDataPoint(ts, 1, "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorTotalShuffleWriteDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "spark.executor.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsDirectPoolMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + 
mb.RecordDatabricksSparkExecutorMetricsDirectPoolMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsJvmHeapMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsJvmHeapMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsJvmOffHeapMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsJvmOffHeapMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsMajorGcCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsMajorGcCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsMajorGcTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsMajorGcTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsMappedPoolMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsMappedPoolMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsMinorGcCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsMinorGcCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsMinorGcTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsMinorGcTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOffHeapExecutionMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOffHeapExecutionMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOffHeapStorageMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOffHeapStorageMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOffHeapUnifiedMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOffHeapUnifiedMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOnHeapExecutionMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOnHeapExecutionMemoryDataPoint(ts, 1, "cluster.id-val", 
"spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOnHeapStorageMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOnHeapStorageMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsOnHeapUnifiedMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsOnHeapUnifiedMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreeJvmRssMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreeJvmRssMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreeJvmVMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreeJvmVMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreeOtherRssMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreeOtherRssMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreeOtherVMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreeOtherVMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreePythonRssMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreePythonRssMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkExecutorMetricsProcessTreePythonVMemoryDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkExecutorMetricsProcessTreePythonVMemoryDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkHiveExternalCatalogFileCacheHitsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkHiveExternalCatalogFileCacheHitsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkHiveExternalCatalogFilesDiscoveredDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkHiveExternalCatalogFilesDiscoveredDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkHiveExternalCatalogHiveClientCallsDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkHiveExternalCatalogHiveClientCallsDataPoint(ts, 1, 
"cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkHiveExternalCatalogParallelListingJobsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkHiveExternalCatalogParallelListingJobsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkHiveExternalCatalogPartitionsFetchedDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkHiveExternalCatalogPartitionsFetchedDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumActiveStagesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumActiveStagesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumActiveTasksDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumActiveTasksDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumCompletedStagesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumCompletedStagesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumCompletedTasksDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumCompletedTasksDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumFailedStagesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumFailedStagesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumFailedTasksDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumFailedTasksDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumSkippedStagesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumSkippedStagesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumSkippedTasksDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumSkippedTasksDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJobNumTasksDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkJobNumTasksDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkJvmCPUTimeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkJvmCPUTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusEventsPostedCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusEventsPostedCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueAppStatusDroppedEventsCountDataPoint(ts, 1, "attr-val", "attr-val", 
"attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueAppStatusDroppedEventsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueAppstatusSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueAppstatusSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueExecutorManagementDroppedEventsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueExecutorManagementDroppedEventsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueExecutormanagementSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueExecutormanagementSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueSharedDroppedEventsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueSharedDroppedEventsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueSharedSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueSharedSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueStreamsDroppedEventsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueStreamsDroppedEventsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkLiveListenerBusQueueStreamsSizeDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkLiveListenerBusQueueStreamsSizeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkSparkSQLOperationManagerHiveOperationsCountDataPoint(ts, 1, "attr-val", "attr-val", "attr-val", "attr-val") + mb.RecordDatabricksSparkSparkSQLOperationManagerHiveOperationsCountDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", "pipeline.id-val", "pipeline.name-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageDiskBytesSpilledDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageDiskBytesSpilledDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageExecutorRunTimeDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageExecutorRunTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageInputBytesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageInputBytesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) 
defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageInputRecordsDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageInputRecordsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageMemoryBytesSpilledDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageMemoryBytesSpilledDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageOutputBytesDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageOutputBytesDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkStageOutputRecordsDataPoint(ts, 1, "attr-val", "attr-val", 1) + mb.RecordDatabricksSparkStageOutputRecordsDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val", 12) defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerDagSchedulerMessageProcessingTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerDagSchedulerMessageProcessingTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionStreamingQueryListenerBusTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionStreamingQueryListenerBusTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionUISQLAppStatusListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLExecutionUISQLAppStatusListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLHiveThriftserverUIHiveThriftServer2listenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLHiveThriftserverUIHiveThriftServer2listenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLSparkSessionTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLSparkSessionTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLUtilExecutionListenerBusTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkSQLUtilExecutionListenerBusTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkStatusAppStatusListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + 
mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkStatusAppStatusListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkUtilProfilerEnvTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingApacheSparkUtilProfilerEnvTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksBackendDaemonDriverDataPlaneEventListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksBackendDaemonDriverDataPlaneEventListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksBackendDaemonDriverDbcEventLoggingListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksBackendDaemonDriverDbcEventLoggingListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksPhotonPhotonCleanupListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksPhotonPhotonCleanupListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSparkUtilExecutorTimeLoggingListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSparkUtilExecutorTimeLoggingListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSparkUtilUsageLoggingListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSparkUtilUsageLoggingListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLAdviceAdvisorListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLAdviceAdvisorListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLDebuggerQueryWatchdogListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLDebuggerQueryWatchdogListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLExecutionUIIoCacheListenerTimeDataPoint(ts, 1, "attr-val", "attr-val") + mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLExecutionUIIoCacheListenerTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val") defaultMetricsCount++ allMetricsCount++ - mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLIoCachingRepeatedReadsEstimatorTimeDataPoint(ts, 1, "attr-val", "attr-val") + 
mb.RecordDatabricksSparkTimerLiveListenerBusListenerProcessingDatabricksSQLIoCachingRepeatedReadsEstimatorTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksSparkTimerLiveListenerBusQueueAppStatusListenerProcessingTimeDataPoint(ts, 1, "attr-val", "attr-val")
+ mb.RecordDatabricksSparkTimerLiveListenerBusQueueAppStatusListenerProcessingTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksSparkTimerLiveListenerBusQueueExecutorManagementListenerProcessingTimeDataPoint(ts, 1, "attr-val", "attr-val")
+ mb.RecordDatabricksSparkTimerLiveListenerBusQueueExecutorManagementListenerProcessingTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksSparkTimerLiveListenerBusQueueSharedListenerProcessingTimeDataPoint(ts, 1, "attr-val", "attr-val")
+ mb.RecordDatabricksSparkTimerLiveListenerBusQueueSharedListenerProcessingTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksSparkTimerLiveListenerBusQueueStreamsListenerProcessingTimeDataPoint(ts, 1, "attr-val", "attr-val")
+ mb.RecordDatabricksSparkTimerLiveListenerBusQueueStreamsListenerProcessingTimeDataPoint(ts, 1, "cluster.id-val", "spark.app.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksTasksRunDurationDataPoint(ts, 1, 1, "attr-val")
+ mb.RecordDatabricksTasksRunDurationDataPoint(ts, 1, 6, "task.id-val")
 defaultMetricsCount++
 allMetricsCount++
- mb.RecordDatabricksTasksScheduleStatusDataPoint(ts, 1, 1, "attr-val", AttributeTaskType(1))
+ mb.RecordDatabricksTasksScheduleStatusDataPoint(ts, 1, 6, "task.id-val", AttributeTaskTypeNotebookTask)

- metrics := mb.Emit(WithDatabricksInstanceName("attr-val"), WithSparkAppID("attr-val"), WithSparkClusterID("attr-val"), WithSparkClusterName("attr-val"))
+ res := pcommon.NewResource()
+ res.Attributes().PutStr("k1", "v1")
+ metrics := mb.Emit(WithResource(res))

 if test.configSet == testSetNone {
 assert.Equal(t, 0, metrics.ResourceMetrics().Len())
@@ -623,39 +625,7 @@ func TestMetricsBuilder(t *testing.T) {

 assert.Equal(t, 1, metrics.ResourceMetrics().Len())
 rm := metrics.ResourceMetrics().At(0)
- attrCount := 0
- enabledAttrCount := 0
- attrVal, ok := rm.Resource().Attributes().Get("databricks.instance.name")
- attrCount++
- assert.Equal(t, mb.resourceAttributesConfig.DatabricksInstanceName.Enabled, ok)
- if mb.resourceAttributesConfig.DatabricksInstanceName.Enabled {
- enabledAttrCount++
- assert.EqualValues(t, "attr-val", attrVal.Str())
- }
- attrVal, ok = rm.Resource().Attributes().Get("spark.app.id")
- attrCount++
- assert.Equal(t, mb.resourceAttributesConfig.SparkAppID.Enabled, ok)
- if mb.resourceAttributesConfig.SparkAppID.Enabled {
- enabledAttrCount++
- assert.EqualValues(t, "attr-val", attrVal.Str())
- }
- attrVal, ok = rm.Resource().Attributes().Get("spark.cluster.id")
- attrCount++
- assert.Equal(t, mb.resourceAttributesConfig.SparkClusterID.Enabled, ok)
- if mb.resourceAttributesConfig.SparkClusterID.Enabled {
- enabledAttrCount++
- assert.EqualValues(t, "attr-val", attrVal.Str())
- }
- attrVal, ok = rm.Resource().Attributes().Get("spark.cluster.name")
- attrCount++
- assert.Equal(t, mb.resourceAttributesConfig.SparkClusterName.Enabled, ok)
- if mb.resourceAttributesConfig.SparkClusterName.Enabled {
- enabledAttrCount++
- assert.EqualValues(t, "attr-val", attrVal.Str())
- }
- assert.Equal(t, enabledAttrCount, rm.Resource().Attributes().Len())
- assert.Equal(t, attrCount, 4)
-
+ assert.Equal(t, res, rm.Resource())
 assert.Equal(t, 1, rm.ScopeMetrics().Len())
 ms := rm.ScopeMetrics().At(0).Metrics()
 if test.configSet == testSetDefault {
@@ -693,7 +663,7 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("job.id")
 assert.True(t, ok)
- assert.EqualValues(t, 1, attrVal.Int())
+ assert.EqualValues(t, 6, attrVal.Int())
 case "databricks.jobs.schedule.status":
 assert.False(t, validatedMetrics["databricks.jobs.schedule.status"], "Found a duplicate in the metrics slice: databricks.jobs.schedule.status")
 validatedMetrics["databricks.jobs.schedule.status"] = true
@@ -708,7 +678,7 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("job.id")
 assert.True(t, ok)
- assert.EqualValues(t, 1, attrVal.Int())
+ assert.EqualValues(t, 6, attrVal.Int())
 case "databricks.jobs.total":
 assert.False(t, validatedMetrics["databricks.jobs.total"], "Found a duplicate in the metrics slice: databricks.jobs.total")
 validatedMetrics["databricks.jobs.total"] = true
@@ -735,16 +705,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.max":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.max"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.max")
 validatedMetrics["databricks.spark.block_manager.memory.max"] = true
@@ -759,16 +729,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.off_heap.max":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.off_heap.max"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.off_heap.max")
 validatedMetrics["databricks.spark.block_manager.memory.off_heap.max"] = true
@@ -783,16 +753,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.off_heap.used":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.off_heap.used"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.off_heap.used")
 validatedMetrics["databricks.spark.block_manager.memory.off_heap.used"] = true
@@ -807,16 +777,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.on_heap.max":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.on_heap.max"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.on_heap.max")
 validatedMetrics["databricks.spark.block_manager.memory.on_heap.max"] = true
@@ -831,16 +801,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.on_heap.used":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.on_heap.used"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.on_heap.used")
 validatedMetrics["databricks.spark.block_manager.memory.on_heap.used"] = true
@@ -855,16 +825,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.remaining":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.remaining"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.remaining")
 validatedMetrics["databricks.spark.block_manager.memory.remaining"] = true
@@ -879,16 +849,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.remaining.off_heap":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.remaining.off_heap"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.remaining.off_heap")
 validatedMetrics["databricks.spark.block_manager.memory.remaining.off_heap"] = true
@@ -903,16 +873,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.block_manager.memory.remaining.on_heap":
 assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.remaining.on_heap"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.remaining.on_heap")
 validatedMetrics["databricks.spark.block_manager.memory.remaining.on_heap"] = true
@@ -927,16 +897,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.block_manager.memory.used": assert.False(t, validatedMetrics["databricks.spark.block_manager.memory.used"], "Found a duplicate in the metrics slice: databricks.spark.block_manager.memory.used") validatedMetrics["databricks.spark.block_manager.memory.used"] = true @@ -951,16 +921,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.code_generator.compilation.time": assert.False(t, validatedMetrics["databricks.spark.code_generator.compilation.time"], "Found a duplicate in the metrics slice: databricks.spark.code_generator.compilation.time") validatedMetrics["databricks.spark.code_generator.compilation.time"] = true @@ -975,16 +945,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.code_generator.generated_class_size": assert.False(t, validatedMetrics["databricks.spark.code_generator.generated_class_size"], "Found a duplicate in the metrics slice: databricks.spark.code_generator.generated_class_size") validatedMetrics["databricks.spark.code_generator.generated_class_size"] = true @@ -999,16 +969,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") 
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.code_generator.generated_method_size":
 assert.False(t, validatedMetrics["databricks.spark.code_generator.generated_method_size"], "Found a duplicate in the metrics slice: databricks.spark.code_generator.generated_method_size")
 validatedMetrics["databricks.spark.code_generator.generated_method_size"] = true
@@ -1023,16 +993,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.code_generator.sourcecode_size":
 assert.False(t, validatedMetrics["databricks.spark.code_generator.sourcecode_size"], "Found a duplicate in the metrics slice: databricks.spark.code_generator.sourcecode_size")
 validatedMetrics["databricks.spark.code_generator.sourcecode_size"] = true
@@ -1047,16 +1017,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.dag_scheduler.jobs.active":
 assert.False(t, validatedMetrics["databricks.spark.dag_scheduler.jobs.active"], "Found a duplicate in the metrics slice: databricks.spark.dag_scheduler.jobs.active")
 validatedMetrics["databricks.spark.dag_scheduler.jobs.active"] = true
@@ -1071,16 +1041,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.dag_scheduler.jobs.all":
 assert.False(t, validatedMetrics["databricks.spark.dag_scheduler.jobs.all"], "Found a duplicate in the metrics slice: databricks.spark.dag_scheduler.jobs.all")
 validatedMetrics["databricks.spark.dag_scheduler.jobs.all"] = true
@@ -1095,16 +1065,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.dag_scheduler.stages.failed":
 assert.False(t, validatedMetrics["databricks.spark.dag_scheduler.stages.failed"], "Found a duplicate in the metrics slice: databricks.spark.dag_scheduler.stages.failed")
 validatedMetrics["databricks.spark.dag_scheduler.stages.failed"] = true
@@ -1119,16 +1089,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.dag_scheduler.stages.running":
 assert.False(t, validatedMetrics["databricks.spark.dag_scheduler.stages.running"], "Found a duplicate in the metrics slice: databricks.spark.dag_scheduler.stages.running")
 validatedMetrics["databricks.spark.dag_scheduler.stages.running"] = true
@@ -1143,16 +1113,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.dag_scheduler.stages.waiting":
 assert.False(t, validatedMetrics["databricks.spark.dag_scheduler.stages.waiting"], "Found a duplicate in the metrics slice: databricks.spark.dag_scheduler.stages.waiting")
 validatedMetrics["databricks.spark.dag_scheduler.stages.waiting"] = true
@@ -1167,16 +1137,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, float64(1), dp.DoubleValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.auto_vacuum.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.auto_vacuum.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.auto_vacuum.count")
 validatedMetrics["databricks.spark.databricks.directory_commit.auto_vacuum.count"] = true
@@ -1193,16 +1163,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.deleted_files_filtered":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.deleted_files_filtered"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.deleted_files_filtered")
 validatedMetrics["databricks.spark.databricks.directory_commit.deleted_files_filtered"] = true
@@ -1219,16 +1189,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.filter_listing.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.filter_listing.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.filter_listing.count")
 validatedMetrics["databricks.spark.databricks.directory_commit.filter_listing.count"] = true
@@ -1245,16 +1215,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.job_commit_completed":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.job_commit_completed"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.job_commit_completed")
 validatedMetrics["databricks.spark.databricks.directory_commit.job_commit_completed"] = true
@@ -1271,16 +1241,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.marker_read.errors":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.marker_read.errors"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.marker_read.errors")
 validatedMetrics["databricks.spark.databricks.directory_commit.marker_read.errors"] = true
@@ -1297,16 +1267,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.marker_refresh.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.marker_refresh.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.marker_refresh.count")
 validatedMetrics["databricks.spark.databricks.directory_commit.marker_refresh.count"] = true
@@ -1323,16 +1293,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.marker_refresh.errors":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.marker_refresh.errors"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.marker_refresh.errors")
 validatedMetrics["databricks.spark.databricks.directory_commit.marker_refresh.errors"] = true
@@ -1349,16 +1319,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.markers.read":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.markers.read"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.markers.read")
 validatedMetrics["databricks.spark.databricks.directory_commit.markers.read"] = true
@@ -1375,16 +1345,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.repeated_list.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.repeated_list.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.repeated_list.count")
 validatedMetrics["databricks.spark.databricks.directory_commit.repeated_list.count"] = true
@@ -1401,16 +1371,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.uncommitted_files.filtered":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.uncommitted_files.filtered"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.uncommitted_files.filtered")
 validatedMetrics["databricks.spark.databricks.directory_commit.uncommitted_files.filtered"] = true
@@ -1427,16 +1397,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.untracked_files.found":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.untracked_files.found"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.untracked_files.found")
 validatedMetrics["databricks.spark.databricks.directory_commit.untracked_files.found"] = true
@@ -1453,16 +1423,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.vacuum.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.vacuum.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.vacuum.count")
 validatedMetrics["databricks.spark.databricks.directory_commit.vacuum.count"] = true
@@ -1479,16 +1449,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.directory_commit.vacuum.errors":
 assert.False(t, validatedMetrics["databricks.spark.databricks.directory_commit.vacuum.errors"], "Found a duplicate in the metrics slice: databricks.spark.databricks.directory_commit.vacuum.errors")
 validatedMetrics["databricks.spark.databricks.directory_commit.vacuum.errors"] = true
@@ -1505,16 +1475,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.checks.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.checks.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.checks.count")
 validatedMetrics["databricks.spark.databricks.preemption.checks.count"] = true
@@ -1531,16 +1501,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.pools_autoexpired.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.pools_autoexpired.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.pools_autoexpired.count")
 validatedMetrics["databricks.spark.databricks.preemption.pools_autoexpired.count"] = true
@@ -1557,16 +1527,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.poolstarvation.time":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.poolstarvation.time"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.poolstarvation.time")
 validatedMetrics["databricks.spark.databricks.preemption.poolstarvation.time"] = true
@@ -1583,16 +1553,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.scheduler_overhead.time":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.scheduler_overhead.time"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.scheduler_overhead.time")
 validatedMetrics["databricks.spark.databricks.preemption.scheduler_overhead.time"] = true
@@ -1609,16 +1579,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.task_wasted.time":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.task_wasted.time"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.task_wasted.time")
 validatedMetrics["databricks.spark.databricks.preemption.task_wasted.time"] = true
@@ -1635,16 +1605,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.preemption.tasks_preempted.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.preemption.tasks_preempted.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.preemption.tasks_preempted.count")
 validatedMetrics["databricks.spark.databricks.preemption.tasks_preempted.count"] = true
@@ -1661,16 +1631,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.active_pools":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.active_pools"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.active_pools")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.active_pools"] = true
@@ -1687,16 +1657,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.bypass_lane_active_pools":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.bypass_lane_active_pools"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.bypass_lane_active_pools")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.bypass_lane_active_pools"] = true
@@ -1713,16 +1683,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.fast_lane_active_pools":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.fast_lane_active_pools"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.fast_lane_active_pools")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.fast_lane_active_pools"] = true
@@ -1739,16 +1709,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.finished_queries_total_task.time":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.finished_queries_total_task.time"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.finished_queries_total_task.time")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.finished_queries_total_task.time"] = true
@@ -1765,16 +1735,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.marked_pools":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.marked_pools"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.marked_pools")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.marked_pools"] = true
@@ -1791,16 +1761,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.two_phase_pools_cleaned":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.two_phase_pools_cleaned"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.two_phase_pools_cleaned")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.two_phase_pools_cleaned"] = true
@@ -1817,16 +1787,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.zombie_pools_cleaned":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.zombie_pools_cleaned"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.zombie_pools_cleaned")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.lane_cleanup.zombie_pools_cleaned"] = true
@@ -1843,16 +1813,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_successful_preemption_iterations.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_successful_preemption_iterations.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_successful_preemption_iterations.count")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_successful_preemption_iterations.count"] = true
@@ -1869,16 +1839,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_tasks_preempted.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_tasks_preempted.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_tasks_preempted.count")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_tasks_preempted.count"] = true
@@ -1895,16 +1865,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_wasted_task.time":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_wasted_task.time"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_wasted_task.time")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.preemption.slot_transfer_wasted_task.time"] = true
@@ -1921,16 +1891,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.slot_reservation.gradual_decrease.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.gradual_decrease.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.slot_reservation.gradual_decrease.count")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.gradual_decrease.count"] = true
@@ -1947,16 +1917,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_drop.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_drop.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_drop.count")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_drop.count"] = true
@@ -1973,16 +1943,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_jump.count":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_jump.count"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_jump.count")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.quick_jump.count"] = true
@@ -1999,16 +1969,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.slot_reservation.slots_reserved":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.slots_reserved"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.slot_reservation.slots_reserved")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slot_reservation.slots_reserved"] = true
@@ -2025,16 +1995,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.slow_lane_active_pools":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slow_lane_active_pools"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.slow_lane_active_pools")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.slow_lane_active_pools"] = true
@@ -2051,16 +2021,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.databricks.task_scheduling_lanes.totalquerygroupsfinished":
 assert.False(t, validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.totalquerygroupsfinished"], "Found a duplicate in the metrics slice: databricks.spark.databricks.task_scheduling_lanes.totalquerygroupsfinished")
 validatedMetrics["databricks.spark.databricks.task_scheduling_lanes.totalquerygroupsfinished"] = true
@@ -2077,16 +2047,16 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("pipeline.name")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "pipeline.name-val", attrVal.Str())
 case "databricks.spark.executor.disk_used":
 assert.False(t, validatedMetrics["databricks.spark.executor.disk_used"], "Found a duplicate in the metrics slice: databricks.spark.executor.disk_used")
 validatedMetrics["databricks.spark.executor.disk_used"] = true
@@ -2101,13 +2071,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor.max_memory":
 assert.False(t, validatedMetrics["databricks.spark.executor.max_memory"], "Found a duplicate in the metrics slice: databricks.spark.executor.max_memory")
 validatedMetrics["databricks.spark.executor.max_memory"] = true
@@ -2122,13 +2092,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor.memory_used":
 assert.False(t, validatedMetrics["databricks.spark.executor.memory_used"], "Found a duplicate in the metrics slice: databricks.spark.executor.memory_used")
 validatedMetrics["databricks.spark.executor.memory_used"] = true
@@ -2143,13 +2113,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor.total_input_bytes":
 assert.False(t, validatedMetrics["databricks.spark.executor.total_input_bytes"], "Found a duplicate in the metrics slice: databricks.spark.executor.total_input_bytes")
 validatedMetrics["databricks.spark.executor.total_input_bytes"] = true
@@ -2166,13 +2136,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor.total_shuffle_read":
 assert.False(t, validatedMetrics["databricks.spark.executor.total_shuffle_read"], "Found a duplicate in the metrics slice: databricks.spark.executor.total_shuffle_read")
 validatedMetrics["databricks.spark.executor.total_shuffle_read"] = true
@@ -2189,13 +2159,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor.total_shuffle_write":
 assert.False(t, validatedMetrics["databricks.spark.executor.total_shuffle_write"], "Found a duplicate in the metrics slice: databricks.spark.executor.total_shuffle_write")
 validatedMetrics["databricks.spark.executor.total_shuffle_write"] = true
@@ -2212,13 +2182,13 @@ func TestMetricsBuilder(t *testing.T) {
 assert.Equal(t, int64(1), dp.IntValue())
 attrVal, ok := dp.Attributes().Get("cluster.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "cluster.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.app.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.app.id-val", attrVal.Str())
 attrVal, ok = dp.Attributes().Get("spark.executor.id")
 assert.True(t, ok)
- assert.EqualValues(t, "attr-val", attrVal.Str())
+ assert.EqualValues(t, "spark.executor.id-val", attrVal.Str())
 case "databricks.spark.executor_metrics.direct_pool.memory":
 assert.False(t,
validatedMetrics["databricks.spark.executor_metrics.direct_pool.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.direct_pool.memory") validatedMetrics["databricks.spark.executor_metrics.direct_pool.memory"] = true @@ -2233,16 +2203,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.jvm.heap.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.jvm.heap.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.jvm.heap.memory") validatedMetrics["databricks.spark.executor_metrics.jvm.heap.memory"] = true @@ -2257,16 +2227,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.jvm.off_heap.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.jvm.off_heap.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.jvm.off_heap.memory") validatedMetrics["databricks.spark.executor_metrics.jvm.off_heap.memory"] = true @@ -2281,16 +2251,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.major_gc.count": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.major_gc.count"], "Found a duplicate in the metrics slice: 
databricks.spark.executor_metrics.major_gc.count") validatedMetrics["databricks.spark.executor_metrics.major_gc.count"] = true @@ -2305,16 +2275,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.major_gc.time": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.major_gc.time"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.major_gc.time") validatedMetrics["databricks.spark.executor_metrics.major_gc.time"] = true @@ -2329,16 +2299,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.mapped_pool.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.mapped_pool.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.mapped_pool.memory") validatedMetrics["databricks.spark.executor_metrics.mapped_pool.memory"] = true @@ -2353,16 +2323,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.minor_gc.count": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.minor_gc.count"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.minor_gc.count") validatedMetrics["databricks.spark.executor_metrics.minor_gc.count"] = true @@ -2377,16 
+2347,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.minor_gc.time": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.minor_gc.time"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.minor_gc.time") validatedMetrics["databricks.spark.executor_metrics.minor_gc.time"] = true @@ -2401,16 +2371,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.off_heap.execution.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.off_heap.execution.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.off_heap.execution.memory") validatedMetrics["databricks.spark.executor_metrics.off_heap.execution.memory"] = true @@ -2425,16 +2395,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.off_heap.storage.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.off_heap.storage.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.off_heap.storage.memory") validatedMetrics["databricks.spark.executor_metrics.off_heap.storage.memory"] = true @@ -2449,16 +2419,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, 
float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.off_heap.unified.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.off_heap.unified.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.off_heap.unified.memory") validatedMetrics["databricks.spark.executor_metrics.off_heap.unified.memory"] = true @@ -2473,16 +2443,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.on_heap.execution.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.on_heap.execution.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.on_heap.execution.memory") validatedMetrics["databricks.spark.executor_metrics.on_heap.execution.memory"] = true @@ -2497,16 +2467,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.on_heap.storage.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.on_heap.storage.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.on_heap.storage.memory") validatedMetrics["databricks.spark.executor_metrics.on_heap.storage.memory"] = true @@ -2521,16 +2491,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := 
dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.on_heap.unified.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.on_heap.unified.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.on_heap.unified.memory") validatedMetrics["databricks.spark.executor_metrics.on_heap.unified.memory"] = true @@ -2545,16 +2515,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.jvm_rss.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.jvm_rss.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.jvm_rss.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.jvm_rss.memory"] = true @@ -2569,16 +2539,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.jvm_v.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.jvm_v.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.jvm_v.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.jvm_v.memory"] = true @@ -2593,16 +2563,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := 
dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.other_rss.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.other_rss.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.other_rss.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.other_rss.memory"] = true @@ -2617,16 +2587,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.other_v.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.other_v.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.other_v.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.other_v.memory"] = true @@ -2641,16 +2611,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.python_rss.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.python_rss.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.python_rss.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.python_rss.memory"] = true @@ -2665,16 +2635,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), 
dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.executor_metrics.process_tree.python_v.memory": assert.False(t, validatedMetrics["databricks.spark.executor_metrics.process_tree.python_v.memory"], "Found a duplicate in the metrics slice: databricks.spark.executor_metrics.process_tree.python_v.memory") validatedMetrics["databricks.spark.executor_metrics.process_tree.python_v.memory"] = true @@ -2689,16 +2659,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.hive_external_catalog.file_cache.hits": assert.False(t, validatedMetrics["databricks.spark.hive_external_catalog.file_cache.hits"], "Found a duplicate in the metrics slice: databricks.spark.hive_external_catalog.file_cache.hits") validatedMetrics["databricks.spark.hive_external_catalog.file_cache.hits"] = true @@ -2715,16 +2685,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.hive_external_catalog.files_discovered": assert.False(t, validatedMetrics["databricks.spark.hive_external_catalog.files_discovered"], "Found a duplicate in the metrics slice: databricks.spark.hive_external_catalog.files_discovered") validatedMetrics["databricks.spark.hive_external_catalog.files_discovered"] = true @@ -2741,16 +2711,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := 
dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.hive_external_catalog.hive_client_calls": assert.False(t, validatedMetrics["databricks.spark.hive_external_catalog.hive_client_calls"], "Found a duplicate in the metrics slice: databricks.spark.hive_external_catalog.hive_client_calls") validatedMetrics["databricks.spark.hive_external_catalog.hive_client_calls"] = true @@ -2767,16 +2737,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.hive_external_catalog.parallel_listing_jobs.count": assert.False(t, validatedMetrics["databricks.spark.hive_external_catalog.parallel_listing_jobs.count"], "Found a duplicate in the metrics slice: databricks.spark.hive_external_catalog.parallel_listing_jobs.count") validatedMetrics["databricks.spark.hive_external_catalog.parallel_listing_jobs.count"] = true @@ -2793,16 +2763,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.hive_external_catalog.partitions_fetched": assert.False(t, validatedMetrics["databricks.spark.hive_external_catalog.partitions_fetched"], "Found a duplicate in the metrics slice: databricks.spark.hive_external_catalog.partitions_fetched") validatedMetrics["databricks.spark.hive_external_catalog.partitions_fetched"] = true @@ -2819,16 +2789,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := 
dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.job.num_active_stages": assert.False(t, validatedMetrics["databricks.spark.job.num_active_stages"], "Found a duplicate in the metrics slice: databricks.spark.job.num_active_stages") validatedMetrics["databricks.spark.job.num_active_stages"] = true @@ -2843,13 +2813,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_active_tasks": assert.False(t, validatedMetrics["databricks.spark.job.num_active_tasks"], "Found a duplicate in the metrics slice: databricks.spark.job.num_active_tasks") validatedMetrics["databricks.spark.job.num_active_tasks"] = true @@ -2864,13 +2834,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_completed_stages": assert.False(t, validatedMetrics["databricks.spark.job.num_completed_stages"], "Found a duplicate in the metrics slice: databricks.spark.job.num_completed_stages") validatedMetrics["databricks.spark.job.num_completed_stages"] = true @@ -2885,13 +2855,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_completed_tasks": assert.False(t, validatedMetrics["databricks.spark.job.num_completed_tasks"], "Found a duplicate in the metrics slice: 
databricks.spark.job.num_completed_tasks") validatedMetrics["databricks.spark.job.num_completed_tasks"] = true @@ -2906,13 +2876,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_failed_stages": assert.False(t, validatedMetrics["databricks.spark.job.num_failed_stages"], "Found a duplicate in the metrics slice: databricks.spark.job.num_failed_stages") validatedMetrics["databricks.spark.job.num_failed_stages"] = true @@ -2927,13 +2897,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_failed_tasks": assert.False(t, validatedMetrics["databricks.spark.job.num_failed_tasks"], "Found a duplicate in the metrics slice: databricks.spark.job.num_failed_tasks") validatedMetrics["databricks.spark.job.num_failed_tasks"] = true @@ -2948,13 +2918,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_skipped_stages": assert.False(t, validatedMetrics["databricks.spark.job.num_skipped_stages"], "Found a duplicate in the metrics slice: databricks.spark.job.num_skipped_stages") validatedMetrics["databricks.spark.job.num_skipped_stages"] = true @@ -2969,13 +2939,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_skipped_tasks": assert.False(t, validatedMetrics["databricks.spark.job.num_skipped_tasks"], "Found a duplicate in the metrics slice: 
databricks.spark.job.num_skipped_tasks") validatedMetrics["databricks.spark.job.num_skipped_tasks"] = true @@ -2990,13 +2960,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.job.num_tasks": assert.False(t, validatedMetrics["databricks.spark.job.num_tasks"], "Found a duplicate in the metrics slice: databricks.spark.job.num_tasks") validatedMetrics["databricks.spark.job.num_tasks"] = true @@ -3011,13 +2981,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.jvm.cpu.time": assert.False(t, validatedMetrics["databricks.spark.jvm.cpu.time"], "Found a duplicate in the metrics slice: databricks.spark.jvm.cpu.time") validatedMetrics["databricks.spark.jvm.cpu.time"] = true @@ -3034,16 +3004,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.events_posted.count": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.events_posted.count"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.events_posted.count") validatedMetrics["databricks.spark.live_listener_bus.events_posted.count"] = true @@ -3060,16 +3030,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + 
assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.app_status.dropped_events.count": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.app_status.dropped_events.count"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.app_status.dropped_events.count") validatedMetrics["databricks.spark.live_listener_bus.queue.app_status.dropped_events.count"] = true @@ -3086,16 +3056,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.appstatus.size": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.appstatus.size"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.appstatus.size") validatedMetrics["databricks.spark.live_listener_bus.queue.appstatus.size"] = true @@ -3110,16 +3080,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.executor_management.dropped_events.count": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.executor_management.dropped_events.count"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.executor_management.dropped_events.count") validatedMetrics["databricks.spark.live_listener_bus.queue.executor_management.dropped_events.count"] = true @@ -3136,16 +3106,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = 
dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.executormanagement.size": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.executormanagement.size"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.executormanagement.size") validatedMetrics["databricks.spark.live_listener_bus.queue.executormanagement.size"] = true @@ -3160,16 +3130,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.shared.dropped_events.count": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.shared.dropped_events.count"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.shared.dropped_events.count") validatedMetrics["databricks.spark.live_listener_bus.queue.shared.dropped_events.count"] = true @@ -3186,16 +3156,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.shared.size": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.shared.size"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.shared.size") validatedMetrics["databricks.spark.live_listener_bus.queue.shared.size"] = true @@ -3210,16 +3180,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok 
= dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.streams.dropped_events.count": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.streams.dropped_events.count"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.streams.dropped_events.count") validatedMetrics["databricks.spark.live_listener_bus.queue.streams.dropped_events.count"] = true @@ -3236,16 +3206,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.live_listener_bus.queue.streams.size": assert.False(t, validatedMetrics["databricks.spark.live_listener_bus.queue.streams.size"], "Found a duplicate in the metrics slice: databricks.spark.live_listener_bus.queue.streams.size") validatedMetrics["databricks.spark.live_listener_bus.queue.streams.size"] = true @@ -3260,16 +3230,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.spark_sql_operation_manager.hive_operations.count": assert.False(t, validatedMetrics["databricks.spark.spark_sql_operation_manager.hive_operations.count"], "Found a duplicate in the metrics slice: databricks.spark.spark_sql_operation_manager.hive_operations.count") validatedMetrics["databricks.spark.spark_sql_operation_manager.hive_operations.count"] = true @@ -3284,16 +3254,16 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", 
attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("pipeline.name") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "pipeline.name-val", attrVal.Str()) case "databricks.spark.stage.disk_bytes_spilled": assert.False(t, validatedMetrics["databricks.spark.stage.disk_bytes_spilled"], "Found a duplicate in the metrics slice: databricks.spark.stage.disk_bytes_spilled") validatedMetrics["databricks.spark.stage.disk_bytes_spilled"] = true @@ -3308,13 +3278,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.executor_run_time": assert.False(t, validatedMetrics["databricks.spark.stage.executor_run_time"], "Found a duplicate in the metrics slice: databricks.spark.stage.executor_run_time") validatedMetrics["databricks.spark.stage.executor_run_time"] = true @@ -3329,13 +3299,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.input_bytes": assert.False(t, validatedMetrics["databricks.spark.stage.input_bytes"], "Found a duplicate in the metrics slice: databricks.spark.stage.input_bytes") validatedMetrics["databricks.spark.stage.input_bytes"] = true @@ -3350,13 +3320,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.input_records": assert.False(t, validatedMetrics["databricks.spark.stage.input_records"], "Found a duplicate in the metrics slice: databricks.spark.stage.input_records") validatedMetrics["databricks.spark.stage.input_records"] = true @@ -3371,13 +3341,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, 
"cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.memory_bytes_spilled": assert.False(t, validatedMetrics["databricks.spark.stage.memory_bytes_spilled"], "Found a duplicate in the metrics slice: databricks.spark.stage.memory_bytes_spilled") validatedMetrics["databricks.spark.stage.memory_bytes_spilled"] = true @@ -3392,13 +3362,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.output_bytes": assert.False(t, validatedMetrics["databricks.spark.stage.output_bytes"], "Found a duplicate in the metrics slice: databricks.spark.stage.output_bytes") validatedMetrics["databricks.spark.stage.output_bytes"] = true @@ -3413,13 +3383,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.stage.output_records": assert.False(t, validatedMetrics["databricks.spark.stage.output_records"], "Found a duplicate in the metrics slice: databricks.spark.stage.output_records") validatedMetrics["databricks.spark.stage.output_records"] = true @@ -3434,13 +3404,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 12, attrVal.Int()) case "databricks.spark.timer.dag_scheduler.message_processing.time": assert.False(t, validatedMetrics["databricks.spark.timer.dag_scheduler.message_processing.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.dag_scheduler.message_processing.time") validatedMetrics["databricks.spark.timer.dag_scheduler.message_processing.time"] = true @@ -3457,10 +3427,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") 
assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.streaming.query_listener_bus.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.streaming.query_listener_bus.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.streaming.query_listener_bus.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.streaming.query_listener_bus.time"] = true @@ -3477,10 +3447,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.time"] = true @@ -3497,10 +3467,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.ui.sql_app_status_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.ui.sql_app_status_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.ui.sql_app_status_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.execution.ui.sql_app_status_listener.time"] = true @@ -3517,10 +3487,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.hive.thriftserver.ui.hive_thrift_server2listener.time": assert.False(t, 
validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.hive.thriftserver.ui.hive_thrift_server2listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.hive.thriftserver.ui.hive_thrift_server2listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.hive.thriftserver.ui.hive_thrift_server2listener.time"] = true @@ -3537,10 +3507,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.spark_session.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.spark_session.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.spark_session.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.spark_session.time"] = true @@ -3557,10 +3527,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.util.execution_listener_bus.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.util.execution_listener_bus.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.util.execution_listener_bus.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.sql.util.execution_listener_bus.time"] = true @@ -3577,10 +3547,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.status.app_status_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.status.app_status_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.status.app_status_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.status.app_status_listener.time"] = true @@ -3597,10 +3567,10 @@ func TestMetricsBuilder(t *testing.T) { 
assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.util.profiler_env.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.util.profiler_env.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.util.profiler_env.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.apache.spark.util.profiler_env.time"] = true @@ -3617,10 +3587,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.data_plane_event_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.data_plane_event_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.data_plane_event_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.data_plane_event_listener.time"] = true @@ -3637,10 +3607,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.dbc_event_logging_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.dbc_event_logging_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.dbc_event_logging_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.backend.daemon.driver.dbc_event_logging_listener.time"] = true @@ -3657,10 +3627,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case 
"databricks.spark.timer.live_listener_bus.listener_processing.databricks.photon.photon_cleanup_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.photon.photon_cleanup_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.photon.photon_cleanup_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.photon.photon_cleanup_listener.time"] = true @@ -3677,10 +3647,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.executor_time_logging_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.executor_time_logging_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.executor_time_logging_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.executor_time_logging_listener.time"] = true @@ -3697,10 +3667,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.usage_logging_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.usage_logging_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.usage_logging_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.spark.util.usage_logging_listener.time"] = true @@ -3717,10 +3687,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.advice.advisor_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.advice.advisor_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.advice.advisor_listener.time") 
validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.advice.advisor_listener.time"] = true @@ -3737,10 +3707,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.debugger.query_watchdog_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.debugger.query_watchdog_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.debugger.query_watchdog_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.debugger.query_watchdog_listener.time"] = true @@ -3757,10 +3727,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.execution.ui.io_cache_listener.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.execution.ui.io_cache_listener.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.execution.ui.io_cache_listener.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.execution.ui.io_cache_listener.time"] = true @@ -3777,10 +3747,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.io.caching.repeated_reads_estimator.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.io.caching.repeated_reads_estimator.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.io.caching.repeated_reads_estimator.time") validatedMetrics["databricks.spark.timer.live_listener_bus.listener_processing.databricks.sql.io.caching.repeated_reads_estimator.time"] = true @@ -3797,10 +3767,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = 
dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.queue.app_status.listener_processing.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.queue.app_status.listener_processing.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.queue.app_status.listener_processing.time") validatedMetrics["databricks.spark.timer.live_listener_bus.queue.app_status.listener_processing.time"] = true @@ -3817,10 +3787,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.queue.executor_management.listener_processing.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.queue.executor_management.listener_processing.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.queue.executor_management.listener_processing.time") validatedMetrics["databricks.spark.timer.live_listener_bus.queue.executor_management.listener_processing.time"] = true @@ -3837,10 +3807,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.queue.shared.listener_processing.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.queue.shared.listener_processing.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.queue.shared.listener_processing.time") validatedMetrics["databricks.spark.timer.live_listener_bus.queue.shared.listener_processing.time"] = true @@ -3857,10 +3827,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.spark.timer.live_listener_bus.queue.streams.listener_processing.time": assert.False(t, validatedMetrics["databricks.spark.timer.live_listener_bus.queue.streams.listener_processing.time"], "Found a duplicate in the metrics slice: databricks.spark.timer.live_listener_bus.queue.streams.listener_processing.time") validatedMetrics["databricks.spark.timer.live_listener_bus.queue.streams.listener_processing.time"] = true @@ -3877,10 +3847,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, float64(1), dp.DoubleValue()) attrVal, ok := dp.Attributes().Get("cluster.id") assert.True(t, ok) - 
assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "cluster.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("spark.app.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "spark.app.id-val", attrVal.Str()) case "databricks.tasks.run.duration": assert.False(t, validatedMetrics["databricks.tasks.run.duration"], "Found a duplicate in the metrics slice: databricks.tasks.run.duration") validatedMetrics["databricks.tasks.run.duration"] = true @@ -3895,10 +3865,10 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 6, attrVal.Int()) attrVal, ok = dp.Attributes().Get("task.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "task.id-val", attrVal.Str()) case "databricks.tasks.schedule.status": assert.False(t, validatedMetrics["databricks.tasks.schedule.status"], "Found a duplicate in the metrics slice: databricks.tasks.schedule.status") validatedMetrics["databricks.tasks.schedule.status"] = true @@ -3913,13 +3883,13 @@ func TestMetricsBuilder(t *testing.T) { assert.Equal(t, int64(1), dp.IntValue()) attrVal, ok := dp.Attributes().Get("job.id") assert.True(t, ok) - assert.EqualValues(t, 1, attrVal.Int()) + assert.EqualValues(t, 6, attrVal.Int()) attrVal, ok = dp.Attributes().Get("task.id") assert.True(t, ok) - assert.EqualValues(t, "attr-val", attrVal.Str()) + assert.EqualValues(t, "task.id-val", attrVal.Str()) attrVal, ok = dp.Attributes().Get("task.type") assert.True(t, ok) - assert.Equal(t, "NotebookTask", attrVal.Str()) + assert.EqualValues(t, "NotebookTask", attrVal.Str()) } } }) diff --git a/internal/receiver/databricksreceiver/internal/metadata/generated_resource.go b/internal/receiver/databricksreceiver/internal/metadata/generated_resource.go new file mode 100644 index 0000000000..8bde4e201e --- /dev/null +++ b/internal/receiver/databricksreceiver/internal/metadata/generated_resource.go @@ -0,0 +1,57 @@ +// Code generated by mdatagen. DO NOT EDIT. + +package metadata + +import ( + "go.opentelemetry.io/collector/pdata/pcommon" +) + +// ResourceBuilder is a helper struct to build resources predefined in metadata.yaml. +// The ResourceBuilder is not thread-safe and must not to be used in multiple goroutines. +type ResourceBuilder struct { + res pcommon.Resource + config ResourceAttributesConfig +} + +// NewResourceBuilder creates a new ResourceBuilder. This method should be called on the start of the application. +func NewResourceBuilder(rac ResourceAttributesConfig) *ResourceBuilder { + return &ResourceBuilder{ + config: rac, + res: pcommon.NewResource(), + } +} + +// SetDatabricksInstanceName sets provided value as "databricks.instance.name" attribute. +func (rb *ResourceBuilder) SetDatabricksInstanceName(val string) { + if rb.config.DatabricksInstanceName.Enabled { + rb.res.Attributes().PutStr("databricks.instance.name", val) + } +} + +// SetSparkAppID sets provided value as "spark.app.id" attribute. +func (rb *ResourceBuilder) SetSparkAppID(val string) { + if rb.config.SparkAppID.Enabled { + rb.res.Attributes().PutStr("spark.app.id", val) + } +} + +// SetSparkClusterID sets provided value as "spark.cluster.id" attribute. 
+func (rb *ResourceBuilder) SetSparkClusterID(val string) {
+	if rb.config.SparkClusterID.Enabled {
+		rb.res.Attributes().PutStr("spark.cluster.id", val)
+	}
+}
+
+// SetSparkClusterName sets provided value as "spark.cluster.name" attribute.
+func (rb *ResourceBuilder) SetSparkClusterName(val string) {
+	if rb.config.SparkClusterName.Enabled {
+		rb.res.Attributes().PutStr("spark.cluster.name", val)
+	}
+}
+
+// Emit returns the built resource and resets the internal builder state.
+func (rb *ResourceBuilder) Emit() pcommon.Resource {
+	r := rb.res
+	rb.res = pcommon.NewResource()
+	return r
+}
diff --git a/internal/receiver/databricksreceiver/internal/metadata/generated_resource_test.go b/internal/receiver/databricksreceiver/internal/metadata/generated_resource_test.go
new file mode 100644
index 0000000000..f475c1a53e
--- /dev/null
+++ b/internal/receiver/databricksreceiver/internal/metadata/generated_resource_test.go
@@ -0,0 +1,58 @@
+// Code generated by mdatagen. DO NOT EDIT.
+
+package metadata
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestResourceBuilder(t *testing.T) {
+	for _, test := range []string{"default", "all_set", "none_set"} {
+		t.Run(test, func(t *testing.T) {
+			cfg := loadResourceAttributesConfig(t, test)
+			rb := NewResourceBuilder(cfg)
+			rb.SetDatabricksInstanceName("databricks.instance.name-val")
+			rb.SetSparkAppID("spark.app.id-val")
+			rb.SetSparkClusterID("spark.cluster.id-val")
+			rb.SetSparkClusterName("spark.cluster.name-val")
+
+			res := rb.Emit()
+			assert.Equal(t, 0, rb.Emit().Attributes().Len()) // Second call should return 0
+
+			switch test {
+			case "default":
+				assert.Equal(t, 4, res.Attributes().Len())
+			case "all_set":
+				assert.Equal(t, 4, res.Attributes().Len())
+			case "none_set":
+				assert.Equal(t, 0, res.Attributes().Len())
+				return
+			default:
+				assert.Failf(t, "unexpected test case: %s", test)
+			}
+
+			val, ok := res.Attributes().Get("databricks.instance.name")
+			assert.True(t, ok)
+			if ok {
+				assert.EqualValues(t, "databricks.instance.name-val", val.Str())
+			}
+			val, ok = res.Attributes().Get("spark.app.id")
+			assert.True(t, ok)
+			if ok {
+				assert.EqualValues(t, "spark.app.id-val", val.Str())
+			}
+			val, ok = res.Attributes().Get("spark.cluster.id")
+			assert.True(t, ok)
+			if ok {
+				assert.EqualValues(t, "spark.cluster.id-val", val.Str())
+			}
+			val, ok = res.Attributes().Get("spark.cluster.name")
+			assert.True(t, ok)
+			if ok {
+				assert.EqualValues(t, "spark.cluster.name-val", val.Str())
+			}
+		})
+	}
+}
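
The generated builder and its test pin down a small contract: each Set* call is gated on the per-attribute Enabled flag in ResourceAttributesConfig, and Emit returns the accumulated resource while resetting the builder for reuse. A minimal sketch of that lifecycle, assuming the default config enables these attributes (as the "default" test case above implies):

```go
package main

import (
	"fmt"

	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

func main() {
	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
	rb.SetDatabricksInstanceName("my-instance")
	rb.SetSparkClusterID("cluster-123")

	res := rb.Emit()
	fmt.Println(res.Attributes().Len())       // 2: only the attributes that were set
	fmt.Println(rb.Emit().Attributes().Len()) // 0: Emit reset the builder's state
}
```
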
diff --git a/internal/receiver/databricksreceiver/internal/spark/cluster_metrics_builder_test.go b/internal/receiver/databricksreceiver/internal/spark/cluster_metrics_builder_test.go
index 8555a7c39a..c5e5c81c40 100644
--- a/internal/receiver/databricksreceiver/internal/spark/cluster_metrics_builder_test.go
+++ b/internal/receiver/databricksreceiver/internal/spark/cluster_metrics_builder_test.go
@@ -24,6 +24,7 @@ import (
 	"go.opentelemetry.io/collector/pdata/pmetric"
 
 	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
+	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
 )
 
 func TestStripSparkMetricKey(t *testing.T) {
@@ -47,7 +48,8 @@ func TestClusterMetricsBuilder_GeneratedMetrics(t *testing.T) {
 	const expectedCount = 112
 
 	testBuilder := commontest.NewTestMetricsBuilder()
-	built := coreMetrics.Build(testBuilder, pcommon.NewTimestampFromTime(time.Now()))
+	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+	built := coreMetrics.Build(testBuilder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
 	pm := pmetric.NewMetrics()
 	for _, metric := range built {
 		metric.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
diff --git a/internal/receiver/databricksreceiver/internal/spark/extra_metrics_builder_test.go b/internal/receiver/databricksreceiver/internal/spark/extra_metrics_builder_test.go
index d74c24d686..65c0ff15a0 100644
--- a/internal/receiver/databricksreceiver/internal/spark/extra_metrics_builder_test.go
+++ b/internal/receiver/databricksreceiver/internal/spark/extra_metrics_builder_test.go
@@ -24,6 +24,7 @@ import (
 	"go.uber.org/zap"
 
 	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
+	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
 )
 
 func TestSparkExtraMetricsBuilder_Executors(t *testing.T) {
@@ -32,7 +33,8 @@
 	require.NoError(t, err)
 
 	builder := commontest.NewTestMetricsBuilder()
-	built := execMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+	built := execMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
 	pm := pmetric.NewMetrics()
 	for _, metrics := range built {
 		metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -53,7 +55,8 @@
 	require.NoError(t, err)
 
 	builder := commontest.NewTestMetricsBuilder()
-	built := jobMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+	built := jobMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
 	pm := pmetric.NewMetrics()
 	for _, metrics := range built {
 		metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -77,7 +80,8 @@
 	require.NoError(t, err)
 
 	builder := commontest.NewTestMetricsBuilder()
-	built := stageMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+	built := stageMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
 	pm := pmetric.NewMetrics()
 	for _, metrics := range built {
 		metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
diff --git a/internal/receiver/databricksreceiver/internal/spark/resource_metrics.go b/internal/receiver/databricksreceiver/internal/spark/resource_metrics.go
index e01db5addc..779f9ba62e 100644
--- a/internal/receiver/databricksreceiver/internal/spark/resource_metrics.go
+++ b/internal/receiver/databricksreceiver/internal/spark/resource_metrics.go
@@ -124,15 +124,16 @@ func (m *ResourceMetrics) addStageInfo(clstr Cluster, appID string, info StageIn
 	})
 }
 
-func (m *ResourceMetrics) Build(builder *metadata.MetricsBuilder, now pcommon.Timestamp, rmo ...metadata.ResourceMetricsOption) []pmetric.Metrics {
+func (m *ResourceMetrics) Build(mb *metadata.MetricsBuilder, rb *metadata.ResourceBuilder, now pcommon.Timestamp, instanceName string) []pmetric.Metrics {
 	var out []pmetric.Metrics
 	for rs, metricInfos := range m.m {
 		for _, mi := range metricInfos {
-			mi.build(builder, rs, now)
+			mi.build(mb, rs, now)
 		}
-		rmo = append(rmo, metadata.WithSparkClusterID(rs.cluster.ClusterID))
-		rmo = append(rmo, metadata.WithSparkClusterName(rs.cluster.ClusterName))
-		out = append(out, builder.Emit(rmo...))
+		rb.SetDatabricksInstanceName(instanceName)
+		rb.SetSparkClusterID(rs.cluster.ClusterID)
+		rb.SetSparkClusterName(rs.cluster.ClusterName)
+		out = append(out, mb.Emit(metadata.WithResource(rb.Emit())))
 	}
 	return out
 }
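
The hunk above is the crux of the migration: resource attributes no longer travel into Emit as metadata.ResourceMetricsOption varargs. Build now takes the shared ResourceBuilder and the instance name, populates it per cluster, and attaches the result through metadata.WithResource. A hedged sketch of the resulting call pattern; appendSparkMetrics is a hypothetical helper, the real wiring lives in scraper.go below:

```go
package databricksreceiver

import (
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"

	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/spark"
)

// appendSparkMetrics drains the per-cluster pmetric.Metrics produced by
// Build into a single batch. Each element already carries its resource,
// built from rb and attached via metadata.WithResource inside Build.
func appendSparkMetrics(out pmetric.Metrics, rm *spark.ResourceMetrics,
	mb *metadata.MetricsBuilder, rb *metadata.ResourceBuilder,
	now pcommon.Timestamp, instanceName string) {
	for _, m := range rm.Build(mb, rb, now, instanceName) {
		m.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())
	}
}
```
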
diff --git a/internal/receiver/databricksreceiver/internal/spark/resource_metrics_test.go b/internal/receiver/databricksreceiver/internal/spark/resource_metrics_test.go
index 00a3d5f004..a00d9c40c7 100644
--- a/internal/receiver/databricksreceiver/internal/spark/resource_metrics_test.go
+++ b/internal/receiver/databricksreceiver/internal/spark/resource_metrics_test.go
@@ -23,6 +23,7 @@ import (
 	"go.opentelemetry.io/collector/pdata/pmetric"
 
 	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
+	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
 )
 
 func TestSparkDbrMetrics_Append(t *testing.T) {
@@ -45,8 +46,9 @@ func TestSparkDbrMetrics_Append(t *testing.T) {
 	})
 	outerRM.Append(rmSub2)
 
-	builder := commontest.NewTestMetricsBuilder()
-	built := outerRM.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+	mb := commontest.NewTestMetricsBuilder()
+	rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+	built := outerRM.Build(mb, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
 	allMetrics := pmetric.NewMetrics()
 	for _, metrics := range built {
 		metrics.ResourceMetrics().CopyTo(allMetrics.ResourceMetrics())
diff --git a/internal/receiver/databricksreceiver/scraper.go b/internal/receiver/databricksreceiver/scraper.go
index 1783f8c2b0..752ea3a8e6 100644
--- a/internal/receiver/databricksreceiver/scraper.go
+++ b/internal/receiver/databricksreceiver/scraper.go
@@ -39,6 +39,7 @@ type scraper struct {
 	dbrsvc          databricks.Service
 	logger          *zap.Logger
 	metricsBuilder  *metadata.MetricsBuilder
+	resourceBuilder *metadata.ResourceBuilder
 	dbrInstanceName string
 }
 
@@ -60,7 +61,8 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
 		return pmetric.Metrics{}, fmt.Errorf("scrape failed to add multi job run metrics: %w", err)
 	}
 
-	dbrMetrics := s.metricsBuilder.Emit(metadata.WithDatabricksInstanceName(s.dbrInstanceName))
+	s.resourceBuilder.SetDatabricksInstanceName(s.dbrInstanceName)
+	dbrMetrics := s.metricsBuilder.Emit(metadata.WithResource(s.resourceBuilder.Emit()))
 
 	// spark metrics
 	clusters, err := s.dbrsvc.RunningClusters()
@@ -103,7 +105,7 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
 	out := pmetric.NewMetrics()
 	dbrMetrics.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())
 
-	sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, now, metadata.WithDatabricksInstanceName(s.dbrInstanceName))
+	sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, s.resourceBuilder, now, s.dbrInstanceName)
 	for _, metric := range sparkMetrics {
 		metric.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())
diff --git a/internal/receiver/databricksreceiver/scraper_test.go b/internal/receiver/databricksreceiver/scraper_test.go
index 9e2a025594..715485ffb4 100644
--- a/internal/receiver/databricksreceiver/scraper_test.go
+++ b/internal/receiver/databricksreceiver/scraper_test.go
@@ -24,6 +24,7 @@ import (
 	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
 	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/databricks"
+
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata" "github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/spark" ) @@ -39,6 +40,7 @@ func TestScraper_Success(t *testing.T) { logger: nopLogger, dbrInstanceName: "my-instance", metricsBuilder: commontest.NewTestMetricsBuilder(), + resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()), rmp: databricks.NewRunMetricsProvider(dbrsvc), dbrmp: databricks.MetricsProvider{Svc: dbrsvc}, scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc}, @@ -71,6 +73,7 @@ func TestScraper_Forbidden(t *testing.T) { logger: nopLogger, dbrInstanceName: "my-instance", metricsBuilder: commontest.NewTestMetricsBuilder(), + resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()), rmp: databricks.NewRunMetricsProvider(dbrsvc), dbrmp: databricks.MetricsProvider{Svc: dbrsvc}, scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc}, @@ -94,6 +97,7 @@ func TestScraper_MultiCluster_Forbidden(t *testing.T) { logger: nopLogger, dbrInstanceName: "my-instance", metricsBuilder: commontest.NewTestMetricsBuilder(), + resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()), rmp: databricks.NewRunMetricsProvider(dbrsvc), dbrmp: databricks.MetricsProvider{Svc: dbrsvc}, scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc}, diff --git a/internal/tools/go.mod b/internal/tools/go.mod index 14cc8bd7b8..d711330162 100644 --- a/internal/tools/go.mod +++ b/internal/tools/go.mod @@ -14,7 +14,7 @@ require ( github.com/golangci/golangci-lint v1.53.3 github.com/google/addlicense v1.1.1 github.com/jstemmer/go-junit-report v1.0.0 - github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.81.0 + github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.82.0 github.com/ory/go-acc v0.2.8 github.com/pavius/impi v0.0.3 github.com/tcnksm/ghr v0.16.0 @@ -199,9 +199,9 @@ require ( github.com/yeya24/promlinter v0.2.0 // indirect github.com/ykadowak/zerologlint v0.1.2 // indirect gitlab.com/bosi/decorder v0.2.3 // indirect - go.opentelemetry.io/collector/confmap v0.81.0 // indirect - go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect - go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 // indirect + go.opentelemetry.io/collector/confmap v0.82.0 // indirect + go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect + go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 // indirect go.tmz.dev/musttag v0.7.0 // indirect go.uber.org/atomic v1.10.0 // indirect go.uber.org/multierr v1.11.0 // indirect @@ -216,8 +216,8 @@ require ( golang.org/x/sys v0.10.0 // indirect golang.org/x/text v0.11.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect + google.golang.org/grpc v1.57.0 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/internal/tools/go.sum b/internal/tools/go.sum index b2f8ffa330..a8df8193da 100644 --- a/internal/tools/go.sum +++ b/internal/tools/go.sum @@ -547,8 +547,8 @@ github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo/v2 v2.9.4 
h1:xR7vG4IXt5RWx6FfIjyAtsoMAtnc3C/rFXBBd2AjZwE= github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= -github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.81.0 h1:o6a5Z5XZGw/qddTJrvPW8yTa0IpXzXubK8r43nuRdLc= -github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.81.0/go.mod h1:8moVn5V/NwuBcJL6XS2xoZC/QehRQex1lRUUScBbO+I= +github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.82.0 h1:8k9ujIcxhjKLB9R1uN2csu0DrLuo44tRHpuzaKZuMKg= +github.com/open-telemetry/opentelemetry-collector-contrib/cmd/mdatagen v0.82.0/go.mod h1:59hZ34LjfMw8T50ui56Xu5wOjy4wzp0FVuZJJyv4p9E= github.com/ory/go-acc v0.2.8 h1:rOHHAPQjf0u7eHFGWpiXK+gIu/e0GRSJNr9pDukdNC4= github.com/ory/go-acc v0.2.8/go.mod h1:iCRZUdGb/7nqvSn8xWZkhfVrtXRZ9Wru2E5rabCjFPI= github.com/ory/viper v1.7.5 h1:+xVdq7SU3e1vNaCsk/ixsfxE4zylk1TJUiJrY647jUE= @@ -773,17 +773,17 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY= -go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k= -go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4= -go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY= -go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas= -go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U= -go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8= -go.opentelemetry.io/collector/receiver v0.81.0 h1:0c+YtIV7fmd9ev+zmwS9qjx5ASi8cw+gSypu4I7Gugc= -go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw= +go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4= +go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY= +go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4= +go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s= +go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE= +go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw= +go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo= +go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY= +go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4= +go.opentelemetry.io/collector/receiver v0.82.0 h1:bc6jc8jmSgc0/C9zqTqqWOGJFVx0AJ53jiToSmQs2SE= +go.opentelemetry.io/collector/semconv v0.82.0 h1:WUeT2a+uZjI6kLvwcBaJnGvo7KSQ/9dIFRcxOQdXucc= go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s= 
go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= @@ -1171,8 +1171,8 @@ google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1194,8 +1194,8 @@ google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= +google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/pkg/extension/smartagentextension/go.mod b/pkg/extension/smartagentextension/go.mod index 4b260d291d..2a778183dd 100644 --- a/pkg/extension/smartagentextension/go.mod +++ b/pkg/extension/smartagentextension/go.mod @@ -7,9 +7,9 @@ require ( github.com/signalfx/signalfx-agent v1.0.1-0.20230104182534-9eee411fe305 github.com/signalfx/splunk-otel-collector/tests v0.72.0 github.com/stretchr/testify v1.8.4 - go.opentelemetry.io/collector/component v0.81.0 - go.opentelemetry.io/collector/confmap v0.81.0 - go.opentelemetry.io/collector/extension v0.81.0 + go.opentelemetry.io/collector/component v0.82.0 + go.opentelemetry.io/collector/confmap v0.82.0 + go.opentelemetry.io/collector/extension v0.82.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -84,7 +84,7 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/compress v1.16.6 // indirect + github.com/klauspost/compress v1.16.7 // indirect github.com/knadh/koanf v1.5.0 // indirect github.com/knadh/koanf/v2 v2.0.1 // indirect github.com/leodido/go-urn 
v1.2.1 // indirect @@ -133,24 +133,24 @@ require ( go.etcd.io/etcd/client/pkg/v3 v3.5.9 // indirect go.etcd.io/etcd/client/v2 v2.305.9 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/collector v0.81.0 // indirect - go.opentelemetry.io/collector/config/configauth v0.81.0 // indirect - go.opentelemetry.io/collector/config/configcompression v0.81.0 // indirect - go.opentelemetry.io/collector/config/configgrpc v0.81.0 // indirect - go.opentelemetry.io/collector/config/confighttp v0.81.0 // indirect - go.opentelemetry.io/collector/config/confignet v0.81.0 // indirect - go.opentelemetry.io/collector/config/configopaque v0.81.0 // indirect - go.opentelemetry.io/collector/config/configtelemetry v0.81.0 // indirect - go.opentelemetry.io/collector/config/configtls v0.81.0 // indirect - go.opentelemetry.io/collector/config/internal v0.81.0 // indirect - go.opentelemetry.io/collector/consumer v0.81.0 // indirect - go.opentelemetry.io/collector/exporter v0.81.0 // indirect - go.opentelemetry.io/collector/extension/auth v0.81.0 // indirect - go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect - go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 // indirect - go.opentelemetry.io/collector/processor v0.81.0 // indirect - go.opentelemetry.io/collector/receiver v0.81.0 // indirect - go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 // indirect + go.opentelemetry.io/collector v0.82.0 // indirect + go.opentelemetry.io/collector/config/configauth v0.82.0 // indirect + go.opentelemetry.io/collector/config/configcompression v0.82.0 // indirect + go.opentelemetry.io/collector/config/configgrpc v0.82.0 // indirect + go.opentelemetry.io/collector/config/confighttp v0.82.0 // indirect + go.opentelemetry.io/collector/config/confignet v0.82.0 // indirect + go.opentelemetry.io/collector/config/configopaque v0.82.0 // indirect + go.opentelemetry.io/collector/config/configtelemetry v0.82.0 // indirect + go.opentelemetry.io/collector/config/configtls v0.82.0 // indirect + go.opentelemetry.io/collector/config/internal v0.82.0 // indirect + go.opentelemetry.io/collector/consumer v0.82.0 // indirect + go.opentelemetry.io/collector/exporter v0.82.0 // indirect + go.opentelemetry.io/collector/extension/auth v0.82.0 // indirect + go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect + go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 // indirect + go.opentelemetry.io/collector/processor v0.82.0 // indirect + go.opentelemetry.io/collector/receiver v0.82.0 // indirect + go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect go.opentelemetry.io/otel v1.16.0 // indirect diff --git a/pkg/extension/smartagentextension/go.sum b/pkg/extension/smartagentextension/go.sum index 6390107a45..c56a668350 100644 --- a/pkg/extension/smartagentextension/go.sum +++ b/pkg/extension/smartagentextension/go.sum @@ -404,8 +404,8 @@ github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8 github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk= 
-github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs= github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs= github.com/knadh/koanf/v2 v2.0.1 h1:1dYGITt1I23x8cfx8ZnldtezdyaZtfAuRtIFOiRzK7g= @@ -661,50 +661,50 @@ go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/ go.mongodb.org/mongo-driver v1.11.6 h1:XM7G6PjiGAO5betLF13BIa5TlLUUE3uJ/2Ox3Lz1K+o= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/collector v0.81.0 h1:pF+sB8xNXlg/W0a0QTLz4mUWyool1a9toVj8LmLoFqg= -go.opentelemetry.io/collector v0.81.0/go.mod h1:thuOTBMusXwcTPTwLbs3zwwCOLaaQX2g+Hjf8OObc/w= -go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY= -go.opentelemetry.io/collector/component v0.81.0/go.mod h1:+m6/yPiJ7O7Oc/OLfmgUB2mrY1xoUqRj4BsoOtIVpGs= -go.opentelemetry.io/collector/config/configauth v0.81.0 h1:NIiJuIGOdblN0EIJv64R2mvGhthcYfWuvyCnjk8HRN4= -go.opentelemetry.io/collector/config/configauth v0.81.0/go.mod h1:2KscbmU+8fIzwiSU9Kku0Tf4b4A1plqFIJXR1DWSaTw= -go.opentelemetry.io/collector/config/configcompression v0.81.0 h1:Q725pvVH7tR6BP3WK7Ro3pbqMeQdZEV3KeFVHchBxCc= -go.opentelemetry.io/collector/config/configcompression v0.81.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM= -go.opentelemetry.io/collector/config/configgrpc v0.81.0 h1:Q2xEE2SGbg79j3TdHT+781eUu/2uUIyrHVJAG9bLpVk= -go.opentelemetry.io/collector/config/configgrpc v0.81.0/go.mod h1:Frq/l2Ttbvm7cFH3hkxLdhl5TCNHcH6rnkpmi8U2kLY= -go.opentelemetry.io/collector/config/confighttp v0.81.0 h1:vIdiepUT7P/WtJRdfh8mjzvSqJRVF8/vl9GWtUNQlHQ= -go.opentelemetry.io/collector/config/confighttp v0.81.0/go.mod h1:I54THsffkpv//O7bUHw+0bXxjYdvyL6IHg5ksgYez8I= -go.opentelemetry.io/collector/config/confignet v0.81.0 h1:Eu8m3eX8GaGhOUc//YXvV4i3cEivxUSxkLnV1U9ydhg= -go.opentelemetry.io/collector/config/confignet v0.81.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM= -go.opentelemetry.io/collector/config/configopaque v0.81.0 h1:MkCAGh0WydRWydETB9FLnuCj9hDPDiz2g4Wxnl53I0w= -go.opentelemetry.io/collector/config/configopaque v0.81.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc= -go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k= -go.opentelemetry.io/collector/config/configtelemetry v0.81.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA= -go.opentelemetry.io/collector/config/configtls v0.81.0 h1:2vt+yOZUvGq5ADqFAxL5ONm1ACuGXDSs87AWT54Ez4M= -go.opentelemetry.io/collector/config/configtls v0.81.0/go.mod h1:HMHTYBMMgqBpTvnNAhQYmjO7XuoBMe2T4qRHcKluB4Q= -go.opentelemetry.io/collector/config/internal v0.81.0 h1:wRV2PBnJygdmKpIdt/xfG7zdQvXvHz9L+z8MhGsOji4= -go.opentelemetry.io/collector/config/internal v0.81.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg= -go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4= -go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY= -go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas= -go.opentelemetry.io/collector/consumer 
v0.81.0/go.mod h1:jS7+gAKdOx3lD3SnaBztBjUVpUYL3ee7fpoqI4p/gT8= -go.opentelemetry.io/collector/exporter v0.81.0 h1:GLhB8WGrBx+zZSB1HIOx2ivFUMahGtAVO2CC5xbCUHQ= -go.opentelemetry.io/collector/exporter v0.81.0/go.mod h1:Di4RTzI8uRooVNATIeApNUgmGdNt8XiikUTQLabmZaA= -go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0 h1:Ri5pj0slm+FUbbG81UIhQaQ992z2+PcT2++4JI32XGI= -go.opentelemetry.io/collector/extension v0.81.0 h1:Ak7AzZzxTFJxGyVbEklsGzqHyOHW5USiifJilCcRyTU= -go.opentelemetry.io/collector/extension v0.81.0/go.mod h1:DU2bX8qulS5+OCJZGfvqIwIT/q3sFnEjI2HjJ2LDI/s= -go.opentelemetry.io/collector/extension/auth v0.81.0 h1:UzVQSG9naJh1hX7hh+HVcvB3n+rpCJXX2BBdUoL/Ybo= -go.opentelemetry.io/collector/extension/auth v0.81.0/go.mod h1:PaBFcFrzXV+UgM4VZKp6Kn1IiRC/MbEYWxTfIalcIwk= -go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U= -go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8= -go.opentelemetry.io/collector/processor v0.81.0 h1:ypyNV5R0bnN3XGMAsH/q5eNARF5vXtFgSOK9rBWzsLc= -go.opentelemetry.io/collector/processor v0.81.0/go.mod h1:ZDwO3DVg1VUSA92g0r/o0jYk+T7r9uxgZZ3LABJbC34= -go.opentelemetry.io/collector/receiver v0.81.0 h1:0c+YtIV7fmd9ev+zmwS9qjx5ASi8cw+gSypu4I7Gugc= -go.opentelemetry.io/collector/receiver v0.81.0/go.mod h1:q80JkMxVLnk0vWxoTRY2J7F4Qx9069Yy5yxDbZ4JVwk= -go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 h1:ewVbfATnAeQkwFK3r0dpFKCXcTb8HJKX4AixUioRt+c= -go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0/go.mod h1:LGuSMVdOq5Zq+CEHF9YBHMaOIUZrzqW7DQGqo9g0dJA= -go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw= +go.opentelemetry.io/collector v0.82.0 h1:MaKqWT0R4GCdkZDhYWOQkLfoJj9V7GsMbk1gsAuogaw= +go.opentelemetry.io/collector v0.82.0/go.mod h1:PMmDJkZzC1xpcViHlwMMEVeAnRRl3HYy3nXgD8KJwG0= +go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4= +go.opentelemetry.io/collector/component v0.82.0/go.mod h1:jSdGG4L1Ger6ob6lWpr8jmKC2qqC+XZ/gOgu7GUA5xs= +go.opentelemetry.io/collector/config/configauth v0.82.0 h1:H5xrWyPMotSqajiiH/bay8bpVsT4aq6Vih4OuArXv4Q= +go.opentelemetry.io/collector/config/configauth v0.82.0/go.mod h1:P0ukmBIUk+HP0O7yfUOKRmPmffneAQgmEL9/iTOo1CU= +go.opentelemetry.io/collector/config/configcompression v0.82.0 h1:M6a7eiHdBUB8mIioDhWugJfNm7Sw85cvv/OXyTDhtY0= +go.opentelemetry.io/collector/config/configcompression v0.82.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM= +go.opentelemetry.io/collector/config/configgrpc v0.82.0 h1:taZWDbtVBm0OOcgnfpVA1X43pmU2oNhj39B2uV3COQk= +go.opentelemetry.io/collector/config/configgrpc v0.82.0/go.mod h1:NHXHRI40Q7TT/d38DKT30B7DOrVUkj7anEFOD59R9o8= +go.opentelemetry.io/collector/config/confighttp v0.82.0 h1:2LhyqVTd+Bsr8SgsCq6+q731F81uddK9GwvGhwD/Co0= +go.opentelemetry.io/collector/config/confighttp v0.82.0/go.mod h1:OHGx/aJqGJ9z2jaBXvaylwkAuiUwikg1/n+RRDpsfOo= +go.opentelemetry.io/collector/config/confignet v0.82.0 h1:zN9JaFTn7Dth3u5ot6KZJcBZACTEzGqFWYyO5qAlYfo= +go.opentelemetry.io/collector/config/confignet v0.82.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM= +go.opentelemetry.io/collector/config/configopaque v0.82.0 h1:0Ma63QTr4AkODzEABZHtgiU5Dig8SItpHOuB28UnVSw= 
+go.opentelemetry.io/collector/config/configopaque v0.82.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
+go.opentelemetry.io/collector/config/configtls v0.82.0 h1:eE/8muTszLlviOGLy5N08BaXLCcYqDW3mKIoKyDDa8o=
+go.opentelemetry.io/collector/config/configtls v0.82.0/go.mod h1:unBTmL1bdpkp9mYEDz7N+Ln4yEwh7Ug74I1HgZMplCk=
+go.opentelemetry.io/collector/config/internal v0.82.0 h1:JnnDARkXrC3OJDsMfQkBgfI0Np4s+18zvoDqZ4OH0+I=
+go.opentelemetry.io/collector/config/internal v0.82.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
+go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4=
+go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s=
+go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE=
+go.opentelemetry.io/collector/consumer v0.82.0/go.mod h1:qrhd0i0Gp0RkihcEXb+7Rb584Kal2NmGH1eA4Zg6puA=
+go.opentelemetry.io/collector/exporter v0.82.0 h1:BWsx4rWfVwlV+qNuevSMm+2Cv6uGZYYZ9CEFqq0q+F4=
+go.opentelemetry.io/collector/exporter v0.82.0/go.mod h1:e3VPpLYVNRaF+G2HuKw6A5hTBMYZ4tgRYYzMusfwFJE=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0 h1:tYCEUQpfyuS/NgrWg9Ulps6f0ffPSCBRTBdK6sXnSaw=
+go.opentelemetry.io/collector/extension v0.82.0 h1:DH4tqrTOz0HmGDJ6FT/jRD2woQf3ugqC6QqSiQdH3wg=
+go.opentelemetry.io/collector/extension v0.82.0/go.mod h1:n7d0XTh7fdyorZWTc+gLpJh78FS7GjRqIjUiW1xdhe0=
+go.opentelemetry.io/collector/extension/auth v0.82.0 h1:iaxwFslRj6mfzs1wVzbnj+gDU2G98IeXW4tcrq78p5s=
+go.opentelemetry.io/collector/extension/auth v0.82.0/go.mod h1:O1xBcb06pKD8g3FadLDvMa1xKZwPGdHQp4CI8vW3RCM=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/processor v0.82.0 h1:DoqVrrnGYThu/h1sOr6E0hR1Fj5nQT4VT0ptFZcltRk=
+go.opentelemetry.io/collector/processor v0.82.0/go.mod h1:B0MtfLWCYNBJ+PXf9k77M2Yn08MKItNB2vuvwhqrtt0=
+go.opentelemetry.io/collector/receiver v0.82.0 h1:bc6jc8jmSgc0/C9zqTqqWOGJFVx0AJ53jiToSmQs2SE=
+go.opentelemetry.io/collector/receiver v0.82.0/go.mod h1:Uh6BgcTmmrA1Bm/GpKGRY6WwQyPio4yEDsYkUo0A5Gk=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 h1:LzcmQ9d7NauTVEWfPNwRwqNd/NBQDi+JU0OHWearcEA=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0/go.mod h1:Qt9Ha/yWaU6ni0XwFslNCBX5zZBQHcnxma/sU1s7LH4=
+go.opentelemetry.io/collector/semconv v0.82.0 h1:WUeT2a+uZjI6kLvwcBaJnGvo7KSQ/9dIFRcxOQdXucc=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 h1:mdcNStUIXngF/mH3xxAo4nbR4g65IXqLL1SvYMjz7JQ=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17/go.mod h1:N2Nw/UmmvQn0yCnaUzvsWzTWIeffYIdFteg6mxqCWII=
 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM=
diff --git a/pkg/processor/timestampprocessor/go.mod b/pkg/processor/timestampprocessor/go.mod
index 665324df26..17f7037318 100644
--- a/pkg/processor/timestampprocessor/go.mod
+++ b/pkg/processor/timestampprocessor/go.mod
@@ -4,11 +4,11 @@
 go 1.19
 
 require (
 	github.com/stretchr/testify v1.8.4
-	go.opentelemetry.io/collector/component v0.81.0
-	go.opentelemetry.io/collector/confmap v0.81.0
-	go.opentelemetry.io/collector/consumer v0.81.0
-	go.opentelemetry.io/collector/pdata v1.0.0-rcv0013
-	go.opentelemetry.io/collector/processor v0.81.0
+	go.opentelemetry.io/collector/component v0.82.0
+	go.opentelemetry.io/collector/confmap v0.82.0
+	go.opentelemetry.io/collector/consumer v0.82.0
+	go.opentelemetry.io/collector/pdata v1.0.0-rcv0014
+	go.opentelemetry.io/collector/processor v0.82.0
 	go.uber.org/zap v1.24.0
 )
@@ -26,19 +26,19 @@
 	github.com/modern-go/reflect2 v1.0.2 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	go.opencensus.io v0.24.0 // indirect
-	go.opentelemetry.io/collector v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configtelemetry v0.81.0 // indirect
-	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect
+	go.opentelemetry.io/collector v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configtelemetry v0.82.0 // indirect
+	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect
 	go.opentelemetry.io/otel v1.16.0 // indirect
 	go.opentelemetry.io/otel/metric v1.16.0 // indirect
 	go.opentelemetry.io/otel/trace v1.16.0 // indirect
 	go.uber.org/atomic v1.10.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/net v0.11.0 // indirect
-	golang.org/x/sys v0.9.0 // indirect
-	golang.org/x/text v0.10.0 // indirect
-	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
-	google.golang.org/grpc v1.56.1 // indirect
+	golang.org/x/net v0.12.0 // indirect
+	golang.org/x/sys v0.10.0 // indirect
+	golang.org/x/text v0.11.0 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect
+	google.golang.org/grpc v1.56.2 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/pkg/processor/timestampprocessor/go.sum b/pkg/processor/timestampprocessor/go.sum
index 362e59cf1c..4e483ba021 100644
--- a/pkg/processor/timestampprocessor/go.sum
+++ b/pkg/processor/timestampprocessor/go.sum
@@ -259,22 +259,22 @@ go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3
 go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY=
 go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
 go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/collector v0.81.0 h1:pF+sB8xNXlg/W0a0QTLz4mUWyool1a9toVj8LmLoFqg=
-go.opentelemetry.io/collector v0.81.0/go.mod h1:thuOTBMusXwcTPTwLbs3zwwCOLaaQX2g+Hjf8OObc/w=
-go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY=
-go.opentelemetry.io/collector/component v0.81.0/go.mod h1:+m6/yPiJ7O7Oc/OLfmgUB2mrY1xoUqRj4BsoOtIVpGs=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
-go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4=
-go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY=
-go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas=
-go.opentelemetry.io/collector/consumer v0.81.0/go.mod h1:jS7+gAKdOx3lD3SnaBztBjUVpUYL3ee7fpoqI4p/gT8=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8=
-go.opentelemetry.io/collector/processor v0.81.0 h1:ypyNV5R0bnN3XGMAsH/q5eNARF5vXtFgSOK9rBWzsLc=
-go.opentelemetry.io/collector/processor v0.81.0/go.mod h1:ZDwO3DVg1VUSA92g0r/o0jYk+T7r9uxgZZ3LABJbC34=
+go.opentelemetry.io/collector v0.82.0 h1:MaKqWT0R4GCdkZDhYWOQkLfoJj9V7GsMbk1gsAuogaw=
+go.opentelemetry.io/collector v0.82.0/go.mod h1:PMmDJkZzC1xpcViHlwMMEVeAnRRl3HYy3nXgD8KJwG0=
+go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4=
+go.opentelemetry.io/collector/component v0.82.0/go.mod h1:jSdGG4L1Ger6ob6lWpr8jmKC2qqC+XZ/gOgu7GUA5xs=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
+go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4=
+go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s=
+go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE=
+go.opentelemetry.io/collector/consumer v0.82.0/go.mod h1:qrhd0i0Gp0RkihcEXb+7Rb584Kal2NmGH1eA4Zg6puA=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/processor v0.82.0 h1:DoqVrrnGYThu/h1sOr6E0hR1Fj5nQT4VT0ptFZcltRk=
+go.opentelemetry.io/collector/processor v0.82.0/go.mod h1:B0MtfLWCYNBJ+PXf9k77M2Yn08MKItNB2vuvwhqrtt0=
 go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s=
 go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4=
 go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo=
@@ -344,8 +344,8 @@ golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
-golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -355,8 +355,8 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
-golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
+golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -382,8 +382,8 @@ google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98
 google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
 google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
 google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
-google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A=
-google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
 google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
 google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
@@ -393,8 +393,8 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8
 google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
 google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
 google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
-google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ=
-google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
+google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI=
+google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
 google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
 google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
 google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
diff --git a/pkg/receiver/smartagentreceiver/go.mod b/pkg/receiver/smartagentreceiver/go.mod
index 5364cdabfa..da8ce1b495 100644
--- a/pkg/receiver/smartagentreceiver/go.mod
+++ b/pkg/receiver/smartagentreceiver/go.mod
@@ -3,9 +3,9 @@ module github.com/signalfx/splunk-otel-collector/pkg/receiver/smartagentreceiver
 go 1.19
 
 require (
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0
 	github.com/openzipkin/zipkin-go v0.4.1
 	github.com/signalfx/defaults v1.2.2-0.20180531161417-70562fe60657
 	github.com/signalfx/golib/v3 v3.3.50
@@ -14,14 +14,14 @@
 	github.com/signalfx/splunk-otel-collector/tests v0.72.0
 	github.com/sirupsen/logrus v1.9.3
 	github.com/stretchr/testify v1.8.4
-	go.opentelemetry.io/collector v0.81.0
-	go.opentelemetry.io/collector/component v0.81.0
-	go.opentelemetry.io/collector/confmap v0.81.0
-	go.opentelemetry.io/collector/consumer v0.81.0
-	go.opentelemetry.io/collector/exporter v0.81.0
-	go.opentelemetry.io/collector/extension v0.81.0
-	go.opentelemetry.io/collector/pdata v1.0.0-rcv0013
-	go.opentelemetry.io/collector/receiver v0.81.0
+	go.opentelemetry.io/collector v0.82.0
+	go.opentelemetry.io/collector/component v0.82.0
+	go.opentelemetry.io/collector/confmap v0.82.0
+	go.opentelemetry.io/collector/consumer v0.82.0
+	go.opentelemetry.io/collector/exporter v0.82.0
+	go.opentelemetry.io/collector/extension v0.82.0
+	go.opentelemetry.io/collector/pdata v1.0.0-rcv0014
+	go.opentelemetry.io/collector/receiver v0.82.0
 	go.opentelemetry.io/otel/metric v1.16.0
 	go.opentelemetry.io/otel/trace v1.16.0
 	go.uber.org/zap v1.24.0
@@ -217,8 +217,8 @@
 	github.com/mwielbut/pointy v1.1.0 // indirect
 	github.com/oklog/run v1.1.0 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0 // indirect
-	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0 // indirect
+	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0 // indirect
 	github.com/opencontainers/go-digest v1.0.0 // indirect
 	github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b // indirect
 	github.com/opencontainers/runc v1.1.6 // indirect
@@ -269,20 +269,20 @@
 	go.etcd.io/etcd/client/pkg/v3 v3.5.9 // indirect
 	go.etcd.io/etcd/client/v2 v2.305.9 // indirect
 	go.opencensus.io v0.24.0 // indirect
-	go.opentelemetry.io/collector/config/configauth v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configcompression v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configgrpc v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/confighttp v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/confignet v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configopaque v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configtelemetry v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configtls v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/internal v0.81.0 // indirect
-	go.opentelemetry.io/collector/extension/auth v0.81.0 // indirect
-	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect
-	go.opentelemetry.io/collector/processor v0.81.0 // indirect
-	go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 // indirect
-	go.opentelemetry.io/collector/semconv v0.81.0 // indirect
+	go.opentelemetry.io/collector/config/configauth v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configcompression v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configgrpc v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/confighttp v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/confignet v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configopaque v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configtelemetry v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configtls v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/internal v0.82.0 // indirect
+	go.opentelemetry.io/collector/extension/auth v0.82.0 // indirect
+	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect
+	go.opentelemetry.io/collector/processor v0.82.0 // indirect
+	go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 // indirect
+	go.opentelemetry.io/collector/semconv v0.82.0 // indirect
 	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 // indirect
 	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect
 	go.opentelemetry.io/otel v1.16.0 // indirect
@@ -303,7 +303,7 @@
 	google.golang.org/api v0.125.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect
-	google.golang.org/grpc v1.56.2 // indirect
+	google.golang.org/grpc v1.57.0 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/fsnotify.v1 v1.4.7 // indirect
 	gopkg.in/go-playground/validator.v9 v9.31.0 // indirect
diff --git a/pkg/receiver/smartagentreceiver/go.sum b/pkg/receiver/smartagentreceiver/go.sum
index 874110bbe7..6b766a7dce 100644
--- a/pkg/receiver/smartagentreceiver/go.sum
+++ b/pkg/receiver/smartagentreceiver/go.sum
@@ -914,16 +914,16 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
 github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
 github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY=
 github.com/onsi/gomega v1.27.9 h1:qIyVWbOsvQEye2QCqLsNSeH/5L1RS9vS382erEWfT3o=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0 h1:sPjCHuqjn5UYDJOai4FulMCfLP+7AbspjHfv0jAtmD0=
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.81.0/go.mod h1:moQ6krtZ8dyziij2P+9eao5+gBfCJjiNDwN7n2MZZs4=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0 h1:O7D2OVWhVgR5JzLoM+Q7/1Pbt+zpVrNsFzJt+/5TonM=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.81.0/go.mod h1:jTNj31j5LsaNnYZnaNsEtOfXaYgjdZPeLf0cS3Fjg5w=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0 h1:mPkMu2Dx9QrGmZxnfwcSSvAyUZzBtaeYIdvmFSrC0KA=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.81.0/go.mod h1:HGW+MymIh+h0Gc9TBCsh/R7X+wauRpNtV34sqNd4YG0=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0 h1:qUNZEYelezsSH6KrbE4u1TrzXCggSFPZqFI1m29gJFQ=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.81.0/go.mod h1:OUF0FMVFQQnlqc+QUMyQScszuBnqO9pO7xL7/kK1PM0=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0 h1:/2cI6UppJgjmc9voPGDUWWv3Bhd0N4LgbZ+qFfrLeI4=
-github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.81.0/go.mod h1:BzoMZIy0wKuBbdUv0FJaWPY2xWZeaOC8tgrxjUp07Zg=
+github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0 h1:0b6glbENAwPdasKKVOgpR/EaZG1sJhsUfXCRiwZ0drU=
+github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.82.0/go.mod h1:MKnM9GFqPz4HY4NQDDao+dIjZz4BvThAijuJuPC8NOI=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0 h1:noxLHxoQqYt3WO3Z2HpUExyYG7l4fuqC0FyqRPYb+BY=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata v0.82.0/go.mod h1:umq1KOdkQa2+djdxtxHmLigyFtLVqM7QXGeP3/s3cHA=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0 h1:wBX6PvwO5mopN+uuVU1pyfl54OdrrRT+VPRCyl22O1A=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.82.0/go.mod h1:8bbFs0G0deA/M9oRGqUJ5n/+N1wejo/6CSWztEnz3Hc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0 h1:zSQ0EolsXY3F18kFwEpqAkLc5C2/DE0vbFS3QfMpsDc=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.82.0/go.mod h1:wbgo9BklRN8M4Mi+76mo9bMVQY2C5gL/rPKwePQL3l0=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0 h1:NC8LNVLj2UXfPZoW4vpAf9+NWqw1vzwuSjdA2xRPIvA=
+github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/zipkin v0.82.0/go.mod h1:t2x45aFpeo5tc6oM2nNyKPLy5gBhT/R/uJNdPp7TBbc=
 github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
 github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
@@ -1173,51 +1173,51 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
 go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
 go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
 go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/collector v0.81.0 h1:pF+sB8xNXlg/W0a0QTLz4mUWyool1a9toVj8LmLoFqg=
-go.opentelemetry.io/collector v0.81.0/go.mod h1:thuOTBMusXwcTPTwLbs3zwwCOLaaQX2g+Hjf8OObc/w=
-go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY=
-go.opentelemetry.io/collector/component v0.81.0/go.mod h1:+m6/yPiJ7O7Oc/OLfmgUB2mrY1xoUqRj4BsoOtIVpGs=
-go.opentelemetry.io/collector/config/configauth v0.81.0 h1:NIiJuIGOdblN0EIJv64R2mvGhthcYfWuvyCnjk8HRN4=
-go.opentelemetry.io/collector/config/configauth v0.81.0/go.mod h1:2KscbmU+8fIzwiSU9Kku0Tf4b4A1plqFIJXR1DWSaTw=
-go.opentelemetry.io/collector/config/configcompression v0.81.0 h1:Q725pvVH7tR6BP3WK7Ro3pbqMeQdZEV3KeFVHchBxCc=
-go.opentelemetry.io/collector/config/configcompression v0.81.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0 h1:Q2xEE2SGbg79j3TdHT+781eUu/2uUIyrHVJAG9bLpVk=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0/go.mod h1:Frq/l2Ttbvm7cFH3hkxLdhl5TCNHcH6rnkpmi8U2kLY=
-go.opentelemetry.io/collector/config/confighttp v0.81.0 h1:vIdiepUT7P/WtJRdfh8mjzvSqJRVF8/vl9GWtUNQlHQ=
-go.opentelemetry.io/collector/config/confighttp v0.81.0/go.mod h1:I54THsffkpv//O7bUHw+0bXxjYdvyL6IHg5ksgYez8I=
-go.opentelemetry.io/collector/config/confignet v0.81.0 h1:Eu8m3eX8GaGhOUc//YXvV4i3cEivxUSxkLnV1U9ydhg=
-go.opentelemetry.io/collector/config/confignet v0.81.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
-go.opentelemetry.io/collector/config/configopaque v0.81.0 h1:MkCAGh0WydRWydETB9FLnuCj9hDPDiz2g4Wxnl53I0w=
-go.opentelemetry.io/collector/config/configopaque v0.81.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
-go.opentelemetry.io/collector/config/configtls v0.81.0 h1:2vt+yOZUvGq5ADqFAxL5ONm1ACuGXDSs87AWT54Ez4M=
-go.opentelemetry.io/collector/config/configtls v0.81.0/go.mod h1:HMHTYBMMgqBpTvnNAhQYmjO7XuoBMe2T4qRHcKluB4Q=
-go.opentelemetry.io/collector/config/internal v0.81.0 h1:wRV2PBnJygdmKpIdt/xfG7zdQvXvHz9L+z8MhGsOji4=
-go.opentelemetry.io/collector/config/internal v0.81.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
-go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4=
-go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY=
-go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas=
-go.opentelemetry.io/collector/consumer v0.81.0/go.mod h1:jS7+gAKdOx3lD3SnaBztBjUVpUYL3ee7fpoqI4p/gT8=
-go.opentelemetry.io/collector/exporter v0.81.0 h1:GLhB8WGrBx+zZSB1HIOx2ivFUMahGtAVO2CC5xbCUHQ=
-go.opentelemetry.io/collector/exporter v0.81.0/go.mod h1:Di4RTzI8uRooVNATIeApNUgmGdNt8XiikUTQLabmZaA=
-go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0 h1:Ri5pj0slm+FUbbG81UIhQaQ992z2+PcT2++4JI32XGI=
-go.opentelemetry.io/collector/extension v0.81.0 h1:Ak7AzZzxTFJxGyVbEklsGzqHyOHW5USiifJilCcRyTU=
-go.opentelemetry.io/collector/extension v0.81.0/go.mod h1:DU2bX8qulS5+OCJZGfvqIwIT/q3sFnEjI2HjJ2LDI/s=
-go.opentelemetry.io/collector/extension/auth v0.81.0 h1:UzVQSG9naJh1hX7hh+HVcvB3n+rpCJXX2BBdUoL/Ybo=
-go.opentelemetry.io/collector/extension/auth v0.81.0/go.mod h1:PaBFcFrzXV+UgM4VZKp6Kn1IiRC/MbEYWxTfIalcIwk=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8=
-go.opentelemetry.io/collector/processor v0.81.0 h1:ypyNV5R0bnN3XGMAsH/q5eNARF5vXtFgSOK9rBWzsLc=
-go.opentelemetry.io/collector/processor v0.81.0/go.mod h1:ZDwO3DVg1VUSA92g0r/o0jYk+T7r9uxgZZ3LABJbC34=
-go.opentelemetry.io/collector/receiver v0.81.0 h1:0c+YtIV7fmd9ev+zmwS9qjx5ASi8cw+gSypu4I7Gugc=
-go.opentelemetry.io/collector/receiver v0.81.0/go.mod h1:q80JkMxVLnk0vWxoTRY2J7F4Qx9069Yy5yxDbZ4JVwk=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 h1:ewVbfATnAeQkwFK3r0dpFKCXcTb8HJKX4AixUioRt+c=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0/go.mod h1:LGuSMVdOq5Zq+CEHF9YBHMaOIUZrzqW7DQGqo9g0dJA=
-go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw=
-go.opentelemetry.io/collector/semconv v0.81.0/go.mod h1:TlYPtzvsXyHOgr5eATi43qEMqwSmIziivJB2uctKswo=
+go.opentelemetry.io/collector v0.82.0 h1:MaKqWT0R4GCdkZDhYWOQkLfoJj9V7GsMbk1gsAuogaw=
+go.opentelemetry.io/collector v0.82.0/go.mod h1:PMmDJkZzC1xpcViHlwMMEVeAnRRl3HYy3nXgD8KJwG0=
+go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4=
+go.opentelemetry.io/collector/component v0.82.0/go.mod h1:jSdGG4L1Ger6ob6lWpr8jmKC2qqC+XZ/gOgu7GUA5xs=
+go.opentelemetry.io/collector/config/configauth v0.82.0 h1:H5xrWyPMotSqajiiH/bay8bpVsT4aq6Vih4OuArXv4Q=
+go.opentelemetry.io/collector/config/configauth v0.82.0/go.mod h1:P0ukmBIUk+HP0O7yfUOKRmPmffneAQgmEL9/iTOo1CU=
+go.opentelemetry.io/collector/config/configcompression v0.82.0 h1:M6a7eiHdBUB8mIioDhWugJfNm7Sw85cvv/OXyTDhtY0=
+go.opentelemetry.io/collector/config/configcompression v0.82.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0 h1:taZWDbtVBm0OOcgnfpVA1X43pmU2oNhj39B2uV3COQk=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0/go.mod h1:NHXHRI40Q7TT/d38DKT30B7DOrVUkj7anEFOD59R9o8=
+go.opentelemetry.io/collector/config/confighttp v0.82.0 h1:2LhyqVTd+Bsr8SgsCq6+q731F81uddK9GwvGhwD/Co0=
+go.opentelemetry.io/collector/config/confighttp v0.82.0/go.mod h1:OHGx/aJqGJ9z2jaBXvaylwkAuiUwikg1/n+RRDpsfOo=
+go.opentelemetry.io/collector/config/confignet v0.82.0 h1:zN9JaFTn7Dth3u5ot6KZJcBZACTEzGqFWYyO5qAlYfo=
+go.opentelemetry.io/collector/config/confignet v0.82.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
+go.opentelemetry.io/collector/config/configopaque v0.82.0 h1:0Ma63QTr4AkODzEABZHtgiU5Dig8SItpHOuB28UnVSw=
+go.opentelemetry.io/collector/config/configopaque v0.82.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
+go.opentelemetry.io/collector/config/configtls v0.82.0 h1:eE/8muTszLlviOGLy5N08BaXLCcYqDW3mKIoKyDDa8o=
+go.opentelemetry.io/collector/config/configtls v0.82.0/go.mod h1:unBTmL1bdpkp9mYEDz7N+Ln4yEwh7Ug74I1HgZMplCk=
+go.opentelemetry.io/collector/config/internal v0.82.0 h1:JnnDARkXrC3OJDsMfQkBgfI0Np4s+18zvoDqZ4OH0+I=
+go.opentelemetry.io/collector/config/internal v0.82.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
+go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4=
+go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s=
+go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE=
+go.opentelemetry.io/collector/consumer v0.82.0/go.mod h1:qrhd0i0Gp0RkihcEXb+7Rb584Kal2NmGH1eA4Zg6puA=
+go.opentelemetry.io/collector/exporter v0.82.0 h1:BWsx4rWfVwlV+qNuevSMm+2Cv6uGZYYZ9CEFqq0q+F4=
+go.opentelemetry.io/collector/exporter v0.82.0/go.mod h1:e3VPpLYVNRaF+G2HuKw6A5hTBMYZ4tgRYYzMusfwFJE=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0 h1:tYCEUQpfyuS/NgrWg9Ulps6f0ffPSCBRTBdK6sXnSaw=
+go.opentelemetry.io/collector/extension v0.82.0 h1:DH4tqrTOz0HmGDJ6FT/jRD2woQf3ugqC6QqSiQdH3wg=
+go.opentelemetry.io/collector/extension v0.82.0/go.mod h1:n7d0XTh7fdyorZWTc+gLpJh78FS7GjRqIjUiW1xdhe0=
+go.opentelemetry.io/collector/extension/auth v0.82.0 h1:iaxwFslRj6mfzs1wVzbnj+gDU2G98IeXW4tcrq78p5s=
+go.opentelemetry.io/collector/extension/auth v0.82.0/go.mod h1:O1xBcb06pKD8g3FadLDvMa1xKZwPGdHQp4CI8vW3RCM=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/processor v0.82.0 h1:DoqVrrnGYThu/h1sOr6E0hR1Fj5nQT4VT0ptFZcltRk=
+go.opentelemetry.io/collector/processor v0.82.0/go.mod h1:B0MtfLWCYNBJ+PXf9k77M2Yn08MKItNB2vuvwhqrtt0=
+go.opentelemetry.io/collector/receiver v0.82.0 h1:bc6jc8jmSgc0/C9zqTqqWOGJFVx0AJ53jiToSmQs2SE=
+go.opentelemetry.io/collector/receiver v0.82.0/go.mod h1:Uh6BgcTmmrA1Bm/GpKGRY6WwQyPio4yEDsYkUo0A5Gk=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 h1:LzcmQ9d7NauTVEWfPNwRwqNd/NBQDi+JU0OHWearcEA=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0/go.mod h1:Qt9Ha/yWaU6ni0XwFslNCBX5zZBQHcnxma/sU1s7LH4=
+go.opentelemetry.io/collector/semconv v0.82.0 h1:WUeT2a+uZjI6kLvwcBaJnGvo7KSQ/9dIFRcxOQdXucc=
+go.opentelemetry.io/collector/semconv v0.82.0/go.mod h1:TlYPtzvsXyHOgr5eATi43qEMqwSmIziivJB2uctKswo=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 h1:mdcNStUIXngF/mH3xxAo4nbR4g65IXqLL1SvYMjz7JQ=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17/go.mod h1:N2Nw/UmmvQn0yCnaUzvsWzTWIeffYIdFteg6mxqCWII=
 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM=
@@ -1657,8 +1657,8 @@ google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K
 google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
 google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
 google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww=
-google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI=
-google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
+google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw=
+google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo=
 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
 google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
 google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
diff --git a/tests/go.mod b/tests/go.mod
index 5bd214023e..94190123fb 100644
--- a/tests/go.mod
+++ b/tests/go.mod
@@ -14,17 +14,17 @@
 	github.com/signalfx/signalfx-go/signalflow/v2 v2.1.0
 	github.com/stretchr/testify v1.8.4
 	github.com/testcontainers/testcontainers-go v0.20.1
-	go.opentelemetry.io/collector/component v0.81.0
-	go.opentelemetry.io/collector/config/configgrpc v0.81.0
-	go.opentelemetry.io/collector/config/confignet v0.81.0
-	go.opentelemetry.io/collector/config/configtls v0.81.0
-	go.opentelemetry.io/collector/confmap v0.81.0
-	go.opentelemetry.io/collector/consumer v0.81.0
-	go.opentelemetry.io/collector/exporter v0.81.0
-	go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0
-	go.opentelemetry.io/collector/pdata v1.0.0-rcv0013
-	go.opentelemetry.io/collector/receiver v0.81.0
-	go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0
+	go.opentelemetry.io/collector/component v0.82.0
+	go.opentelemetry.io/collector/config/configgrpc v0.82.0
+	go.opentelemetry.io/collector/config/confignet v0.82.0
+	go.opentelemetry.io/collector/config/configtls v0.82.0
+	go.opentelemetry.io/collector/confmap v0.82.0
+	go.opentelemetry.io/collector/consumer v0.82.0
+	go.opentelemetry.io/collector/exporter v0.82.0
+	go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0
+	go.opentelemetry.io/collector/pdata v1.0.0-rcv0014
+	go.opentelemetry.io/collector/receiver v0.82.0
+	go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0
 	go.opentelemetry.io/otel/metric v1.16.0
 	go.opentelemetry.io/otel/trace v1.16.0
 	go.uber.org/atomic v1.11.0
@@ -90,7 +90,7 @@
 	github.com/jonboulle/clockwork v0.2.2 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.16.6 // indirect
+	github.com/klauspost/compress v1.16.7 // indirect
 	github.com/knadh/koanf v1.5.0 // indirect
 	github.com/knadh/koanf/v2 v2.0.1 // indirect
 	github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
@@ -135,31 +135,31 @@
 	github.com/xlab/treeprint v1.1.0 // indirect
 	github.com/yusufpapurcu/wmi v1.2.3 // indirect
 	go.opencensus.io v0.24.0 // indirect
-	go.opentelemetry.io/collector v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configauth v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configcompression v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/confighttp v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configopaque v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/configtelemetry v0.81.0 // indirect
-	go.opentelemetry.io/collector/config/internal v0.81.0 // indirect
-	go.opentelemetry.io/collector/extension v0.81.0 // indirect
-	go.opentelemetry.io/collector/extension/auth v0.81.0 // indirect
-	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 // indirect
-	go.opentelemetry.io/collector/processor v0.81.0 // indirect
+	go.opentelemetry.io/collector v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configauth v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configcompression v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/confighttp v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configopaque v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/configtelemetry v0.82.0 // indirect
+	go.opentelemetry.io/collector/config/internal v0.82.0 // indirect
+	go.opentelemetry.io/collector/extension v0.82.0 // indirect
+	go.opentelemetry.io/collector/extension/auth v0.82.0 // indirect
+	go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 // indirect
+	go.opentelemetry.io/collector/processor v0.82.0 // indirect
 	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 // indirect
 	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect
 	go.opentelemetry.io/otel v1.16.0 // indirect
 	go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect
 	golang.org/x/crypto v0.9.0 // indirect
-	golang.org/x/net v0.11.0 // indirect
+	golang.org/x/net v0.12.0 // indirect
 	golang.org/x/oauth2 v0.8.0 // indirect
-	golang.org/x/sys v0.9.0 // indirect
+	golang.org/x/sys v0.10.0 // indirect
 	golang.org/x/term v0.6.0 // indirect
-	golang.org/x/text v0.10.0 // indirect
+	golang.org/x/text v0.11.0 // indirect
 	golang.org/x/time v0.3.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect
-	google.golang.org/grpc v1.56.1 // indirect
+	google.golang.org/grpc v1.56.2 // indirect
 	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/inf.v0 v0.9.1 // indirect
 	k8s.io/component-base v0.27.4 // indirect
diff --git a/tests/go.sum b/tests/go.sum
index 43fd2e27b7..9be4365760 100644
--- a/tests/go.sum
+++ b/tests/go.sum
@@ -909,8 +909,8 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
 github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
-github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk=
-github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
+github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
 github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
 github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
 github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
@@ -1154,51 +1154,51 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
 go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
 go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
 go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/collector v0.81.0 h1:pF+sB8xNXlg/W0a0QTLz4mUWyool1a9toVj8LmLoFqg=
-go.opentelemetry.io/collector v0.81.0/go.mod h1:thuOTBMusXwcTPTwLbs3zwwCOLaaQX2g+Hjf8OObc/w=
-go.opentelemetry.io/collector/component v0.81.0 h1:AKsl6bss/SRrW248GFpmGiiI/4kdemW92Ai/X82CCqY=
-go.opentelemetry.io/collector/component v0.81.0/go.mod h1:+m6/yPiJ7O7Oc/OLfmgUB2mrY1xoUqRj4BsoOtIVpGs=
-go.opentelemetry.io/collector/config/configauth v0.81.0 h1:NIiJuIGOdblN0EIJv64R2mvGhthcYfWuvyCnjk8HRN4=
-go.opentelemetry.io/collector/config/configauth v0.81.0/go.mod h1:2KscbmU+8fIzwiSU9Kku0Tf4b4A1plqFIJXR1DWSaTw=
-go.opentelemetry.io/collector/config/configcompression v0.81.0 h1:Q725pvVH7tR6BP3WK7Ro3pbqMeQdZEV3KeFVHchBxCc=
-go.opentelemetry.io/collector/config/configcompression v0.81.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0 h1:Q2xEE2SGbg79j3TdHT+781eUu/2uUIyrHVJAG9bLpVk=
-go.opentelemetry.io/collector/config/configgrpc v0.81.0/go.mod h1:Frq/l2Ttbvm7cFH3hkxLdhl5TCNHcH6rnkpmi8U2kLY=
-go.opentelemetry.io/collector/config/confighttp v0.81.0 h1:vIdiepUT7P/WtJRdfh8mjzvSqJRVF8/vl9GWtUNQlHQ=
-go.opentelemetry.io/collector/config/confighttp v0.81.0/go.mod h1:I54THsffkpv//O7bUHw+0bXxjYdvyL6IHg5ksgYez8I=
-go.opentelemetry.io/collector/config/confignet v0.81.0 h1:Eu8m3eX8GaGhOUc//YXvV4i3cEivxUSxkLnV1U9ydhg=
-go.opentelemetry.io/collector/config/confignet v0.81.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
-go.opentelemetry.io/collector/config/configopaque v0.81.0 h1:MkCAGh0WydRWydETB9FLnuCj9hDPDiz2g4Wxnl53I0w=
-go.opentelemetry.io/collector/config/configopaque v0.81.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0 h1:j3dhWbAcrfL1n0RmShRJf99X/xIMoPfEShN/5Z8bY0k=
-go.opentelemetry.io/collector/config/configtelemetry v0.81.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
-go.opentelemetry.io/collector/config/configtls v0.81.0 h1:2vt+yOZUvGq5ADqFAxL5ONm1ACuGXDSs87AWT54Ez4M=
-go.opentelemetry.io/collector/config/configtls v0.81.0/go.mod h1:HMHTYBMMgqBpTvnNAhQYmjO7XuoBMe2T4qRHcKluB4Q=
-go.opentelemetry.io/collector/config/internal v0.81.0 h1:wRV2PBnJygdmKpIdt/xfG7zdQvXvHz9L+z8MhGsOji4=
-go.opentelemetry.io/collector/config/internal v0.81.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
-go.opentelemetry.io/collector/confmap v0.81.0 h1:AqweoBGdF3jGM2/KgP5GS6bmN+1aVrEiCy4nPf7IBE4=
-go.opentelemetry.io/collector/confmap v0.81.0/go.mod h1:iCTnTqGgZZJumhJxpY7rrJz9UQ/0zjPmsJz2Z7Tp4RY=
-go.opentelemetry.io/collector/consumer v0.81.0 h1:8R2iCrSzD7T0RtC2Wh4GXxDiqla2vNhDokGW6Bcrfas=
-go.opentelemetry.io/collector/consumer v0.81.0/go.mod h1:jS7+gAKdOx3lD3SnaBztBjUVpUYL3ee7fpoqI4p/gT8=
-go.opentelemetry.io/collector/exporter v0.81.0 h1:GLhB8WGrBx+zZSB1HIOx2ivFUMahGtAVO2CC5xbCUHQ=
-go.opentelemetry.io/collector/exporter v0.81.0/go.mod h1:Di4RTzI8uRooVNATIeApNUgmGdNt8XiikUTQLabmZaA=
-go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0 h1:Ri5pj0slm+FUbbG81UIhQaQ992z2+PcT2++4JI32XGI=
-go.opentelemetry.io/collector/exporter/otlpexporter v0.81.0/go.mod h1:u19TJEy/n35jjU/ie2YOlAL4K1s9rvRKSNaq9JDlBF8=
-go.opentelemetry.io/collector/extension v0.81.0 h1:Ak7AzZzxTFJxGyVbEklsGzqHyOHW5USiifJilCcRyTU=
-go.opentelemetry.io/collector/extension v0.81.0/go.mod h1:DU2bX8qulS5+OCJZGfvqIwIT/q3sFnEjI2HjJ2LDI/s=
-go.opentelemetry.io/collector/extension/auth v0.81.0 h1:UzVQSG9naJh1hX7hh+HVcvB3n+rpCJXX2BBdUoL/Ybo=
-go.opentelemetry.io/collector/extension/auth v0.81.0/go.mod h1:PaBFcFrzXV+UgM4VZKp6Kn1IiRC/MbEYWxTfIalcIwk=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013 h1:tiTUG9X/gEDN1oDYQOBVUFYQfhUG2CvgW9VhBc2uk1U=
-go.opentelemetry.io/collector/featuregate v1.0.0-rcv0013/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013 h1:4sONXE9hAX+4Di8m0bQ/KaoH3Mi+OPt04cXkZ7A8W3k=
-go.opentelemetry.io/collector/pdata v1.0.0-rcv0013/go.mod h1:x09G/4KjEcDKNuWCjC5ZtnuDE0XEqiRwI+yrHSVjIy8=
-go.opentelemetry.io/collector/processor v0.81.0 h1:ypyNV5R0bnN3XGMAsH/q5eNARF5vXtFgSOK9rBWzsLc=
-go.opentelemetry.io/collector/processor v0.81.0/go.mod h1:ZDwO3DVg1VUSA92g0r/o0jYk+T7r9uxgZZ3LABJbC34=
-go.opentelemetry.io/collector/receiver v0.81.0 h1:0c+YtIV7fmd9ev+zmwS9qjx5ASi8cw+gSypu4I7Gugc=
-go.opentelemetry.io/collector/receiver v0.81.0/go.mod h1:q80JkMxVLnk0vWxoTRY2J7F4Qx9069Yy5yxDbZ4JVwk=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0 h1:ewVbfATnAeQkwFK3r0dpFKCXcTb8HJKX4AixUioRt+c=
-go.opentelemetry.io/collector/receiver/otlpreceiver v0.81.0/go.mod h1:LGuSMVdOq5Zq+CEHF9YBHMaOIUZrzqW7DQGqo9g0dJA=
-go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw=
+go.opentelemetry.io/collector v0.82.0 h1:MaKqWT0R4GCdkZDhYWOQkLfoJj9V7GsMbk1gsAuogaw=
+go.opentelemetry.io/collector v0.82.0/go.mod h1:PMmDJkZzC1xpcViHlwMMEVeAnRRl3HYy3nXgD8KJwG0=
+go.opentelemetry.io/collector/component v0.82.0 h1:ID9nOGKBf5G0avhuYQlTzmwAyIMvh9B+tlckLE/4qw4=
+go.opentelemetry.io/collector/component v0.82.0/go.mod h1:jSdGG4L1Ger6ob6lWpr8jmKC2qqC+XZ/gOgu7GUA5xs=
+go.opentelemetry.io/collector/config/configauth v0.82.0 h1:H5xrWyPMotSqajiiH/bay8bpVsT4aq6Vih4OuArXv4Q=
+go.opentelemetry.io/collector/config/configauth v0.82.0/go.mod h1:P0ukmBIUk+HP0O7yfUOKRmPmffneAQgmEL9/iTOo1CU=
+go.opentelemetry.io/collector/config/configcompression v0.82.0 h1:M6a7eiHdBUB8mIioDhWugJfNm7Sw85cvv/OXyTDhtY0=
+go.opentelemetry.io/collector/config/configcompression v0.82.0/go.mod h1:xhHm1sEH7BTECAJo1xn64NMxeIvZGKdVGdSKUUc+YuM=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0 h1:taZWDbtVBm0OOcgnfpVA1X43pmU2oNhj39B2uV3COQk=
+go.opentelemetry.io/collector/config/configgrpc v0.82.0/go.mod h1:NHXHRI40Q7TT/d38DKT30B7DOrVUkj7anEFOD59R9o8=
+go.opentelemetry.io/collector/config/confighttp v0.82.0 h1:2LhyqVTd+Bsr8SgsCq6+q731F81uddK9GwvGhwD/Co0=
+go.opentelemetry.io/collector/config/confighttp v0.82.0/go.mod h1:OHGx/aJqGJ9z2jaBXvaylwkAuiUwikg1/n+RRDpsfOo=
+go.opentelemetry.io/collector/config/confignet v0.82.0 h1:zN9JaFTn7Dth3u5ot6KZJcBZACTEzGqFWYyO5qAlYfo=
+go.opentelemetry.io/collector/config/confignet v0.82.0/go.mod h1:unOg7BZvpt6T5xsf+LyeOQvUhD8ld/2AbfOsmUZ/bPM=
+go.opentelemetry.io/collector/config/configopaque v0.82.0 h1:0Ma63QTr4AkODzEABZHtgiU5Dig8SItpHOuB28UnVSw=
+go.opentelemetry.io/collector/config/configopaque v0.82.0/go.mod h1:pM1oy6gasukw3H6jAvc9Q9OtFaaY2IbfeuwCPAjOgXc=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0 h1:Zln2K4S5gBDcOpBNIzM0cZS5P6cohEYstHngVvIbGBY=
+go.opentelemetry.io/collector/config/configtelemetry v0.82.0/go.mod h1:KEYQRiYJdx38iZkvcLKBZWH9fK4NeafxBwGRrRKMgyA=
+go.opentelemetry.io/collector/config/configtls v0.82.0 h1:eE/8muTszLlviOGLy5N08BaXLCcYqDW3mKIoKyDDa8o=
+go.opentelemetry.io/collector/config/configtls v0.82.0/go.mod h1:unBTmL1bdpkp9mYEDz7N+Ln4yEwh7Ug74I1HgZMplCk=
+go.opentelemetry.io/collector/config/internal v0.82.0 h1:JnnDARkXrC3OJDsMfQkBgfI0Np4s+18zvoDqZ4OH0+I=
+go.opentelemetry.io/collector/config/internal v0.82.0/go.mod h1:RKcLV1gQxhgwx+6rlPYsvGMq1RZNne3UeOUZkHxJnIg=
+go.opentelemetry.io/collector/confmap v0.82.0 h1:s1Rd8jz21DGlLJfED0Py9VaEq2qPWmWwWy5MriDCX+4=
+go.opentelemetry.io/collector/confmap v0.82.0/go.mod h1:IS/PoUYHETtxV6+fJammTkCxxa4LEwK2u4Cx/bVCH/s=
+go.opentelemetry.io/collector/consumer v0.82.0 h1:vZecylW6bpaphetSTjCLgwXLxSYQ6oe/kzwkx4iF5oE=
+go.opentelemetry.io/collector/consumer v0.82.0/go.mod h1:qrhd0i0Gp0RkihcEXb+7Rb584Kal2NmGH1eA4Zg6puA=
+go.opentelemetry.io/collector/exporter v0.82.0 h1:BWsx4rWfVwlV+qNuevSMm+2Cv6uGZYYZ9CEFqq0q+F4=
+go.opentelemetry.io/collector/exporter v0.82.0/go.mod h1:e3VPpLYVNRaF+G2HuKw6A5hTBMYZ4tgRYYzMusfwFJE=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0 h1:tYCEUQpfyuS/NgrWg9Ulps6f0ffPSCBRTBdK6sXnSaw=
+go.opentelemetry.io/collector/exporter/otlpexporter v0.82.0/go.mod h1:CGeXJuRYxrzTtJUHlpLPHirzcmGq5qbcPff0ec+If14=
+go.opentelemetry.io/collector/extension v0.82.0 h1:DH4tqrTOz0HmGDJ6FT/jRD2woQf3ugqC6QqSiQdH3wg=
+go.opentelemetry.io/collector/extension v0.82.0/go.mod h1:n7d0XTh7fdyorZWTc+gLpJh78FS7GjRqIjUiW1xdhe0=
+go.opentelemetry.io/collector/extension/auth v0.82.0 h1:iaxwFslRj6mfzs1wVzbnj+gDU2G98IeXW4tcrq78p5s=
+go.opentelemetry.io/collector/extension/auth v0.82.0/go.mod h1:O1xBcb06pKD8g3FadLDvMa1xKZwPGdHQp4CI8vW3RCM=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014 h1:C9o0mbP0MyygqFnKueVQK/v9jef6zvuttmTGlKaqhgw=
+go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014/go.mod h1:0mE3mDLmUrOXVoNsuvj+7dV14h/9HFl/Fy9YTLoLObo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/processor v0.82.0 h1:DoqVrrnGYThu/h1sOr6E0hR1Fj5nQT4VT0ptFZcltRk=
+go.opentelemetry.io/collector/processor v0.82.0/go.mod h1:B0MtfLWCYNBJ+PXf9k77M2Yn08MKItNB2vuvwhqrtt0=
+go.opentelemetry.io/collector/receiver v0.82.0 h1:bc6jc8jmSgc0/C9zqTqqWOGJFVx0AJ53jiToSmQs2SE=
+go.opentelemetry.io/collector/receiver v0.82.0/go.mod h1:Uh6BgcTmmrA1Bm/GpKGRY6WwQyPio4yEDsYkUo0A5Gk=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0 h1:LzcmQ9d7NauTVEWfPNwRwqNd/NBQDi+JU0OHWearcEA=
+go.opentelemetry.io/collector/receiver/otlpreceiver v0.82.0/go.mod h1:Qt9Ha/yWaU6ni0XwFslNCBX5zZBQHcnxma/sU1s7LH4=
+go.opentelemetry.io/collector/semconv v0.82.0 h1:WUeT2a+uZjI6kLvwcBaJnGvo7KSQ/9dIFRcxOQdXucc=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17 h1:mdcNStUIXngF/mH3xxAo4nbR4g65IXqLL1SvYMjz7JQ=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.1-0.20230612162650-64be7e574a17/go.mod h1:N2Nw/UmmvQn0yCnaUzvsWzTWIeffYIdFteg6mxqCWII=
 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM=
@@ -1377,8 +1377,9 @@ golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
 golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw=
@@ -1398,8 +1399,8 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
-golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
-golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
+golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1612,8 +1613,8 @@ google.golang.org/grpc v1.52.0/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v
 google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw=
 google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g=
 google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8=
-google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ=
-google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
+google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI=
+google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
 google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
 google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
diff --git a/tests/receivers/prometheus/name_normalization_flag_test.go b/tests/receivers/prometheus/name_normalization_flag_test.go
index 7c13fbefe1..bc1b5c4c28 100644
--- a/tests/receivers/prometheus/name_normalization_flag_test.go
+++ b/tests/receivers/prometheus/name_normalization_flag_test.go
@@ -41,15 +41,15 @@ func TestNameNormalization(t *testing.T) {
 		resourceMetricsFilename string
 		builder                 testutils.CollectorBuilder
 	}{
-		{"without flag", "non_normalized_httpd.yaml", nil},
-		{"enabled flag", "normalized_httpd.yaml",
+		{"default", "non_normalized_httpd.yaml", nil},
+		{"trim_metric_suffixes_true", "normalized_httpd.yaml",
 			func(collector testutils.Collector) testutils.Collector {
-				return collector.WithArgs("--feature-gates=+pkg.translator.prometheus.NormalizeName")
+				return collector.WithArgs("--set=receivers.prometheus.trim_metric_suffixes=true")
 			},
 		},
-		{"disabled flag", "non_normalized_httpd.yaml",
+		{"trim_metric_suffixes_false", "non_normalized_httpd.yaml",
 			func(collector testutils.Collector) testutils.Collector {
-				return collector.WithArgs("--feature-gates=-pkg.translator.prometheus.NormalizeName")
+				return collector.WithArgs("--set=receivers.prometheus.trim_metric_suffixes=false")
 			},
 		},
 	} {
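Note on the test change above: with the v0.82.0 bump, the test no longer toggles the pkg.translator.prometheus.NormalizeName feature gate; metric-name normalization is now controlled per receiver via the prometheus receiver's trim_metric_suffixes setting, which the test drives through --set=receivers.prometheus.trim_metric_suffixes=<bool>. As a minimal sketch of the equivalent collector YAML (the scrape job name and target below are illustrative assumptions, not taken from the test fixtures):

  receivers:
    prometheus:
      trim_metric_suffixes: true   # trims type/unit suffixes such as _total, matching normalized_httpd.yaml
      config:
        scrape_configs:
          - job_name: httpd            # hypothetical scrape job
            static_configs:
              - targets: ["localhost:8080"]

Leaving trim_metric_suffixes unset or false (the "default" case in the test table) keeps the suffixed, non-normalized names expected by non_normalized_httpd.yaml.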