From 4858d21e988cbf5396e8f21fab247d1286562c51 Mon Sep 17 00:00:00 2001
From: Cijo Thomas
Date: Wed, 22 Jan 2025 16:51:11 -0800
Subject: [PATCH 1/3] Simplify metric::data imports

---
 opentelemetry-otlp/src/exporter/http/mod.rs   |   5 +-
 opentelemetry-proto/src/transform/metrics.rs  |   6 +-
 .../metrics/internal/exponential_histogram.rs |  23 ++--
 .../src/metrics/internal/last_value.rs        |   6 +-
 .../src/metrics/internal/precomputed_sum.rs   |   6 +-
 opentelemetry-sdk/src/metrics/mod.rs          | 103 +++++++++---------
 .../src/testing/metrics/in_memory_exporter.rs |  14 +--
 opentelemetry-stdout/src/metrics/exporter.rs  |  39 ++++---
 8 files changed, 103 insertions(+), 99 deletions(-)

diff --git a/opentelemetry-otlp/src/exporter/http/mod.rs b/opentelemetry-otlp/src/exporter/http/mod.rs
index 4d1af8c880..865225e856 100644
--- a/opentelemetry-otlp/src/exporter/http/mod.rs
+++ b/opentelemetry-otlp/src/exporter/http/mod.rs
@@ -27,6 +27,9 @@ use std::time::Duration;
 #[cfg(feature = "metrics")]
 mod metrics;
 
+#[cfg(feature = "metrics")]
+use opentelemetry_sdk::metrics::data::ResourceMetrics;
+
 #[cfg(feature = "logs")]
 pub(crate) mod logs;
 
@@ -336,7 +339,7 @@ impl OtlpHttpClient {
     #[cfg(feature = "metrics")]
     fn build_metrics_export_body(
         &self,
-        metrics: &mut opentelemetry_sdk::metrics::data::ResourceMetrics,
+        metrics: &mut ResourceMetrics,
     ) -> opentelemetry_sdk::metrics::MetricResult<(Vec<u8>, &'static str)> {
         use opentelemetry_proto::tonic::collector::metrics::v1::ExportMetricsServiceRequest;
 
diff --git a/opentelemetry-proto/src/transform/metrics.rs b/opentelemetry-proto/src/transform/metrics.rs
index cb135ebf83..4b6c1e2de2 100644
--- a/opentelemetry-proto/src/transform/metrics.rs
+++ b/opentelemetry-proto/src/transform/metrics.rs
@@ -11,7 +11,7 @@ pub mod tonic {
     use opentelemetry::{otel_debug, Key, Value};
     use opentelemetry_sdk::metrics::data::{
         self, Exemplar as SdkExemplar, ExponentialHistogram as SdkExponentialHistogram,
-        Gauge as SdkGauge, Histogram as SdkHistogram, Metric as SdkMetric,
+        Gauge as SdkGauge, Histogram as SdkHistogram, Metric as SdkMetric, ResourceMetrics,
         ScopeMetrics as SdkScopeMetrics, Sum as SdkSum,
     };
     use opentelemetry_sdk::metrics::Temporality;
@@ -110,8 +110,8 @@ pub mod tonic {
         }
     }
 
-    impl From<&data::ResourceMetrics> for ExportMetricsServiceRequest {
-        fn from(rm: &data::ResourceMetrics) -> Self {
+    impl From<&ResourceMetrics> for ExportMetricsServiceRequest {
+        fn from(rm: &ResourceMetrics) -> Self {
             ExportMetricsServiceRequest {
                 resource_metrics: vec![TonicResourceMetrics {
                     resource: Some((&rm.resource).into()),
diff --git a/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs b/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
index 170f4a068d..995bc156e7 100644
--- a/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
+++ b/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
@@ -4,7 +4,7 @@ use opentelemetry::{otel_debug, KeyValue};
 use std::sync::OnceLock;
 
 use crate::metrics::{
-    data::{self, Aggregation},
+    data::{self, Aggregation, ExponentialHistogram},
     Temporality,
 };
 
@@ -386,7 +386,7 @@ impl<T: Number> ExpoHistogram<T> {
     fn delta(&self, dest: Option<&mut dyn Aggregation>) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.delta();
 
-        let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::ExponentialHistogram<T>>());
+        let h = dest.and_then(|d| d.as_mut().downcast_mut::<ExponentialHistogram<T>>());
         let mut new_agg = if h.is_none() {
             Some(data::ExponentialHistogram {
                 data_points: vec![],
@@ -443,7 +443,7 @@ impl<T: Number> ExpoHistogram<T> {
     ) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.cumulative();
 
-        let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::ExponentialHistogram<T>>());
+        let h = dest.and_then(|d| d.as_mut().downcast_mut::<ExponentialHistogram<T>>());
         let mut new_agg = if h.is_none() {
             Some(data::ExponentialHistogram {
                 data_points: vec![],
@@ -528,6 +528,7 @@ where
 mod tests {
     use std::{ops::Neg, time::SystemTime};
 
+    use data::{ExponentialHistogram, Gauge, Histogram, Sum};
     use tests::internal::AggregateFns;
 
     use crate::metrics::internal::{self, AggregateBuilder};
@@ -1468,8 +1469,8 @@ mod tests {
            test_name
        );
 
-        if let Some(a) = a.as_any().downcast_ref::<data::Gauge<T>>() {
-            let b = b.as_any().downcast_ref::<data::Gauge<T>>().unwrap();
+        if let Some(a) = a.as_any().downcast_ref::<Gauge<T>>() {
+            let b = b.as_any().downcast_ref::<Gauge<T>>().unwrap();
             assert_eq!(
                 a.data_points.len(),
                 b.data_points.len(),
@@ -1479,8 +1480,8 @@ mod tests {
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
                 assert_gauge_data_points_eq(a, b, "mismatching gauge data points", test_name);
             }
-        } else if let Some(a) = a.as_any().downcast_ref::<data::Sum<T>>() {
-            let b = b.as_any().downcast_ref::<data::Sum<T>>().unwrap();
+        } else if let Some(a) = a.as_any().downcast_ref::<Sum<T>>() {
+            let b = b.as_any().downcast_ref::<Sum<T>>().unwrap();
             assert_eq!(
                 a.temporality, b.temporality,
                 "{} mismatching sum temporality",
@@ -1500,8 +1501,8 @@ mod tests {
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
                 assert_sum_data_points_eq(a, b, "mismatching sum data points", test_name);
             }
-        } else if let Some(a) = a.as_any().downcast_ref::<data::Histogram<T>>() {
-            let b = b.as_any().downcast_ref::<data::Histogram<T>>().unwrap();
+        } else if let Some(a) = a.as_any().downcast_ref::<Histogram<T>>() {
+            let b = b.as_any().downcast_ref::<Histogram<T>>().unwrap();
             assert_eq!(
                 a.temporality, b.temporality,
                 "{}: mismatching hist temporality",
@@ -1516,10 +1517,10 @@ mod tests {
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
                 assert_hist_data_points_eq(a, b, "mismatching hist data points", test_name);
             }
-        } else if let Some(a) = a.as_any().downcast_ref::<data::ExponentialHistogram<T>>() {
+        } else if let Some(a) = a.as_any().downcast_ref::<ExponentialHistogram<T>>() {
             let b = b
                 .as_any()
-                .downcast_ref::<data::ExponentialHistogram<T>>()
+                .downcast_ref::<ExponentialHistogram<T>>()
                 .unwrap();
             assert_eq!(
                 a.temporality, b.temporality,
diff --git a/opentelemetry-sdk/src/metrics/internal/last_value.rs b/opentelemetry-sdk/src/metrics/internal/last_value.rs
index cc2176b897..b14c86047e 100644
--- a/opentelemetry-sdk/src/metrics/internal/last_value.rs
+++ b/opentelemetry-sdk/src/metrics/internal/last_value.rs
@@ -1,5 +1,5 @@
 use crate::metrics::{
-    data::{self, Aggregation, GaugeDataPoint},
+    data::{self, Aggregation, Gauge, GaugeDataPoint},
     Temporality,
 };
 use opentelemetry::KeyValue;
@@ -65,7 +65,7 @@ impl<T: Number> LastValue<T> {
     ) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.delta();
 
-        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Gauge<T>>());
+        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<Gauge<T>>());
         let mut new_agg = if s_data.is_none() {
             Some(data::Gauge {
                 data_points: vec![],
@@ -97,7 +97,7 @@ impl<T: Number> LastValue<T> {
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.cumulative();
-        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Gauge<T>>());
+        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<Gauge<T>>());
         let mut new_agg = if s_data.is_none() {
             Some(data::Gauge {
                 data_points: vec![],
diff --git a/opentelemetry-sdk/src/metrics/internal/precomputed_sum.rs b/opentelemetry-sdk/src/metrics/internal/precomputed_sum.rs
index b2f478e078..c035dbe696 100644
--- a/opentelemetry-sdk/src/metrics/internal/precomputed_sum.rs
+++ b/opentelemetry-sdk/src/metrics/internal/precomputed_sum.rs
@@ -1,6 +1,6 @@
 use opentelemetry::KeyValue;
 
-use crate::metrics::data::{self, Aggregation, SumDataPoint};
+use crate::metrics::data::{self, Aggregation, Sum, SumDataPoint};
 use crate::metrics::Temporality;
 
 use super::aggregate::{AggregateTimeInitiator, AttributeSetFilter};
@@ -40,7 +40,7 @@ impl<T: Number> PrecomputedSum<T> {
     ) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.delta();
 
-        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
+        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<Sum<T>>());
         let mut new_agg = if s_data.is_none() {
             Some(data::Sum {
                 data_points: vec![],
@@ -91,7 +91,7 @@ impl<T: Number> PrecomputedSum<T> {
     ) -> (usize, Option<Box<dyn Aggregation>>) {
         let time = self.init_time.cumulative();
 
-        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
+        let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<Sum<T>>());
         let mut new_agg = if s_data.is_none() {
             Some(data::Sum {
                 data_points: vec![],
diff --git a/opentelemetry-sdk/src/metrics/mod.rs b/opentelemetry-sdk/src/metrics/mod.rs
index a6a53a4f7a..51711daa3f 100644
--- a/opentelemetry-sdk/src/metrics/mod.rs
+++ b/opentelemetry-sdk/src/metrics/mod.rs
@@ -105,10 +105,14 @@ pub enum Temporality {
 mod tests {
     use self::data::{HistogramDataPoint, ScopeMetrics, SumDataPoint};
     use super::*;
+    use crate::metrics::data::Aggregation;
     use crate::metrics::data::ResourceMetrics;
     use crate::testing::metrics::InMemoryMetricExporter;
     use crate::testing::metrics::InMemoryMetricExporterBuilder;
+    use data::Gauge;
     use data::GaugeDataPoint;
+    use data::Histogram;
+    use data::Sum;
     use opentelemetry::metrics::{Counter, Meter, UpDownCounter};
     use opentelemetry::InstrumentationScope;
     use opentelemetry::{metrics::MeterProvider as _, KeyValue};
@@ -225,7 +229,7 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(sum.is_monotonic, "Should produce monotonic.");
@@ -248,7 +252,7 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(sum.is_monotonic, "Should produce monotonic.");
@@ -450,7 +454,7 @@ mod tests {
         for (iter, v) in values_clone.iter().enumerate() {
             test_context.flush_metrics();
 
-            let sum = test_context.get_aggregation::<data::Sum<u64>>("my_observable_counter", None);
+            let sum = test_context.get_aggregation::<Sum<u64>>("my_observable_counter", None);
             assert_eq!(sum.data_points.len(), 1);
             assert!(sum.is_monotonic, "Counter should produce monotonic.");
             if let Temporality::Cumulative = temporality {
@@ -567,7 +571,7 @@ mod tests {
         let sum = metric
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for Counter instruments by default");
 
         // Expecting 1 time-series.
@@ -633,7 +637,7 @@ mod tests {
         let sum1 = metric1
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for Counter instruments by default");
 
         // Expecting 1 time-series.
@@ -653,7 +657,7 @@ mod tests {
         let sum2 = metric2
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for Counter instruments by default");
 
         // Expecting 1 time-series.
@@ -737,7 +741,7 @@ mod tests {
         let sum = metric
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for Counter instruments by default");
 
         // Expecting 1 time-series.
@@ -757,7 +761,7 @@ mod tests {
         let reader = PeriodicReader::builder(exporter.clone()).build();
         let criteria = Instrument::new().name("test_histogram");
         let stream_invalid_aggregation = Stream::new()
-            .aggregation(Aggregation::ExplicitBucketHistogram {
+            .aggregation(aggregation::Aggregation::ExplicitBucketHistogram {
                 boundaries: vec![0.9, 1.9, 1.2, 1.3, 1.4, 1.5], // invalid boundaries
                 record_min_max: false,
             })
@@ -860,7 +864,7 @@ mod tests {
         let sum = metric
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for ObservableCounter instruments by default");
 
         // Expecting 1 time-series only, as the view drops all attributes resulting
@@ -937,7 +941,7 @@ mod tests {
         let sum = metric
             .data
             .as_any()
-            .downcast_ref::<data::Sum<u64>>()
+            .downcast_ref::<Sum<u64>>()
             .expect("Sum aggregation expected for Counter instruments by default");
 
         // Expecting 1 time-series only, as the view drops all attributes resulting
@@ -957,7 +961,7 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<i64>>("my_counter", Some("my_unit"));
+        let sum = test_context.get_aggregation::<Sum<i64>>("my_counter", Some("my_unit"));
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(!sum.is_monotonic, "Should not produce monotonic.");
@@ -980,7 +984,7 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<i64>>("my_counter", Some("my_unit"));
+        let sum = test_context.get_aggregation::<Sum<i64>>("my_counter", Some("my_unit"));
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(!sum.is_monotonic, "Should not produce monotonic.");
@@ -1002,12 +1006,12 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let _ = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let _ = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
         test_context.reset_metrics();
 
         counter.add(5, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(sum.is_monotonic, "Should produce monotonic.");
@@ -1029,12 +1033,12 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let _ = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let _ = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
         test_context.reset_metrics();
 
         counter.add(5, &[]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         assert_eq!(sum.data_points.len(), 1, "Expected only one data point");
         assert!(sum.is_monotonic, "Should produce monotonic.");
@@ -1056,12 +1060,12 @@ mod tests {
         counter.add(50, &[]);
         test_context.flush_metrics();
 
-        let _ = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let _ = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
         test_context.reset_metrics();
 
         counter.add(50, &[KeyValue::new("a", "b")]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         let no_attr_data_point = sum.data_points.iter().find(|x| x.attributes.is_empty());
 
@@ -1092,7 +1096,7 @@ mod tests {
         counter.add(1, &[KeyValue::new("key1", "value2")]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
        // Expecting 2 time-series.
         assert_eq!(sum.data_points.len(), 2);
@@ -1217,7 +1221,7 @@ mod tests {
             match instrument_name {
                 "counter" => {
                     let counter_data =
-                        test_context.get_aggregation::<data::Sum<u64>>("test_counter", None);
+                        test_context.get_aggregation::<Sum<u64>>("test_counter", None);
                     assert_eq!(counter_data.data_points.len(), 2);
                     let zero_attribute_datapoint =
                         find_sum_datapoint_with_no_attributes(&counter_data.data_points)
@@ -1233,7 +1237,7 @@ mod tests {
                 }
                 "updown_counter" => {
                     let updown_counter_data =
-                        test_context.get_aggregation::<data::Sum<i64>>("test_updowncounter", None);
+                        test_context.get_aggregation::<Sum<i64>>("test_updowncounter", None);
                     assert_eq!(updown_counter_data.data_points.len(), 2);
                     let zero_attribute_datapoint =
                         find_sum_datapoint_with_no_attributes(&updown_counter_data.data_points)
@@ -1248,8 +1252,8 @@ mod tests {
                     assert_eq!(data_point1.value, 20);
                 }
                 "histogram" => {
-                    let histogram_data = test_context
-                        .get_aggregation::<data::Histogram<u64>>("test_histogram", None);
+                    let histogram_data =
+                        test_context.get_aggregation::<Histogram<u64>>("test_histogram", None);
                     assert_eq!(histogram_data.data_points.len(), 2);
                     let zero_attribute_datapoint =
                         find_histogram_datapoint_with_no_attributes(&histogram_data.data_points)
@@ -1270,8 +1274,7 @@ mod tests {
                     assert_eq!(data_point1.max, Some(30));
                 }
                 "gauge" => {
-                    let gauge_data =
-                        test_context.get_aggregation::<data::Gauge<i64>>("test_gauge", None);
+                    let gauge_data = test_context.get_aggregation::<Gauge<i64>>("test_gauge", None);
                     assert_eq!(gauge_data.data_points.len(), 2);
                     let zero_attribute_datapoint =
                         find_gauge_datapoint_with_no_attributes(&gauge_data.data_points)
@@ -1370,7 +1373,7 @@ mod tests {
             match instrument_name {
                 "counter" => {
                     let counter_data =
-                        test_context.get_aggregation::<data::Sum<u64>>("test_counter", None);
+                        test_context.get_aggregation::<Sum<u64>>("test_counter", None);
                     assert_eq!(counter_data.data_points.len(), 2);
                     assert!(counter_data.is_monotonic);
                     let zero_attribute_datapoint =
@@ -1387,7 +1390,7 @@ mod tests {
                 }
                 "updown_counter" => {
                     let updown_counter_data =
-                        test_context.get_aggregation::<data::Sum<i64>>("test_updowncounter", None);
+                        test_context.get_aggregation::<Sum<i64>>("test_updowncounter", None);
                     assert_eq!(updown_counter_data.data_points.len(), 2);
                     assert!(!updown_counter_data.is_monotonic);
                     let zero_attribute_datapoint =
@@ -1403,8 +1406,7 @@ mod tests {
                     assert_eq!(data_point1.value, 20);
                 }
                 "gauge" => {
-                    let gauge_data =
-                        test_context.get_aggregation::<data::Gauge<i64>>("test_gauge", None);
+                    let gauge_data = test_context.get_aggregation::<Gauge<i64>>("test_gauge", None);
                     assert_eq!(gauge_data.data_points.len(), 2);
                     let zero_attribute_datapoint =
                         find_gauge_datapoint_with_no_attributes(&gauge_data.data_points)
@@ -1453,8 +1455,7 @@ mod tests {
 
         // Assert
         // We invoke `test_context.flush_metrics()` six times.
-        let sums =
-            test_context.get_from_multiple_aggregations::<data::Sum<u64>>("my_counter", None, 6);
+        let sums = test_context.get_from_multiple_aggregations::<Sum<u64>>("my_counter", None, 6);
 
         let mut sum_zero_attributes = 0;
         let mut sum_key1_value1 = 0;
@@ -1506,8 +1507,7 @@ mod tests {
 
         // Assert
         // We invoke `test_context.flush_metrics()` six times.
-        let sums =
-            test_context.get_from_multiple_aggregations::<data::Sum<f64>>("test_counter", None, 6);
+        let sums = test_context.get_from_multiple_aggregations::<Sum<f64>>("test_counter", None, 6);
 
         let mut sum_zero_attributes = 0.0;
         let mut sum_key1_value1 = 0.0;
@@ -1560,7 +1560,7 @@ mod tests {
 
         // Assert
         // We invoke `test_context.flush_metrics()` six times.
-        let histograms = test_context.get_from_multiple_aggregations::<data::Histogram<u64>>(
+        let histograms = test_context.get_from_multiple_aggregations::<Histogram<u64>>(
            "test_histogram",
            None,
            6,
        );
@@ -1697,7 +1697,7 @@ mod tests {
 
         // Assert
         // We invoke `test_context.flush_metrics()` six times.
-        let histograms = test_context.get_from_multiple_aggregations::<data::Histogram<f64>>(
+        let histograms = test_context.get_from_multiple_aggregations::<Histogram<f64>>(
            "test_histogram",
            None,
            6,
        );
@@ -1827,8 +1827,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let histogram_data =
-            test_context.get_aggregation::<data::Histogram<u64>>("my_histogram", None);
+        let histogram_data = test_context.get_aggregation::<Histogram<u64>>("my_histogram", None);
         // Expecting 2 time-series.
         assert_eq!(histogram_data.data_points.len(), 2);
         if let Temporality::Cumulative = temporality {
@@ -1874,8 +1873,7 @@ mod tests {
 
         test_context.flush_metrics();
 
-        let histogram_data =
-            test_context.get_aggregation::<data::Histogram<u64>>("my_histogram", None);
+        let histogram_data = test_context.get_aggregation::<Histogram<u64>>("my_histogram", None);
         assert_eq!(histogram_data.data_points.len(), 2);
         let data_point1 =
             find_histogram_datapoint_with_key_value(&histogram_data.data_points, "key1", "value1")
@@ -1924,8 +1922,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let histogram_data =
-            test_context.get_aggregation::<data::Histogram<u64>>("test_histogram", None);
+        let histogram_data = test_context.get_aggregation::<Histogram<u64>>("test_histogram", None);
         // Expecting 2 time-series.
         assert_eq!(histogram_data.data_points.len(), 1);
         if let Temporality::Cumulative = temporality {
@@ -1978,7 +1975,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let gauge_data_point = test_context.get_aggregation::<data::Gauge<i64>>("my_gauge", None);
+        let gauge_data_point = test_context.get_aggregation::<Gauge<i64>>("my_gauge", None);
         // Expecting 2 time-series.
         assert_eq!(gauge_data_point.data_points.len(), 2);
 
@@ -2007,7 +2004,7 @@ mod tests {
 
         test_context.flush_metrics();
 
-        let gauge = test_context.get_aggregation::<data::Gauge<i64>>("my_gauge", None);
+        let gauge = test_context.get_aggregation::<Gauge<i64>>("my_gauge", None);
         assert_eq!(gauge.data_points.len(), 2);
         let data_point1 = find_gauge_datapoint_with_key_value(&gauge.data_points, "key1", "value1")
             .expect("datapoint with key1=value1 expected");
@@ -2036,7 +2033,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let gauge = test_context.get_aggregation::<data::Gauge<i64>>("test_observable_gauge", None);
+        let gauge = test_context.get_aggregation::<Gauge<i64>>("test_observable_gauge", None);
         // Expecting 2 time-series.
         let expected_time_series_count = if use_empty_attributes { 3 } else { 2 };
         assert_eq!(gauge.data_points.len(), expected_time_series_count);
@@ -2064,7 +2061,7 @@ mod tests {
 
         test_context.flush_metrics();
 
-        let gauge = test_context.get_aggregation::<data::Gauge<i64>>("test_observable_gauge", None);
+        let gauge = test_context.get_aggregation::<Gauge<i64>>("test_observable_gauge", None);
         assert_eq!(gauge.data_points.len(), expected_time_series_count);
 
         if use_empty_attributes {
@@ -2102,7 +2099,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         // Expecting 2 time-series.
        assert_eq!(sum.data_points.len(), 2);
         assert!(sum.is_monotonic, "Counter should produce monotonic.");
@@ -2139,7 +2136,7 @@ mod tests {
 
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
         assert_eq!(sum.data_points.len(), 2);
         let data_point1 = find_sum_datapoint_with_key_value(&sum.data_points, "key1", "value1")
             .expect("datapoint with key1=value1 expected");
@@ -2179,7 +2176,7 @@ mod tests {
         counter.add(100, &[KeyValue::new("A", "yet_another")]);
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         // Expecting 2002 metric points. (2000 + 1 overflow + Empty attributes)
         assert_eq!(sum.data_points.len(), 2002);
@@ -2273,7 +2270,7 @@ mod tests {
         );
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<u64>>("my_counter", None);
+        let sum = test_context.get_aggregation::<Sum<u64>>("my_counter", None);
 
         // Expecting 1 time-series.
         assert_eq!(sum.data_points.len(), 1);
@@ -2302,7 +2299,7 @@ mod tests {
         test_context.flush_metrics();
 
         // Assert
-        let sum = test_context.get_aggregation::<data::Sum<i64>>("my_updown_counter", None);
+        let sum = test_context.get_aggregation::<Sum<i64>>("my_updown_counter", None);
         // Expecting 2 time-series.
         assert_eq!(sum.data_points.len(), 2);
         assert!(
@@ -2338,7 +2335,7 @@ mod tests {
 
         test_context.flush_metrics();
 
-        let sum = test_context.get_aggregation::<data::Sum<i64>>("my_updown_counter", None);
+        let sum = test_context.get_aggregation::<Sum<i64>>("my_updown_counter", None);
         assert_eq!(sum.data_points.len(), 2);
         let data_point1 = find_sum_datapoint_with_key_value(&sum.data_points, "key1", "value1")
             .expect("datapoint with key1=value1 expected");
@@ -2493,7 +2490,7 @@ mod tests {
             assert!(resource_metrics.is_empty(), "no metrics should be exported");
         }
 
-        fn get_aggregation<T: data::Aggregation>(
+        fn get_aggregation<T: Aggregation>(
             &mut self,
             counter_name: &str,
             unit_name: Option<&str>,
@@ -2536,7 +2533,7 @@ mod tests {
                 .expect("Failed to cast aggregation to expected type")
         }
 
-        fn get_from_multiple_aggregations<T: data::Aggregation>(
+        fn get_from_multiple_aggregations<T: Aggregation>(
             &mut self,
             counter_name: &str,
             unit_name: Option<&str>,
diff --git a/opentelemetry-sdk/src/testing/metrics/in_memory_exporter.rs b/opentelemetry-sdk/src/testing/metrics/in_memory_exporter.rs
index 1d6f9c2754..e7774f2810 100644
--- a/opentelemetry-sdk/src/testing/metrics/in_memory_exporter.rs
+++ b/opentelemetry-sdk/src/testing/metrics/in_memory_exporter.rs
@@ -1,4 +1,4 @@
-use crate::metrics::data;
+use crate::metrics::data::{self, Gauge, Sum};
 use crate::metrics::data::{Histogram, Metric, ResourceMetrics, ScopeMetrics};
 use crate::metrics::exporter::PushMetricExporter;
 use crate::metrics::MetricError;
@@ -213,7 +213,7 @@ impl InMemoryMetricExporter {
                 time: hist.time,
                 temporality: hist.temporality,
             }))
-        } else if let Some(sum) = data.as_any().downcast_ref::<data::Sum<f64>>() {
+        } else if let Some(sum) = data.as_any().downcast_ref::<Sum<f64>>() {
             Some(Box::new(data::Sum {
                 data_points: sum.data_points.clone(),
                 start_time: sum.start_time,
@@ -221,7 +221,7 @@ impl InMemoryMetricExporter {
                 temporality: sum.temporality,
                 is_monotonic: sum.is_monotonic,
             }))
-        } else if let Some(sum) = data.as_any().downcast_ref::<data::Sum<u64>>() {
+        } else if let Some(sum) = data.as_any().downcast_ref::<Sum<u64>>() {
             Some(Box::new(data::Sum {
                 data_points: sum.data_points.clone(),
                 start_time: sum.start_time,
@@ -229,7 +229,7 @@ impl InMemoryMetricExporter {
                 temporality: sum.temporality,
                 is_monotonic: sum.is_monotonic,
             }))
-        } else if let Some(sum) = data.as_any().downcast_ref::<data::Sum<i64>>() {
+        } else if let Some(sum) = data.as_any().downcast_ref::<Sum<i64>>() {
             Some(Box::new(data::Sum {
                 data_points: sum.data_points.clone(),
                 start_time: sum.start_time,
@@ -237,19 +237,19 @@ impl InMemoryMetricExporter {
                 temporality: sum.temporality,
                 is_monotonic: sum.is_monotonic,
             }))
-        } else if let Some(gauge) = data.as_any().downcast_ref::<data::Gauge<f64>>() {
+        } else if let Some(gauge) = data.as_any().downcast_ref::<Gauge<f64>>() {
             Some(Box::new(data::Gauge {
                 data_points: gauge.data_points.clone(),
                 start_time: gauge.start_time,
                 time: gauge.time,
             }))
-        } else if let Some(gauge) = data.as_any().downcast_ref::<data::Gauge<u64>>() {
+        } else if let Some(gauge) = data.as_any().downcast_ref::<Gauge<u64>>() {
             Some(Box::new(data::Gauge {
                 data_points: gauge.data_points.clone(),
                 start_time: gauge.start_time,
                 time: gauge.time,
             }))
-        } else if let Some(gauge) = data.as_any().downcast_ref::<data::Gauge<i64>>() {
+        } else if let Some(gauge) = data.as_any().downcast_ref::<Gauge<i64>>() {
             Some(Box::new(data::Gauge {
                 data_points: gauge.data_points.clone(),
                 start_time: gauge.start_time,
diff --git a/opentelemetry-stdout/src/metrics/exporter.rs b/opentelemetry-stdout/src/metrics/exporter.rs
index 54feb33c41..bf1944b29c 100644
--- a/opentelemetry-stdout/src/metrics/exporter.rs
+++ b/opentelemetry-stdout/src/metrics/exporter.rs
@@ -2,7 +2,10 @@ use async_trait::async_trait;
 use chrono::{DateTime, Utc};
 use core::{f64, fmt};
 use opentelemetry_sdk::metrics::{
-    data::{self, ScopeMetrics},
+    data::{
+        self, ExponentialHistogram, Gauge, GaugeDataPoint, Histogram, HistogramDataPoint,
+        ResourceMetrics, ScopeMetrics, Sum, SumDataPoint,
+    },
     exporter::PushMetricExporter,
 };
 use opentelemetry_sdk::metrics::{MetricError, MetricResult, Temporality};
@@ -36,7 +39,7 @@ impl fmt::Debug for MetricExporter {
 #[async_trait]
 impl PushMetricExporter for MetricExporter {
     /// Write Metrics to stdout
-    async fn export(&self, metrics: &mut data::ResourceMetrics) -> MetricResult<()> {
+    async fn export(&self, metrics: &mut ResourceMetrics) -> MetricResult<()> {
         if self.is_shutdown.load(atomic::Ordering::SeqCst) {
             Err(MetricError::Other("exporter is shut down".into()))
         } else {
@@ -97,34 +100,34 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
             println!("\t\tUnit : {}", &metric.unit);
 
             let data = metric.data.as_any();
-            if let Some(hist) = data.downcast_ref::<data::Histogram<f64>>() {
+            if let Some(hist) = data.downcast_ref::<Histogram<f64>>() {
                 println!("\t\tType : Histogram");
                 print_histogram(hist);
-            } else if let Some(hist) = data.downcast_ref::<data::Histogram<u64>>() {
+            } else if let Some(hist) = data.downcast_ref::<Histogram<u64>>() {
                 println!("\t\tType : Histogram");
                 print_histogram(hist);
-            } else if let Some(_hist) = data.downcast_ref::<data::ExponentialHistogram<f64>>() {
+            } else if let Some(_hist) = data.downcast_ref::<ExponentialHistogram<f64>>() {
                 println!("\t\tType : Exponential Histogram");
                 // TODO
-            } else if let Some(_hist) = data.downcast_ref::<data::ExponentialHistogram<u64>>() {
+            } else if let Some(_hist) = data.downcast_ref::<ExponentialHistogram<u64>>() {
                 println!("\t\tType : Exponential Histogram");
                 // TODO
-            } else if let Some(sum) = data.downcast_ref::<data::Sum<f64>>() {
+            } else if let Some(sum) = data.downcast_ref::<Sum<f64>>() {
                 println!("\t\tType : Sum");
                 print_sum(sum);
-            } else if let Some(sum) = data.downcast_ref::<data::Sum<u64>>() {
+            } else if let Some(sum) = data.downcast_ref::<Sum<u64>>() {
                 println!("\t\tType : Sum");
                 print_sum(sum);
-            } else if let Some(sum) = data.downcast_ref::<data::Sum<i64>>() {
+            } else if let Some(sum) = data.downcast_ref::<Sum<i64>>() {
                 println!("\t\tType : Sum");
                 print_sum(sum);
-            } else if let Some(gauge) = data.downcast_ref::<data::Gauge<f64>>() {
+            } else if let Some(gauge) = data.downcast_ref::<Gauge<f64>>() {
                 println!("\t\tType : Gauge");
                 print_gauge(gauge);
-            } else if let Some(gauge) = data.downcast_ref::<data::Gauge<u64>>() {
+            } else if let Some(gauge) = data.downcast_ref::<Gauge<u64>>() {
                 println!("\t\tType : Gauge");
                 print_gauge(gauge);
-            } else if let Some(gauge) = data.downcast_ref::<data::Gauge<i64>>() {
+            } else if let Some(gauge) = data.downcast_ref::<Gauge<i64>>() {
                 println!("\t\tType : Gauge");
                 print_gauge(gauge);
             } else {
@@ -134,7 +137,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     }
 }
 
-fn print_sum<T: fmt::Debug>(sum: &data::Sum<T>) {
+fn print_sum<T: fmt::Debug>(sum: &Sum<T>) {
     println!("\t\tSum DataPoints");
     println!("\t\tMonotonic : {}", sum.is_monotonic);
     if sum.temporality == Temporality::Cumulative {
@@ -155,7 +158,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     print_sum_data_points(&sum.data_points);
 }
 
-fn print_gauge<T: fmt::Debug>(gauge: &data::Gauge<T>) {
+fn print_gauge<T: fmt::Debug>(gauge: &Gauge<T>) {
     println!("\t\tGauge DataPoints");
     if let Some(start_time) = gauge.start_time {
         let datetime: DateTime<Utc> = start_time.into();
@@ -172,7 +175,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     print_gauge_data_points(&gauge.data_points);
 }
 
-fn print_histogram<T: fmt::Debug>(histogram: &data::Histogram<T>) {
+fn print_histogram<T: fmt::Debug>(histogram: &Histogram<T>) {
     if histogram.temporality == Temporality::Cumulative {
         println!("\t\tTemporality : Cumulative");
     } else {
@@ -192,7 +195,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     print_hist_data_points(&histogram.data_points);
 }
 
-fn print_sum_data_points<T: fmt::Debug>(data_points: &[data::SumDataPoint<T>]) {
+fn print_sum_data_points<T: fmt::Debug>(data_points: &[SumDataPoint<T>]) {
     for (i, data_point) in data_points.iter().enumerate() {
         println!("\t\tDataPoint #{}", i);
         println!("\t\t\tValue : {:#?}", data_point.value);
@@ -203,7 +206,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     }
 }
 
-fn print_gauge_data_points<T: fmt::Debug>(data_points: &[data::GaugeDataPoint<T>]) {
+fn print_gauge_data_points<T: fmt::Debug>(data_points: &[GaugeDataPoint<T>]) {
     for (i, data_point) in data_points.iter().enumerate() {
         println!("\t\tDataPoint #{}", i);
         println!("\t\t\tValue : {:#?}", data_point.value);
@@ -214,7 +217,7 @@ fn print_metrics(metrics: &[ScopeMetrics]) {
     }
 }
 
-fn print_hist_data_points<T: fmt::Debug>(data_points: &[data::HistogramDataPoint<T>]) {
+fn print_hist_data_points<T: fmt::Debug>(data_points: &[HistogramDataPoint<T>]) {
     for (i, data_point) in data_points.iter().enumerate() {
         println!("\t\tDataPoint #{}", i);
         println!("\t\t\tCount : {}", data_point.count);

From 28ea47906fc47a0eb7a4f3d071ba29856db65ee4 Mon Sep 17 00:00:00 2001
From: Cijo Thomas
Date: Wed, 22 Jan 2025 17:02:12 -0800
Subject: [PATCH 2/3] fix unused

---
 opentelemetry-stdout/src/metrics/exporter.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/opentelemetry-stdout/src/metrics/exporter.rs b/opentelemetry-stdout/src/metrics/exporter.rs
index bf1944b29c..839784755c 100644
--- a/opentelemetry-stdout/src/metrics/exporter.rs
+++ b/opentelemetry-stdout/src/metrics/exporter.rs
@@ -3,7 +3,7 @@ use chrono::{DateTime, Utc};
 use core::{f64, fmt};
 use opentelemetry_sdk::metrics::{
     data::{
-        self, ExponentialHistogram, Gauge, GaugeDataPoint, Histogram, HistogramDataPoint,
+        ExponentialHistogram, Gauge, GaugeDataPoint, Histogram, HistogramDataPoint,
         ResourceMetrics, ScopeMetrics, Sum, SumDataPoint,
     },
     exporter::PushMetricExporter,

From 76380cbcc0bb68ebef6cf134e86021b76ddcb96f Mon Sep 17 00:00:00 2001
From: Cijo Thomas
Date: Wed, 22 Jan 2025 17:08:35 -0800
Subject: [PATCH 3/3] unused

---
 opentelemetry-proto/src/transform/metrics.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/opentelemetry-proto/src/transform/metrics.rs b/opentelemetry-proto/src/transform/metrics.rs
index 4b6c1e2de2..680da03b3f 100644
--- a/opentelemetry-proto/src/transform/metrics.rs
+++ b/opentelemetry-proto/src/transform/metrics.rs
@@ -10,7 +10,7 @@ pub mod tonic {
 
     use opentelemetry::{otel_debug, Key, Value};
     use opentelemetry_sdk::metrics::data::{
-        self, Exemplar as SdkExemplar, ExponentialHistogram as SdkExponentialHistogram,
+        Exemplar as SdkExemplar, ExponentialHistogram as SdkExponentialHistogram,
         Gauge as SdkGauge, Histogram as SdkHistogram, Metric as SdkMetric, ResourceMetrics,
         ScopeMetrics as SdkScopeMetrics, Sum as SdkSum,
     };
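The whole series applies one pattern: bring the `metrics::data` types into scope once per module, then use the short names instead of repeating the `data::` (or fully qualified) path at every call site. A minimal sketch of the resulting style, using the same `ResourceMetrics`/`Sum` fields the exporters above already read; the `total_u64_sum` helper is hypothetical and only illustrates the pattern:

```rust
// Hypothetical helper, not part of the patch: adds up every u64 sum data point
// in an exported batch, using the imported short names rather than the old
// `opentelemetry_sdk::metrics::data::...` spelling at each use site.
use opentelemetry_sdk::metrics::data::{ResourceMetrics, Sum};

fn total_u64_sum(rm: &ResourceMetrics) -> u64 {
    rm.scope_metrics
        .iter()
        .flat_map(|scope| scope.metrics.iter())
        // Same downcast the exporters above perform: inspect the type-erased
        // aggregation as a concrete `Sum<u64>`.
        .filter_map(|metric| metric.data.as_any().downcast_ref::<Sum<u64>>())
        .flat_map(|sum| sum.data_points.iter())
        .map(|dp| dp.value)
        .sum()
}
```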