Implement Summary Metric in Prometheus RemoteWrite Exporter #2083

Merged
20 changes: 19 additions & 1 deletion exporter/prometheusremotewriteexporter/exporter.go
@@ -123,7 +123,12 @@ func (prwe *PrwExporter) PushMetrics(ctx context.Context, md pdata.Metrics) (int
case *otlp.Metric_DoubleHistogram, *otlp.Metric_IntHistogram:
if err := prwe.handleHistogramMetric(tsMap, metric); err != nil {
dropped++
errs = append(errs, err)
errs = append(errs, consumererror.Permanent(err))
}
case *otlp.Metric_DoubleSummary:
if err := prwe.handleSummaryMetric(tsMap, metric); err != nil {
dropped++
errs = append(errs, consumererror.Permanent(err))
}
default:
dropped++
@@ -209,6 +214,19 @@ func (prwe *PrwExporter) handleHistogramMetric(tsMap map[string]*prompb.TimeSeri
return nil
}

// handleSummaryMetric processes data points in a single OTLP summary metric by mapping the sum, count and each
// quantile of every data point as a Sample, and adding each Sample to its corresponding TimeSeries.
// tsMap and metric cannot be nil.
func (prwe *PrwExporter) handleSummaryMetric(tsMap map[string]*prompb.TimeSeries, metric *otlp.Metric) error {
if metric.GetDoubleSummary().GetDataPoints() == nil {
return fmt.Errorf("nil data point. %s is dropped", metric.GetName())
}
for _, pt := range metric.GetDoubleSummary().GetDataPoints() {
addSingleDoubleSummaryDataPoint(pt, metric, prwe.namespace, tsMap, prwe.externalLabels)
}
return nil
}
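
For context, a summary metric with a nil DataPoints slice is rejected here and, in PushMetrics above, wrapped in consumererror.Permanent and counted as dropped. A minimal sketch of that failure path, assuming a hypothetical already-constructed *PrwExporter named prwe (the metric name is illustrative only):

	// Hypothetical summary metric with no data points.
	emptySummary := &otlp.Metric{
		Name: "empty_summary",
		Data: &otlp.Metric_DoubleSummary{
			DoubleSummary: &otlp.DoubleSummary{DataPoints: nil},
		},
	}
	tsMap := map[string]*prompb.TimeSeries{}
	// Returns a non-nil error because the summary has no data points;
	// PushMetrics wraps it in consumererror.Permanent and increments dropped.
	err := prwe.handleSummaryMetric(tsMap, emptySummary)
	_ = err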

// export sends a Snappy-compressed WriteRequest containing TimeSeries to a remote write endpoint in order
func (prwe *PrwExporter) export(ctx context.Context, tsMap map[string]*prompb.TimeSeries) error {
// Calls the helper function to convert the TsMap to the desired format
45 changes: 45 additions & 0 deletions exporter/prometheusremotewriteexporter/exporter_test.go
@@ -329,6 +329,20 @@ func Test_PushMetrics(t *testing.T) {
}
doubleHistogramBatch := pdata.MetricsFromOtlp(doubleHistogramMetric)

doubleSummaryMetric := []*otlp.ResourceMetrics{
{
InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{
{
Metrics: []*otlp.Metric{
validMetrics1[validDoubleSummary],
validMetrics2[validDoubleSummary],
},
},
},
},
}
doubleSummaryBatch := pdata.MetricsFromOtlp(doubleSummaryMetric)

// len(BucketCount) > len(ExplicitBounds)
unmatchedBoundBucketIntHistMetric := []*otlp.ResourceMetrics{
{
@@ -435,6 +449,19 @@ func Test_PushMetrics(t *testing.T) {
}
nilDataPointDoubleHistogramBatch := pdata.MetricsFromOtlp(nilDataPointDoubleHistogramMetric)

nilDataPointDoubleSummaryMetric := []*otlp.ResourceMetrics{
{
InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{
{
Metrics: []*otlp.Metric{
errorMetrics[nilDataPointDoubleSummary],
},
},
},
},
}
nilDataPointDoubleSummaryBatch := pdata.MetricsFromOtlp(nilDataPointDoubleSummaryMetric)

checkFunc := func(t *testing.T, r *http.Request, expected int) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
@@ -553,6 +580,15 @@ func Test_PushMetrics(t *testing.T) {
0,
false,
},
{
"doubleSummary_case",
&doubleSummaryBatch,
checkFunc,
10,
http.StatusAccepted,
0,
false,
},
{
"unmatchedBoundBucketIntHist_case",
&unmatchedBoundBucketIntHistBatch,
@@ -634,6 +670,15 @@ func Test_PushMetrics(t *testing.T) {
nilDataPointIntHistogramBatch.MetricCount(),
true,
},
{
"nilDataPointDoubleSummary_case",
&nilDataPointDoubleSummaryBatch,
checkFunc,
0,
http.StatusAccepted,
nilDataPointDoubleSummaryBatch.MetricCount(),
true,
},
}

for _, tt := range tests {
59 changes: 50 additions & 9 deletions exporter/prometheusremotewriteexporter/helper.go
@@ -31,15 +31,16 @@ import (
)

const (
nameStr = "__name__"
sumStr = "_sum"
countStr = "_count"
bucketStr = "_bucket"
leStr = "le"
pInfStr = "+Inf"
totalStr = "total"
delimeter = "_"
keyStr = "key"
nameStr = "__name__"
sumStr = "_sum"
countStr = "_count"
bucketStr = "_bucket"
leStr = "le"
quantileStr = "quantile"
pInfStr = "+Inf"
totalStr = "total"
delimeter = "_"
keyStr = "key"
)

// ByLabelName enables the usage of sort.Sort() with a slice of labels
@@ -72,6 +73,8 @@ func validateMetrics(metric *otlp.Metric) bool {
case *otlp.Metric_IntHistogram:
return metric.GetIntHistogram() != nil && metric.GetIntHistogram().GetAggregationTemporality() ==
otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE
case *otlp.Metric_DoubleSummary:
return metric.GetDoubleSummary() != nil
}
return false
}
@@ -425,3 +428,41 @@ func addSingleDoubleHistogramDataPoint(pt *otlp.DoubleHistogramDataPoint, metric
infLabels := createLabelSet(pt.GetLabels(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr)
addSample(tsMap, infBucket, infLabels, metric)
}

// addSingleDoubleSummaryDataPoint converts pt to len(QuantileValues) + 2 samples.
func addSingleDoubleSummaryDataPoint(pt *otlp.DoubleSummaryDataPoint, metric *otlp.Metric, namespace string,
tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) {
if pt == nil {
return
}
time := convertTimeStamp(pt.TimeUnixNano)
// sum and count of the summary should append suffix to baseName
baseName := getPromMetricName(metric, namespace)
// treat sum as a sample in an individual TimeSeries
sum := &prompb.Sample{
Value: pt.GetSum(),
Timestamp: time,
}

sumlabels := createLabelSet(pt.GetLabels(), externalLabels, nameStr, baseName+sumStr)
addSample(tsMap, sum, sumlabels, metric)

// treat count as a sample in an individual TimeSeries
count := &prompb.Sample{
Value: float64(pt.GetCount()),
Timestamp: time,
}
countlabels := createLabelSet(pt.GetLabels(), externalLabels, nameStr, baseName+countStr)
addSample(tsMap, count, countlabels, metric)

// process each percentile/quantile
for _, qt := range pt.GetQuantileValues() {
quantile := &prompb.Sample{
Value: qt.Value,
Timestamp: time,
}
percentileStr := strconv.FormatFloat(qt.GetQuantile(), 'f', -1, 64)
qtlabels := createLabelSet(pt.GetLabels(), externalLabels, nameStr, baseName, quantileStr, percentileStr)
addSample(tsMap, quantile, qtlabels, metric)
}
}
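
As an illustration of the conversion above, here is a minimal sketch of the series produced for one hypothetical data point (the metric name and values are illustrative; an empty namespace and no external labels are assumed):

	pt := &otlp.DoubleSummaryDataPoint{
		TimeUnixNano: 1600000000000000000,
		Count:        4,
		Sum:          3.5,
		QuantileValues: []*otlp.DoubleSummaryDataPoint_ValueAtQuantile{
			{Quantile: 0.9, Value: 1.2},
		},
	}
	metric := &otlp.Metric{
		Name: "request_duration",
		Data: &otlp.Metric_DoubleSummary{
			DoubleSummary: &otlp.DoubleSummary{DataPoints: []*otlp.DoubleSummaryDataPoint{pt}},
		},
	}
	tsMap := map[string]*prompb.TimeSeries{}
	addSingleDoubleSummaryDataPoint(pt, metric, "", tsMap, nil)
	// tsMap should now contain three TimeSeries:
	//   request_duration_sum              value 3.5
	//   request_duration_count            value 4
	//   request_duration{quantile="0.9"}  value 1.2
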
63 changes: 63 additions & 0 deletions exporter/prometheusremotewriteexporter/testutil_test.go
@@ -79,12 +79,17 @@ var (
bounds = []float64{0.1, 0.5, 0.99}
buckets = []uint64{1, 2, 3}

quantileBounds = []float64{0.15, 0.9, 0.99}
quantileValues = []float64{7, 8, 9}
quantiles = getQuantiles(quantileBounds, quantileValues)

validIntGauge = "valid_IntGauge"
validDoubleGauge = "valid_DoubleGauge"
validIntSum = "valid_IntSum"
validDoubleSum = "valid_DoubleSum"
validIntHistogram = "valid_IntHistogram"
validDoubleHistogram = "valid_DoubleHistogram"
validDoubleSummary = "valid_DoubleSummary"

validIntGaugeDirty = "*valid_IntGauge$"

@@ -163,6 +168,17 @@ var (
},
},
},
validDoubleSummary: {
Name: validDoubleSummary,
Data: &otlp.Metric_DoubleSummary{
DoubleSummary: &otlp.DoubleSummary{
DataPoints: []*otlp.DoubleSummaryDataPoint{
getDoubleSummaryDataPoint(lbs1, time1, floatVal1, uint64(intVal1), quantiles),
nil,
},
},
},
},
}
validMetrics2 = map[string]*otlp.Metric{
validIntGauge: {
@@ -229,6 +245,17 @@ var (
},
},
},
validDoubleSummary: {
Name: validDoubleSummary,
Data: &otlp.Metric_DoubleSummary{
DoubleSummary: &otlp.DoubleSummary{
DataPoints: []*otlp.DoubleSummaryDataPoint{
getDoubleSummaryDataPoint(lbs2, time2, floatVal2, uint64(intVal2), quantiles),
nil,
},
},
},
},
validIntGaugeDirty: {
Name: validIntGaugeDirty,
Data: &otlp.Metric_IntGauge{
@@ -280,6 +307,7 @@ var (
notMatchDoubleSum = "notMatchDoubleSum"
notMatchIntHistogram = "notMatchIntHistogram"
notMatchDoubleHistogram = "notMatchDoubleHistogram"
notMatchDoubleSummary = "notMatchDoubleSummary"

// Category 2: invalid type and temporality combination
invalidIntSum = "invalidIntSum"
@@ -294,6 +322,7 @@ var (
nilDataPointDoubleSum = "nilDataPointDoubleSum"
nilDataPointIntHistogram = "nilDataPointIntHistogram"
nilDataPointDoubleHistogram = "nilDataPointDoubleHistogram"
nilDataPointDoubleSummary = "nilDataPointDoubleSummary"

// different metrics that will not pass validate metrics
invalidMetrics = map[string]*otlp.Metric{
@@ -325,6 +354,10 @@ var (
Name: notMatchDoubleHistogram,
Data: &otlp.Metric_DoubleHistogram{},
},
notMatchDoubleSummary: {
Name: notMatchDoubleSummary,
Data: &otlp.Metric_DoubleSummary{},
},
invalidIntSum: {
Name: invalidIntSum,
Data: &otlp.Metric_IntSum{
Expand Down Expand Up @@ -410,6 +443,14 @@ var (
},
},
},
nilDataPointDoubleSummary: {
Name: nilDataPointDoubleSummary,
Data: &otlp.Metric_DoubleSummary{
DoubleSummary: &otlp.DoubleSummary{
DataPoints: nil,
},
},
},
}
)

@@ -470,6 +511,17 @@ func getDoubleHistogramDataPoint(labels []commonpb.StringKeyValue, ts uint64, su
}
}

func getDoubleSummaryDataPoint(labels []commonpb.StringKeyValue, ts uint64, sum float64, count uint64,
quantiles []*otlp.DoubleSummaryDataPoint_ValueAtQuantile) *otlp.DoubleSummaryDataPoint {
return &otlp.DoubleSummaryDataPoint{
Labels: labels,
TimeUnixNano: ts,
Count: count,
Sum: sum,
QuantileValues: quantiles,
}
}

// Prometheus TimeSeries
func getPromLabels(lbs ...string) []prompb.Label {
pbLbs := prompb.Labels{
@@ -501,3 +553,14 @@ func getTimeSeries(labels []prompb.Label, samples ...prompb.Sample) *prompb.Time
Samples: samples,
}
}

func getQuantiles(bounds []float64, values []float64) []*otlp.DoubleSummaryDataPoint_ValueAtQuantile {
quantiles := make([]*otlp.DoubleSummaryDataPoint_ValueAtQuantile, len(bounds))
for i := 0; i < len(bounds); i++ {
quantiles[i] = &otlp.DoubleSummaryDataPoint_ValueAtQuantile{
Quantile: bounds[i],
Value: values[i],
}
}
return quantiles
}