Prometheus exporter: handle colliding metric attribute keys (#5717)
Co-authored-by: Jack Berg <jberg@newrelic.com>
dashpole and jack-berg authored Sep 20, 2023
1 parent 9b081e1 commit 9a93155
Showing 3 changed files with 182 additions and 11 deletions.
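To illustrate the problem being fixed: two OpenTelemetry attribute keys can differ only in characters that the Prometheus name sanitizer rewrites, so both map to the same label name. The sketch below is not part of the commit (the class name is made up), but the Attributes calls are the standard OpenTelemetry API.

```java
import static io.opentelemetry.api.common.AttributeKey.stringKey;

import io.opentelemetry.api.common.Attributes;

// Hypothetical example: "foo.bar" and "foo_bar" both sanitize to the label name "foo_bar".
public class CollidingKeysExample {
  public static void main(String[] args) {
    Attributes attributes = Attributes.of(stringKey("foo.bar"), "a", stringKey("foo_bar"), "b");

    // Previously the exporter wrote a foo_bar label for each entry, producing duplicate
    // label names in the exposition output; with this change it writes foo_bar="a;b" once.
    System.out.println(attributes);
  }
}
```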
Serializer.java
@@ -25,6 +25,7 @@
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
@@ -58,12 +59,18 @@
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;

/** Serializes metrics into Prometheus exposition formats. */
// Adapted from
// https://github.com/prometheus/client_java/blob/master/simpleclient_common/src/main/java/io/prometheus/client/exporter/common/TextFormat.java
abstract class Serializer {

private static final Logger LOGGER = Logger.getLogger(Serializer.class.getName());
private static final ThrottlingLogger THROTTLING_LOGGER = new ThrottlingLogger(LOGGER);

static Serializer create(@Nullable String acceptHeader, Predicate<String> filter) {
if (acceptHeader == null) {
return new Prometheus004Serializer(filter);
@@ -445,27 +452,58 @@ private static void writeScopeNameAndVersion(
  private static void writeAttributePairs(
      Writer writer, boolean initialComma, Attributes attributes) throws IOException {
    try {
+     // This logic handles colliding attribute keys by joining the values,
+     // separated by a semicolon. It relies on the attributes being sorted, so that
+     // colliding attribute keys are in subsequent iterations of the for loop.
      attributes.forEach(
          new BiConsumer<AttributeKey<?>, Object>() {
-           private boolean prefixWithComma = initialComma;
+           boolean initialAttribute = true;
+           String previousKey = "";
+           String previousValue = "";

            @Override
            public void accept(AttributeKey<?> key, Object value) {
              try {
-               if (prefixWithComma) {
-                 writer.write(',');
+               String sanitizedKey = NameSanitizer.INSTANCE.apply(key.getKey());
+               int compare = sanitizedKey.compareTo(previousKey);
+               if (compare == 0) {
+                 // This key collides with the previous one. Append the value
+                 // to the previous value instead of writing the key again.
+                 writer.write(';');
                } else {
-                 prefixWithComma = true;
+                 if (compare < 0) {
+                   THROTTLING_LOGGER.log(
+                       Level.WARNING,
+                       "Dropping out-of-order attribute "
+                           + sanitizedKey
+                           + "="
+                           + value
+                           + ", which occurred after "
+                           + previousKey
+                           + ". This can occur when an alternative Attribute implementation is used.");
+                 }
+                 if (!initialAttribute) {
+                   writer.write('"');
+                 }
+                 if (initialComma || !initialAttribute) {
+                   writer.write(',');
+                 }
+                 writer.write(sanitizedKey);
+                 writer.write("=\"");
                }
-               writer.write(NameSanitizer.INSTANCE.apply(key.getKey()));
-               writer.write("=\"");
-               writeEscapedLabelValue(writer, value.toString());
-               writer.write('"');
+               String stringValue = value.toString();
+               writeEscapedLabelValue(writer, stringValue);
+               previousKey = sanitizedKey;
+               previousValue = stringValue;
+               initialAttribute = false;
              } catch (IOException e) {
                throw new UncheckedIOException(e);
              }
            }
          });
+     if (!attributes.isEmpty()) {
+       writer.write('"');
+     }
    } catch (UncheckedIOException e) {
      throw e.getCause();
    }
  }
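For readers skimming the hunk above, the following standalone sketch (not part of the commit) shows the join-on-collision behavior it implements; sanitize() is a simplified stand-in for NameSanitizer, and the class name is made up.

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class CollidingLabelSketch {

  // Simplified stand-in for NameSanitizer: Prometheus label names are limited to
  // [a-zA-Z0-9_], so other characters such as '.' are rewritten to '_'.
  static String sanitize(String key) {
    return key.replaceAll("[^a-zA-Z0-9_]", "_");
  }

  public static void main(String[] args) {
    Map<String, String> attributes = new LinkedHashMap<>();
    attributes.put("foo.bar", "a");
    attributes.put("foo_bar", "b");

    // Collect values by sanitized label name, joining colliding values with ';'.
    // The real serializer instead relies on the attributes being sorted so that
    // colliding keys arrive in consecutive iterations.
    Map<String, String> labels = new TreeMap<>();
    attributes.forEach(
        (key, value) ->
            labels.merge(sanitize(key), value, (previous, next) -> previous + ";" + next));

    System.out.println(labels); // {foo_bar=a;b}
  }
}
```

Running it prints {foo_bar=a;b}, matching the foo_bar="a;b" label asserted in the updated tests below.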
SerializerTest.java
@@ -11,6 +11,7 @@
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_HISTOGRAM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_LONG_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_COLLIDING_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_NO_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.LONG_GAUGE;
@@ -22,15 +23,36 @@
import static io.opentelemetry.exporter.prometheus.TestConstants.SUMMARY;
import static org.assertj.core.api.Assertions.assertThat;

import io.github.netmikey.logunit.api.LogCapturer;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.resources.Resource;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import org.jetbrains.annotations.Nullable;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class SerializerTest {

@RegisterExtension
private final LogCapturer logCapturer =
LogCapturer.create().captureForLogger(Serializer.class.getName());

@Test
void prometheus004() {
// Same output as prometheus client library except for these changes which are compatible with
@@ -53,7 +75,8 @@ void prometheus004() {
CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES,
CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE,
DOUBLE_GAUGE_NO_ATTRIBUTES,
- DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES))
+ DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES,
+ DOUBLE_GAUGE_COLLIDING_ATTRIBUTES))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
@@ -103,7 +126,11 @@ void prometheus004() {
+ "double_gauge_no_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\"} 7.0 1633950672000\n"
+ "# TYPE double_gauge_multiple_attributes_seconds gauge\n"
+ "# HELP double_gauge_multiple_attributes_seconds unused\n"
+ "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672000\n");
+ "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672000\n"
+ "# TYPE double_gauge_colliding_attributes_seconds gauge\n"
+ "# HELP double_gauge_colliding_attributes_seconds unused\n"
+ "double_gauge_colliding_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",foo_bar=\"a;b\",type=\"dgma\"} 8.0 1633950672000\n");
assertThat(logCapturer.size()).isZero();
}

@Test
@@ -124,7 +151,8 @@ void openMetrics() {
CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES,
CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE,
DOUBLE_GAUGE_NO_ATTRIBUTES,
- DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES))
+ DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES,
+ DOUBLE_GAUGE_COLLIDING_ATTRIBUTES))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
@@ -175,7 +203,52 @@ void openMetrics() {
+ "# TYPE double_gauge_multiple_attributes_seconds gauge\n"
+ "# HELP double_gauge_multiple_attributes_seconds unused\n"
+ "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672.000\n"
+ "# TYPE double_gauge_colliding_attributes_seconds gauge\n"
+ "# HELP double_gauge_colliding_attributes_seconds unused\n"
+ "double_gauge_colliding_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",foo_bar=\"a;b\",type=\"dgma\"} 8.0 1633950672.000\n"
+ "# EOF\n");
assertThat(logCapturer.size()).isZero();
}

@Test
@SuppressLogger(Serializer.class)
void outOfOrderedAttributes() {
// Uses an alternative Attributes implementation that keeps entries in insertion order rather
// than sorted by key. All attributes are retained, a warning is logged, and b_key and b.key
// are not merged.
LinkedHashMap<AttributeKey<?>, Object> attributesMap = new LinkedHashMap<>();
attributesMap.put(AttributeKey.stringKey("b_key"), "val1");
attributesMap.put(AttributeKey.stringKey("a_key"), "val2");
attributesMap.put(AttributeKey.stringKey("b.key"), "val3");
Attributes attributes = new MapAttributes(attributesMap);

MetricData metricData =
ImmutableMetricData.createDoubleSum(
Resource.builder().put("kr", "vr").build(),
InstrumentationScopeInfo.builder("scope").setVersion("1.0.0").build(),
"sum",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L, 1633950672000000000L, attributes, 5))));

assertThat(serialize004(metricData))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE otel_scope_info info\n"
+ "# HELP otel_scope_info Scope metadata\n"
+ "otel_scope_info{otel_scope_name=\"scope\",otel_scope_version=\"1.0.0\"} 1\n"
+ "# TYPE sum_seconds_total counter\n"
+ "# HELP sum_seconds_total description\n"
+ "sum_seconds_total{otel_scope_name=\"scope\",otel_scope_version=\"1.0.0\",b_key=\"val1\",a_key=\"val2\",b_key=\"val3\"} 5.0 1633950672000\n");
logCapturer.assertContains(
"Dropping out-of-order attribute a_key=val2, which occurred after b_key. This can occur when an alternative Attribute implementation is used.");
}

private static String serialize004(MetricData... metrics) {
@@ -197,4 +270,46 @@ private static String serializeOpenMetrics(MetricData... metrics) {
throw new UncheckedIOException(e);
}
}

@SuppressWarnings("unchecked")
private static class MapAttributes implements Attributes {

private final LinkedHashMap<AttributeKey<?>, Object> map;

@SuppressWarnings("NonApiType")
private MapAttributes(LinkedHashMap<AttributeKey<?>, Object> map) {
this.map = map;
}

@Nullable
@Override
public <T> T get(AttributeKey<T> key) {
return (T) map.get(key);
}

@Override
public void forEach(BiConsumer<? super AttributeKey<?>, ? super Object> consumer) {
map.forEach(consumer);
}

@Override
public int size() {
return map.size();
}

@Override
public boolean isEmpty() {
return map.isEmpty();
}

@Override
public Map<AttributeKey<?>, Object> asMap() {
return map;
}

@Override
public AttributesBuilder toBuilder() {
throw new UnsupportedOperationException("not supported");
}
}
}
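To make the ordering assumption in the outOfOrderedAttributes test concrete: the default Attributes implementation in the OpenTelemetry API iterates entries sorted by key, which is what the serializer's collision handling relies on, while the MapAttributes helper above deliberately iterates in insertion order to exercise the out-of-order warning. A small sketch (not part of the commit; the class name is made up):

```java
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;

public class AttributeOrderingSketch {
  public static void main(String[] args) {
    Attributes attributes =
        Attributes.builder()
            .put(AttributeKey.stringKey("b_key"), "val1")
            .put(AttributeKey.stringKey("a_key"), "val2")
            .put(AttributeKey.stringKey("b.key"), "val3")
            .build();

    // Prints a_key, b.key, b_key: lexicographic key order regardless of insertion order,
    // so b.key and b_key (which both sanitize to b_key) end up adjacent and can be merged.
    attributes.forEach((key, value) -> System.out.println(key.getKey() + "=" + value));
  }
}
```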
TestConstants.java
@@ -357,4 +357,22 @@ private TestConstants() {
1633950672000000000L,
Attributes.of(TYPE, "dgma", stringKey("animal"), "bear"),
8))));
static final MetricData DOUBLE_GAUGE_COLLIDING_ATTRIBUTES =
ImmutableMetricData.createDoubleGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"double.gauge.colliding.attributes",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(
TYPE, "dgma", stringKey("foo.bar"), "a", stringKey("foo_bar"), "b"),
8))));
}
