Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
Expand Down Expand Up @@ -471,11 +473,8 @@ private Labels convertAttributes(
? filterAllowedResourceAttributeKeys(resource)
: Collections.emptyList();

Map<String, String> labelNameToValue = new HashMap<>();
attributes.forEach(
(key, value) ->
labelNameToValue.put(
convertLabelName(key.getKey()), toLabelValue(key.getType(), value)));
Map<String, String> labelNameToValue = new LinkedHashMap<>();
normalizeAndMergeAttributeLabels(labelNameToValue, attributes);
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is just looking at attributes, but I think the collisions also have to consider collisions potentially caused by normalization of other sources of attributes: resource, scope, additionalAttributes


for (int i = 0; i < additionalAttributes.length; i += 2) {
labelNameToValue.putIfAbsent(
Expand All @@ -501,13 +500,18 @@ private Labels convertAttributes(

if (resource != null) {
Attributes resourceAttributes = resource.getAttributes();
Map<String, List<OriginalLabelKeyValue>> resourceLabelNameToValues = new LinkedHashMap<>();
for (AttributeKey attributeKey : allowedAttributeKeys) {
Object attributeValue = resourceAttributes.get(attributeKey);
if (attributeValue != null) {
labelNameToValue.putIfAbsent(
convertLabelName(attributeKey.getKey()), attributeValue.toString());
addNormalizedLabelValue(
resourceLabelNameToValues,
convertLabelName(attributeKey.getKey()),
attributeKey.getKey(),
attributeValue.toString());
}
}
joinCollidingLabelValues(resourceLabelNameToValues).forEach(labelNameToValue::putIfAbsent);
}

String[] names = new String[labelNameToValue.size()];
Expand All @@ -523,6 +527,50 @@ private Labels convertAttributes(
return Labels.of(names, values);
}

/**
 * Normalizes every attribute key to its Prometheus label name and merges values whose keys
 * normalize to the same label into a single entry of {@code labelNameToValue}.
 *
 * @param labelNameToValue destination map; merged labels are added via {@code putAll}
 * @param attributes source attributes whose keys may collide after normalization
 */
private static void normalizeAndMergeAttributeLabels(
    Map<String, String> labelNameToValue, Attributes attributes) {
  // Group the rendered values by normalized label name, remembering the original key so
  // colliding values can later be ordered deterministically.
  Map<String, List<OriginalLabelKeyValue>> byLabelName = new LinkedHashMap<>();
  attributes.forEach(
      (attributeKey, attributeValue) -> {
        String originalKey = attributeKey.getKey();
        addNormalizedLabelValue(
            byLabelName,
            convertLabelName(originalKey),
            originalKey,
            toLabelValue(attributeKey.getType(), attributeValue));
      });
  labelNameToValue.putAll(joinCollidingLabelValues(byLabelName));
}

/** Collect the original attribute key and rendered value for one normalized label name. */
private static void addNormalizedLabelValue(
    Map<String, List<OriginalLabelKeyValue>> labelNameToValues,
    String labelName,
    String originalKey,
    String value) {
  // Lazily create the bucket for this normalized name, then record the pair.
  List<OriginalLabelKeyValue> bucket =
      labelNameToValues.computeIfAbsent(labelName, unused -> new ArrayList<>());
  bucket.add(new OriginalLabelKeyValue(originalKey, value));
}

/**
 * Joins the values collected for each normalized label name into a single ';'-separated string,
 * ordering colliding values lexicographically by their original attribute key.
 *
 * @param labelNameToValues values grouped by normalized label name
 * @return label name to joined value, preserving the grouping map's iteration order
 */
private static Map<String, String> joinCollidingLabelValues(
    Map<String, List<OriginalLabelKeyValue>> labelNameToValues) {
  Map<String, String> joinedLabels = new LinkedHashMap<>();
  for (Map.Entry<String, List<OriginalLabelKeyValue>> entry : labelNameToValues.entrySet()) {
    List<OriginalLabelKeyValue> keyValues = entry.getValue();
    // Deterministic output regardless of attribute iteration order.
    keyValues.sort(Comparator.comparing(OriginalLabelKeyValue::originalKey));
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < keyValues.size(); i++) {
      if (i > 0) {
        joined.append(';');
      }
      joined.append(keyValues.get(i).value());
    }
    joinedLabels.put(entry.getKey(), joined.toString());
  }
  return joinedLabels;
}

private List<AttributeKey<?>> filterAllowedResourceAttributeKeys(@Nullable Resource resource) {
requireNonNull(
allowedResourceAttributesFilter,
Expand Down Expand Up @@ -739,4 +787,23 @@ public static String toJsonValidStr(String str) {
sb.append('"');
return sb.toString();
}

/** Stores the original attribute key and rendered value for one normalized label collision. */
private static final class OriginalLabelKeyValue {

  // Attribute key as it appeared before Prometheus label-name normalization;
  // used to order colliding values deterministically.
  private final String key;
  // Rendered label value for this attribute.
  private final String renderedValue;

  private OriginalLabelKeyValue(String key, String renderedValue) {
    this.key = key;
    this.renderedValue = renderedValue;
  }

  /** Returns the attribute key before normalization. */
  private String originalKey() {
    return key;
  }

  /** Returns the rendered label value. */
  private String value() {
    return renderedValue;
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import io.opentelemetry.api.common.KeyValue;
import io.opentelemetry.api.common.Value;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
Expand Down Expand Up @@ -48,9 +50,12 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
Expand Down Expand Up @@ -285,9 +290,66 @@ private static Stream<Arguments> resourceAttributesAdditionArgs() {
"my_metric_units",
"cluster=\"mycluster\",otel_scope_foo=\"bar\",otel_scope_name=\"scope\",otel_scope_schema_url=\"schemaUrl\",otel_scope_version=\"version\""));

arguments.add(
Arguments.of(
createSampleMetricData(
"my.metric",
"units",
MetricDataType.LONG_SUM,
Attributes.empty(),
Comment thread
ADITYA-CODE-SOURCE marked this conversation as resolved.
Resource.create(
createMapAttributes("foo_bar", "b", "foo-bar", "c", "foo.bar", "a"))),
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I eventually put together that createMapAttributes intentionally creates a non-default Attributes implementation to confirm the sorting logic.

  • Let's add a comment explaining this because it's not immediately apparent why we're going out of the way to add a new attributes implementation
  • Is it possible / reasonable to avoid the extra allocation / overhead by detecting when the Attributes implementation is default ArrayBackedAttributes?

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thinking about this more, I don't think we should try to check if Attributes is instance of ArrayBackedAttributes. But I am still worried about the excessive new allocation here which occurs regardless of whether a collision occurs. To mitigate, let's adjust to a strategy which assumes that no collision occurs, but if one does occur, triggers the code path with all this additional new machinery.

/* allowedResourceAttributesFilter= */ Predicates.startsWith("foo"),
"my_metric_units",
"foo_bar=\"c;a;b\",otel_scope_foo=\"bar\",otel_scope_name=\"scope\",otel_scope_schema_url=\"schemaUrl\",otel_scope_version=\"version\""));

arguments.add(
Arguments.of(
createSampleMetricData(
"my.metric",
"units",
MetricDataType.LONG_SUM,
createMapAttributes("foo-bar", "metric"),
Resource.create(createMapAttributes("foo.bar", "resource"))),
/* allowedResourceAttributesFilter= */ Predicates.startsWith("foo"),
"my_metric_units",
"foo_bar=\"metric\",otel_scope_foo=\"bar\",otel_scope_name=\"scope\",otel_scope_schema_url=\"schemaUrl\",otel_scope_version=\"version\""));

return arguments.stream();
}

@ParameterizedTest
@MethodSource("metricAttributeCollisionArgs")
void metricAttributeCollisionsAreMergedAndSorted(
    Attributes attributes, String expectedLabelName, String expectedLabelValue) {
  MetricData metricData =
      createSampleMetricData("sample", "1", MetricDataType.LONG_SUM, attributes, null);

  Labels labels = extractCounterLabels(metricData);
  List<String> labelNames = extractLabelNames(labels);

  // All colliding attribute keys collapse into one merged label, followed by the scope labels.
  assertThat(labelNames)
      .containsExactly(
          expectedLabelName,
          "otel_scope_foo",
          "otel_scope_name",
          "otel_scope_schema_url",
          "otel_scope_version");
  // The merged label comes first and carries the ';'-joined values sorted by original key.
  assertThat(labels.getName(0)).isEqualTo(expectedLabelName);
  assertThat(labels.get(expectedLabelName)).isEqualTo(expectedLabelValue);
}

/** Cases: colliding keys merge into one label whose values are sorted by original key. */
private static Stream<Arguments> metricAttributeCollisionArgs() {
  List<Arguments> arguments = new ArrayList<>();
  // Three distinct keys all normalize to foo_bar; values ordered foo-bar, foo.bar, foo_bar.
  arguments.add(
      Arguments.of(
          createMapAttributes("foo_bar", "b", "foo-bar", "c", "foo.bar", "a"),
          "foo_bar",
          "c;a;b"));
  // A value containing the join separator is carried through unchanged.
  arguments.add(
      Arguments.of(createMapAttributes("foo.bar", "a;b", "foo-bar", "c"), "foo_bar", "c;a;b"));
  // Five keys normalizing to a_b, joined in lexicographic order of the original keys.
  arguments.add(
      Arguments.of(
          createMapAttributes("a.b", "1", "a-b", "2", "a_b", "3", "a/b", "4", "a@b", "5"),
          "a_b",
          "2;1;4;5;3"));
  return arguments.stream();
}

@Test
void prometheusNameCollisionTest_Issue6277() {
// NOTE: Metrics with the same resolved prometheus name should merge. However,
Expand Down Expand Up @@ -555,4 +617,70 @@ void validateCacheIsBounded() {
// it never saw those resources before.
assertThat(predicateCalledCount.get()).isEqualTo(2);
}

/**
 * Builds a {@link MapAttributes} instance — intentionally a non-default {@link Attributes}
 * implementation backed by a {@link LinkedHashMap} — so iteration follows insertion order.
 * This lets tests verify that colliding label values are sorted by original key rather than
 * silently relying on the iteration order of the SDK's default implementation.
 *
 * @param keyValues alternating key/value pairs; must have even length
 * @throws IllegalArgumentException if an odd number of elements is supplied
 */
private static Attributes createMapAttributes(String... keyValues) {
  if (keyValues.length % 2 != 0) {
    throw new IllegalArgumentException(
        "keyValues must contain an even number of elements, got " + keyValues.length);
  }
  Map<AttributeKey<?>, Object> map = new LinkedHashMap<>();
  for (int i = 0; i < keyValues.length; i += 2) {
    map.put(stringKey(keyValues[i]), keyValues[i + 1]);
  }
  return new MapAttributes(map);
}

/** Converts the metric and returns the labels of the first counter data point. */
private Labels extractCounterLabels(MetricData metricData) {
  MetricSnapshots snapshots = converter.convert(Collections.singletonList(metricData));

  Optional<MetricSnapshot> counterSnapshot =
      snapshots.stream().filter(CounterSnapshot.class::isInstance).findFirst();
  assertThat(counterSnapshot).isPresent();
  return counterSnapshot.get().getDataPoints().get(0).getLabels();
}

/** Returns the label names in the order they appear in {@code labels}. */
private static List<String> extractLabelNames(Labels labels) {
  int size = labels.size();
  List<String> labelNames = new ArrayList<>(size);
  for (int index = 0; index < size; index++) {
    labelNames.add(labels.getName(index));
  }
  return labelNames;
}

/**
 * Minimal {@link Attributes} implementation backed by a {@link LinkedHashMap}, so iteration
 * follows insertion order. Used to exercise the exporter's collision-sorting logic with an
 * attribute ordering the SDK's default implementation would not produce — TODO confirm the
 * default implementation's ordering differs in the cases under test.
 */
private static final class MapAttributes implements Attributes {

  private final LinkedHashMap<AttributeKey<?>, Object> map;

  private MapAttributes(Map<AttributeKey<?>, Object> map) {
    // Defensive copy keeps this instance independent of the caller's map.
    this.map = new LinkedHashMap<>(map);
  }

  @Nullable
  @Override
  // Narrowed from class level: this cast is the only unchecked operation. Safe as long as
  // callers store each value under a key of the matching type.
  @SuppressWarnings("unchecked")
  public <T> T get(AttributeKey<T> key) {
    return (T) map.get(key);
  }

  @Override
  public void forEach(BiConsumer<? super AttributeKey<?>, ? super Object> consumer) {
    map.forEach(consumer);
  }

  @Override
  public int size() {
    return map.size();
  }

  @Override
  public boolean isEmpty() {
    return map.isEmpty();
  }

  @Override
  public Map<AttributeKey<?>, Object> asMap() {
    return map;
  }

  @Override
  public AttributesBuilder toBuilder() {
    // Not needed by these tests.
    throw new UnsupportedOperationException("not supported");
  }
}
}
Loading