From 110ba21092827ecce1e76d734343eb2655dbc215 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 15:10:40 +0200 Subject: [PATCH 01/16] Replacing proto by wire compilation and serialization processes --- disk-buffering/build.gradle.kts | 10 +++++- .../mapping/common/ByteStringMapper.java | 6 ++-- .../serializers/LogRecordDataSerializer.java | 10 +++--- .../serializers/MetricDataSerializer.java | 10 +++--- .../serializers/SpanDataSerializer.java | 10 +++--- .../reader/DelimitedProtoStreamReader.java | 33 +++++++++++++++++-- 6 files changed, 61 insertions(+), 18 deletions(-) diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index 0ae73bc30..c250be2bc 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts @@ -5,6 +5,7 @@ plugins { id("otel.publish-conventions") id("me.champeau.jmh") version "0.7.1" id("ru.vyarus.animalsniffer") version "1.7.1" + id("com.squareup.wire") version "4.8.1" } description = "Exporter implementations that store signals on disk" @@ -18,7 +19,6 @@ java { dependencies { api("io.opentelemetry:opentelemetry-sdk") implementation("io.opentelemetry:opentelemetry-exporter-otlp-common") - implementation("io.opentelemetry.proto:opentelemetry-proto:0.20.0-alpha") compileOnly("com.google.auto.value:auto-value-annotations") annotationProcessor("com.google.auto.value:auto-value") signature("com.toasttab.android:gummy-bears-api-24:0.5.1@signature") @@ -47,3 +47,11 @@ jmh { timeOnIteration.set("5s") timeUnit.set("ms") } + +wire { + java {} + + sourcePath { + srcJar("io.opentelemetry.proto:opentelemetry-proto:0.20.0-alpha") + } +} diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java index a04fbe0b7..ca8366e8a 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ByteStringMapper.java @@ -5,7 +5,7 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.common; -import com.google.protobuf.ByteString; +import okio.ByteString; public final class ByteStringMapper { @@ -16,10 +16,10 @@ public static ByteStringMapper getInstance() { } public ByteString stringToProto(String source) { - return ByteString.copyFromUtf8(source); + return ByteString.encodeUtf8(source); } public String protoToString(ByteString source) { - return source.toStringUtf8(); + return source.utf8(); } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java index 6d0451ef7..c6dbcf786 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java @@ -5,7 +5,7 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.google.protobuf.InvalidProtocolBufferException; +import 
com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.ProtoLogsDataMapper; import io.opentelemetry.proto.logs.v1.LogsData; import io.opentelemetry.sdk.logs.data.LogRecordData; @@ -27,7 +27,9 @@ static LogRecordDataSerializer getInstance() { public byte[] serialize(Collection logRecordData) { LogsData proto = ProtoLogsDataMapper.getInstance().toProto(logRecordData); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - proto.writeDelimitedTo(out); + int size = LogsData.ADAPTER.encodedSize(proto); + ProtoAdapter.UINT32.encode(out, size); + proto.encode(out); return out.toByteArray(); } catch (IOException e) { throw new IllegalStateException(e); @@ -37,8 +39,8 @@ public byte[] serialize(Collection logRecordData) { @Override public List deserialize(byte[] source) { try { - return ProtoLogsDataMapper.getInstance().fromProto(LogsData.parseFrom(source)); - } catch (InvalidProtocolBufferException e) { + return ProtoLogsDataMapper.getInstance().fromProto(LogsData.ADAPTER.decode(source)); + } catch (IOException e) { throw new IllegalArgumentException(e); } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java index 9e5722fbd..e45c72a0f 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java @@ -5,7 +5,7 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.google.protobuf.InvalidProtocolBufferException; +import com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics.ProtoMetricsDataMapper; import io.opentelemetry.proto.metrics.v1.MetricsData; import io.opentelemetry.sdk.metrics.data.MetricData; @@ -27,7 +27,9 @@ static MetricDataSerializer getInstance() { public byte[] serialize(Collection metricData) { MetricsData proto = ProtoMetricsDataMapper.getInstance().toProto(metricData); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - proto.writeDelimitedTo(out); + int size = MetricsData.ADAPTER.encodedSize(proto); + ProtoAdapter.UINT32.encode(out, size); + proto.encode(out); return out.toByteArray(); } catch (IOException e) { throw new IllegalStateException(e); @@ -37,8 +39,8 @@ public byte[] serialize(Collection metricData) { @Override public List deserialize(byte[] source) { try { - return ProtoMetricsDataMapper.getInstance().fromProto(MetricsData.parseFrom(source)); - } catch (InvalidProtocolBufferException e) { + return ProtoMetricsDataMapper.getInstance().fromProto(MetricsData.ADAPTER.decode(source)); + } catch (IOException e) { throw new IllegalArgumentException(e); } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java index 1dc02034b..36b293d52 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java +++ 
b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java @@ -5,7 +5,7 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.google.protobuf.InvalidProtocolBufferException; +import com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.ProtoSpansDataMapper; import io.opentelemetry.proto.trace.v1.TracesData; import io.opentelemetry.sdk.trace.data.SpanData; @@ -27,7 +27,9 @@ static SpanDataSerializer getInstance() { public byte[] serialize(Collection spanData) { TracesData proto = ProtoSpansDataMapper.getInstance().toProto(spanData); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - proto.writeDelimitedTo(out); + int size = TracesData.ADAPTER.encodedSize(proto); + ProtoAdapter.UINT32.encode(out, size); + proto.encode(out); return out.toByteArray(); } catch (IOException e) { throw new IllegalStateException(e); @@ -37,8 +39,8 @@ public byte[] serialize(Collection spanData) { @Override public List deserialize(byte[] source) { try { - return ProtoSpansDataMapper.getInstance().fromProto(TracesData.parseFrom(source)); - } catch (InvalidProtocolBufferException e) { + return ProtoSpansDataMapper.getInstance().fromProto(TracesData.ADAPTER.decode(source)); + } catch (IOException e) { throw new IllegalArgumentException(e); } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java index ccdb0f1ed..cb9281c66 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java @@ -5,7 +5,6 @@ package io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader; -import com.google.protobuf.CodedInputStream; import io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils.CountingInputStream; import java.io.IOException; import java.io.InputStream; @@ -40,12 +39,42 @@ private int getNextItemSize() { if (firstByte == -1) { return 0; } - return CodedInputStream.readRawVarint32(firstByte, inputStream); + return readRawVarint32(firstByte); } catch (IOException e) { return 0; } } + private int readRawVarint32(final int firstByte) throws IOException { + if ((firstByte & 0x80) == 0) { + return firstByte; + } + + int result = firstByte & 0x7f; + int offset = 7; + for (; offset < 32; offset += 7) { + final int b = inputStream.read(); + if (b == -1) { + throw new IllegalStateException(); + } + result |= (b & 0x7f) << offset; + if ((b & 0x80) == 0) { + return result; + } + } + // Keep reading up to 64 bits. 
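+    // A varint can span up to 10 bytes when the writer sign-extends the value to
+    // 64 bits (protobuf does this for negative int32s). Those trailing bytes add
+    // nothing to a 32-bit result, so they are read and discarded here to leave the
+    // stream positioned at the start of the message payload.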
+ for (; offset < 64; offset += 7) { + final int b = inputStream.read(); + if (b == -1) { + throw new IllegalStateException(); + } + if ((b & 0x80) == 0) { + return result; + } + } + throw new IllegalStateException(); + } + public static class Factory implements StreamReader.Factory { private static final Factory INSTANCE = new DelimitedProtoStreamReader.Factory(); From f8eac65fc977bb263fcdf01b4a013d438c47f304 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:07:25 +0200 Subject: [PATCH 02/16] Adapting MetricDataMapper to Wire's generated code --- .../mapping/metrics/MetricDataMapper.java | 450 +++++++++--------- 1 file changed, 222 insertions(+), 228 deletions(-) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java index 512cc71cb..12df0d3e7 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/MetricDataMapper.java @@ -72,11 +72,11 @@ public static MetricDataMapper getInstance() { } public Metric mapToProto(MetricData source) { - Metric.Builder metric = Metric.newBuilder(); + Metric.Builder metric = new Metric.Builder(); - metric.setName(source.getName()); - metric.setDescription(source.getDescription()); - metric.setUnit(source.getUnit()); + metric.name(source.getName()); + metric.description(source.getDescription()); + metric.unit(source.getUnit()); addDataToProto(source, metric); @@ -85,71 +85,70 @@ public Metric mapToProto(MetricData source) { @SuppressWarnings("unchecked") public MetricData mapToSdk(Metric source, Resource resource, InstrumentationScopeInfo scope) { - switch (source.getDataCase()) { - case GAUGE: - DataWithType gaugeDataWithType = mapGaugeToSdk(source.getGauge()); - if (gaugeDataWithType.type == MetricDataType.DOUBLE_GAUGE) { - return ImmutableMetricData.createDoubleGauge( - resource, - scope, - source.getName(), - source.getDescription(), - source.getUnit(), - (GaugeData) gaugeDataWithType.data); - } else { - return ImmutableMetricData.createLongGauge( - resource, - scope, - source.getName(), - source.getDescription(), - source.getUnit(), - (GaugeData) gaugeDataWithType.data); - } - case SUM: - DataWithType sumDataWithType = mapSumToSdk(source.getSum()); - if (sumDataWithType.type == MetricDataType.DOUBLE_SUM) { - return ImmutableMetricData.createDoubleSum( - resource, - scope, - source.getName(), - source.getDescription(), - source.getUnit(), - (SumData) sumDataWithType.data); - } else { - return ImmutableMetricData.createLongSum( - resource, - scope, - source.getName(), - source.getDescription(), - source.getUnit(), - (SumData) sumDataWithType.data); - } - case SUMMARY: - return ImmutableMetricData.createDoubleSummary( + if (source.gauge != null) { + DataWithType gaugeDataWithType = mapGaugeToSdk(source.gauge); + if (gaugeDataWithType.type == MetricDataType.DOUBLE_GAUGE) { + return ImmutableMetricData.createDoubleGauge( resource, scope, - source.getName(), - source.getDescription(), - source.getUnit(), - mapSummaryToSdk(source.getSummary())); - case HISTOGRAM: - return ImmutableMetricData.createDoubleHistogram( + source.name, + source.description, + source.unit, + (GaugeData) 
gaugeDataWithType.data); + } else { + return ImmutableMetricData.createLongGauge( resource, scope, - source.getName(), - source.getDescription(), - source.getUnit(), - mapHistogramToSdk(source.getHistogram())); - case EXPONENTIAL_HISTOGRAM: - return ImmutableMetricData.createExponentialHistogram( + source.name, + source.description, + source.unit, + (GaugeData) gaugeDataWithType.data); + } + } else if (source.sum != null) { + DataWithType sumDataWithType = mapSumToSdk(source.sum); + if (sumDataWithType.type == MetricDataType.DOUBLE_SUM) { + return ImmutableMetricData.createDoubleSum( resource, scope, - source.getName(), - source.getDescription(), - source.getUnit(), - mapExponentialHistogramToSdk(source.getExponentialHistogram())); - default: - throw new UnsupportedOperationException(); + source.name, + source.description, + source.unit, + (SumData) sumDataWithType.data); + } else { + return ImmutableMetricData.createLongSum( + resource, + scope, + source.name, + source.description, + source.unit, + (SumData) sumDataWithType.data); + } + } else if (source.summary != null) { + return ImmutableMetricData.createDoubleSummary( + resource, + scope, + source.name, + source.description, + source.unit, + mapSummaryToSdk(source.summary)); + } else if (source.histogram != null) { + return ImmutableMetricData.createDoubleHistogram( + resource, + scope, + source.name, + source.description, + source.unit, + mapHistogramToSdk(source.histogram)); + } else if (source.exponential_histogram != null) { + return ImmutableMetricData.createExponentialHistogram( + resource, + scope, + source.name, + source.description, + source.unit, + mapExponentialHistogramToSdk(source.exponential_histogram)); + } else { + throw new UnsupportedOperationException(); } } @@ -157,36 +156,36 @@ public MetricData mapToSdk(Metric source, Resource resource, InstrumentationScop private static void addDataToProto(MetricData source, Metric.Builder target) { switch (source.getType()) { case LONG_GAUGE: - target.setGauge(mapLongGaugeToProto((GaugeData) source.getData())); + target.gauge(mapLongGaugeToProto((GaugeData) source.getData())); break; case DOUBLE_GAUGE: - target.setGauge(mapDoubleGaugeToProto((GaugeData) source.getData())); + target.gauge(mapDoubleGaugeToProto((GaugeData) source.getData())); break; case LONG_SUM: - target.setSum(mapLongSumToProto((SumData) source.getData())); + target.sum(mapLongSumToProto((SumData) source.getData())); break; case DOUBLE_SUM: - target.setSum(mapDoubleSumToProto((SumData) source.getData())); + target.sum(mapDoubleSumToProto((SumData) source.getData())); break; case SUMMARY: - target.setSummary(mapSummaryToProto((SummaryData) source.getData())); + target.summary(mapSummaryToProto((SummaryData) source.getData())); break; case HISTOGRAM: - target.setHistogram(mapHistogramToProto((HistogramData) source.getData())); + target.histogram(mapHistogramToProto((HistogramData) source.getData())); break; case EXPONENTIAL_HISTOGRAM: - target.setExponentialHistogram( + target.exponential_histogram( mapExponentialHistogramToProto((ExponentialHistogramData) source.getData())); break; } } private static DataWithType mapGaugeToSdk(Gauge gauge) { - if (gauge.getDataPointsCount() > 0) { - NumberDataPoint dataPoint = gauge.getDataPoints(0); - if (dataPoint.hasAsInt()) { + if (!gauge.data_points.isEmpty()) { + NumberDataPoint dataPoint = gauge.data_points.get(0); + if (dataPoint.as_int != null) { return new DataWithType(mapLongGaugeToSdk(gauge), MetricDataType.LONG_GAUGE); - } else if (dataPoint.hasAsDouble()) { + } 
else if (dataPoint.as_double != null) { return new DataWithType(mapDoubleGaugeToSdk(gauge), MetricDataType.DOUBLE_GAUGE); } } @@ -194,11 +193,11 @@ private static DataWithType mapGaugeToSdk(Gauge gauge) { } private static DataWithType mapSumToSdk(Sum sum) { - if (sum.getDataPointsCount() > 0) { - NumberDataPoint dataPoint = sum.getDataPoints(0); - if (dataPoint.hasAsInt()) { + if (!sum.data_points.isEmpty()) { + NumberDataPoint dataPoint = sum.data_points.get(0); + if (dataPoint.as_int != null) { return new DataWithType(mapLongSumToSdk(sum), MetricDataType.LONG_SUM); - } else if (dataPoint.hasAsDouble()) { + } else if (dataPoint.as_double != null) { return new DataWithType(mapDoubleSumToSdk(sum), MetricDataType.DOUBLE_SUM); } } @@ -206,11 +205,11 @@ private static DataWithType mapSumToSdk(Sum sum) { } private static Gauge mapLongGaugeToProto(GaugeData data) { - Gauge.Builder gauge = Gauge.newBuilder(); + Gauge.Builder gauge = new Gauge.Builder(); if (data.getPoints() != null) { for (LongPointData point : data.getPoints()) { - gauge.addDataPoints(longPointDataToNumberDataPoint(point)); + gauge.data_points.add(longPointDataToNumberDataPoint(point)); } } @@ -218,11 +217,11 @@ private static Gauge mapLongGaugeToProto(GaugeData data) { } private static Gauge mapDoubleGaugeToProto(GaugeData data) { - Gauge.Builder gauge = Gauge.newBuilder(); + Gauge.Builder gauge = new Gauge.Builder(); if (data.getPoints() != null) { for (DoublePointData point : data.getPoints()) { - gauge.addDataPoints(doublePointDataToNumberDataPoint(point)); + gauge.data_points.add(doublePointDataToNumberDataPoint(point)); } } @@ -230,41 +229,39 @@ private static Gauge mapDoubleGaugeToProto(GaugeData data) { } private static Sum mapLongSumToProto(SumData data) { - Sum.Builder sum = Sum.newBuilder(); + Sum.Builder sum = new Sum.Builder(); if (data.getPoints() != null) { for (LongPointData point : data.getPoints()) { - sum.addDataPoints(longPointDataToNumberDataPoint(point)); + sum.data_points.add(longPointDataToNumberDataPoint(point)); } } - sum.setIsMonotonic(data.isMonotonic()); - sum.setAggregationTemporality( - mapAggregationTemporalityToProto(data.getAggregationTemporality())); + sum.is_monotonic(data.isMonotonic()); + sum.aggregation_temporality(mapAggregationTemporalityToProto(data.getAggregationTemporality())); return sum.build(); } private static Sum mapDoubleSumToProto(SumData data) { - Sum.Builder sum = Sum.newBuilder(); + Sum.Builder sum = new Sum.Builder(); if (data.getPoints() != null) { for (DoublePointData point : data.getPoints()) { - sum.addDataPoints(doublePointDataToNumberDataPoint(point)); + sum.data_points.add(doublePointDataToNumberDataPoint(point)); } } - sum.setIsMonotonic(data.isMonotonic()); - sum.setAggregationTemporality( - mapAggregationTemporalityToProto(data.getAggregationTemporality())); + sum.is_monotonic(data.isMonotonic()); + sum.aggregation_temporality(mapAggregationTemporalityToProto(data.getAggregationTemporality())); return sum.build(); } private static Summary mapSummaryToProto(SummaryData data) { - Summary.Builder summary = Summary.newBuilder(); + Summary.Builder summary = new Summary.Builder(); if (data.getPoints() != null) { for (SummaryPointData point : data.getPoints()) { - summary.addDataPoints(summaryPointDataToSummaryDataPoint(point)); + summary.data_points.add(summaryPointDataToSummaryDataPoint(point)); } } @@ -272,14 +269,14 @@ private static Summary mapSummaryToProto(SummaryData data) { } private static Histogram mapHistogramToProto(HistogramData data) { - Histogram.Builder 
histogram = Histogram.newBuilder(); + Histogram.Builder histogram = new Histogram.Builder(); if (data.getPoints() != null) { for (HistogramPointData point : data.getPoints()) { - histogram.addDataPoints(histogramPointDataToHistogramDataPoint(point)); + histogram.data_points.add(histogramPointDataToHistogramDataPoint(point)); } } - histogram.setAggregationTemporality( + histogram.aggregation_temporality( mapAggregationTemporalityToProto(data.getAggregationTemporality())); return histogram.build(); @@ -287,29 +284,29 @@ private static Histogram mapHistogramToProto(HistogramData data) { private static ExponentialHistogram mapExponentialHistogramToProto( ExponentialHistogramData data) { - ExponentialHistogram.Builder exponentialHistogram = ExponentialHistogram.newBuilder(); + ExponentialHistogram.Builder exponentialHistogram = new ExponentialHistogram.Builder(); if (data.getPoints() != null) { for (ExponentialHistogramPointData point : data.getPoints()) { - exponentialHistogram.addDataPoints( + exponentialHistogram.data_points.add( exponentialHistogramPointDataToExponentialHistogramDataPoint(point)); } } - exponentialHistogram.setAggregationTemporality( + exponentialHistogram.aggregation_temporality( mapAggregationTemporalityToProto(data.getAggregationTemporality())); return exponentialHistogram.build(); } private static NumberDataPoint longPointDataToNumberDataPoint(LongPointData source) { - NumberDataPoint.Builder numberDataPoint = NumberDataPoint.newBuilder(); + NumberDataPoint.Builder numberDataPoint = new NumberDataPoint.Builder(); - numberDataPoint.setStartTimeUnixNano(source.getStartEpochNanos()); - numberDataPoint.setTimeUnixNano(source.getEpochNanos()); - numberDataPoint.setAsInt(source.getValue()); + numberDataPoint.start_time_unix_nano(source.getStartEpochNanos()); + numberDataPoint.time_unix_nano(source.getEpochNanos()); + numberDataPoint.as_int(source.getValue()); if (source.getExemplars() != null) { for (LongExemplarData exemplar : source.getExemplars()) { - numberDataPoint.addExemplars(longExemplarDataToExemplar(exemplar)); + numberDataPoint.exemplars.add(longExemplarDataToExemplar(exemplar)); } } @@ -320,18 +317,18 @@ private static NumberDataPoint longPointDataToNumberDataPoint(LongPointData sour private static void addAttributesToNumberDataPoint( PointData source, NumberDataPoint.Builder target) { - target.addAllAttributes(attributesToProto(source.getAttributes())); + target.attributes.addAll(attributesToProto(source.getAttributes())); } private static NumberDataPoint doublePointDataToNumberDataPoint(DoublePointData source) { - NumberDataPoint.Builder numberDataPoint = NumberDataPoint.newBuilder(); + NumberDataPoint.Builder numberDataPoint = new NumberDataPoint.Builder(); - numberDataPoint.setStartTimeUnixNano(source.getStartEpochNanos()); - numberDataPoint.setTimeUnixNano(source.getEpochNanos()); - numberDataPoint.setAsDouble(source.getValue()); + numberDataPoint.start_time_unix_nano(source.getStartEpochNanos()); + numberDataPoint.time_unix_nano(source.getEpochNanos()); + numberDataPoint.as_double(source.getValue()); if (source.getExemplars() != null) { for (DoubleExemplarData exemplar : source.getExemplars()) { - numberDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + numberDataPoint.exemplars.add(doubleExemplarDataToExemplar(exemplar)); } } @@ -342,17 +339,17 @@ private static NumberDataPoint doublePointDataToNumberDataPoint(DoublePointData private static SummaryDataPoint summaryPointDataToSummaryDataPoint( SummaryPointData summaryPointData) { - 
SummaryDataPoint.Builder summaryDataPoint = SummaryDataPoint.newBuilder(); + SummaryDataPoint.Builder summaryDataPoint = new SummaryDataPoint.Builder(); - summaryDataPoint.setStartTimeUnixNano(summaryPointData.getStartEpochNanos()); - summaryDataPoint.setTimeUnixNano(summaryPointData.getEpochNanos()); + summaryDataPoint.start_time_unix_nano(summaryPointData.getStartEpochNanos()); + summaryDataPoint.time_unix_nano(summaryPointData.getEpochNanos()); if (summaryPointData.getValues() != null) { for (ValueAtQuantile value : summaryPointData.getValues()) { - summaryDataPoint.addQuantileValues(valueAtQuantileToValueAtQuantile(value)); + summaryDataPoint.quantile_values.add(valueAtQuantileToValueAtQuantile(value)); } } - summaryDataPoint.setCount(summaryPointData.getCount()); - summaryDataPoint.setSum(summaryPointData.getSum()); + summaryDataPoint.count(summaryPointData.getCount()); + summaryDataPoint.sum(summaryPointData.getSum()); addAttributesToSummaryDataPoint(summaryPointData, summaryDataPoint); @@ -361,37 +358,33 @@ private static SummaryDataPoint summaryPointDataToSummaryDataPoint( private static void addAttributesToSummaryDataPoint( PointData source, SummaryDataPoint.Builder target) { - target.addAllAttributes(attributesToProto(source.getAttributes())); + target.attributes.addAll(attributesToProto(source.getAttributes())); } private static HistogramDataPoint histogramPointDataToHistogramDataPoint( HistogramPointData histogramPointData) { - HistogramDataPoint.Builder histogramDataPoint = HistogramDataPoint.newBuilder(); + HistogramDataPoint.Builder histogramDataPoint = new HistogramDataPoint.Builder(); - histogramDataPoint.setStartTimeUnixNano(histogramPointData.getStartEpochNanos()); - histogramDataPoint.setTimeUnixNano(histogramPointData.getEpochNanos()); + histogramDataPoint.start_time_unix_nano(histogramPointData.getStartEpochNanos()); + histogramDataPoint.time_unix_nano(histogramPointData.getEpochNanos()); if (histogramPointData.getCounts() != null) { - for (Long count : histogramPointData.getCounts()) { - histogramDataPoint.addBucketCounts(count); - } + histogramDataPoint.bucket_counts.addAll(histogramPointData.getCounts()); } if (histogramPointData.getBoundaries() != null) { - for (Double boundary : histogramPointData.getBoundaries()) { - histogramDataPoint.addExplicitBounds(boundary); - } + histogramDataPoint.explicit_bounds.addAll(histogramPointData.getBoundaries()); } if (histogramPointData.getExemplars() != null) { for (DoubleExemplarData exemplar : histogramPointData.getExemplars()) { - histogramDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + histogramDataPoint.exemplars.add(doubleExemplarDataToExemplar(exemplar)); } } - histogramDataPoint.setCount(histogramPointData.getCount()); - histogramDataPoint.setSum(histogramPointData.getSum()); + histogramDataPoint.count(histogramPointData.getCount()); + histogramDataPoint.sum(histogramPointData.getSum()); if (histogramPointData.hasMin()) { - histogramDataPoint.setMin(histogramPointData.getMin()); + histogramDataPoint.min(histogramPointData.getMin()); } if (histogramPointData.hasMax()) { - histogramDataPoint.setMax(histogramPointData.getMax()); + histogramDataPoint.max(histogramPointData.getMax()); } addAttributesToHistogramDataPoint(histogramPointData, histogramDataPoint); @@ -401,36 +394,36 @@ private static HistogramDataPoint histogramPointDataToHistogramDataPoint( private static void addAttributesToHistogramDataPoint( HistogramPointData source, HistogramDataPoint.Builder target) { - 
target.addAllAttributes(attributesToProto(source.getAttributes())); + target.attributes.addAll(attributesToProto(source.getAttributes())); } private static ExponentialHistogramDataPoint exponentialHistogramPointDataToExponentialHistogramDataPoint( ExponentialHistogramPointData exponentialHistogramPointData) { ExponentialHistogramDataPoint.Builder exponentialHistogramDataPoint = - ExponentialHistogramDataPoint.newBuilder(); + new ExponentialHistogramDataPoint.Builder(); - exponentialHistogramDataPoint.setStartTimeUnixNano( + exponentialHistogramDataPoint.start_time_unix_nano( exponentialHistogramPointData.getStartEpochNanos()); - exponentialHistogramDataPoint.setTimeUnixNano(exponentialHistogramPointData.getEpochNanos()); - exponentialHistogramDataPoint.setPositive( + exponentialHistogramDataPoint.time_unix_nano(exponentialHistogramPointData.getEpochNanos()); + exponentialHistogramDataPoint.positive( exponentialHistogramBucketsToBuckets(exponentialHistogramPointData.getPositiveBuckets())); - exponentialHistogramDataPoint.setNegative( + exponentialHistogramDataPoint.negative( exponentialHistogramBucketsToBuckets(exponentialHistogramPointData.getNegativeBuckets())); if (exponentialHistogramPointData.getExemplars() != null) { for (DoubleExemplarData exemplar : exponentialHistogramPointData.getExemplars()) { - exponentialHistogramDataPoint.addExemplars(doubleExemplarDataToExemplar(exemplar)); + exponentialHistogramDataPoint.exemplars.add(doubleExemplarDataToExemplar(exemplar)); } } - exponentialHistogramDataPoint.setCount(exponentialHistogramPointData.getCount()); - exponentialHistogramDataPoint.setSum(exponentialHistogramPointData.getSum()); - exponentialHistogramDataPoint.setScale(exponentialHistogramPointData.getScale()); - exponentialHistogramDataPoint.setZeroCount(exponentialHistogramPointData.getZeroCount()); + exponentialHistogramDataPoint.count(exponentialHistogramPointData.getCount()); + exponentialHistogramDataPoint.sum(exponentialHistogramPointData.getSum()); + exponentialHistogramDataPoint.scale(exponentialHistogramPointData.getScale()); + exponentialHistogramDataPoint.zero_count(exponentialHistogramPointData.getZeroCount()); if (exponentialHistogramPointData.hasMin()) { - exponentialHistogramDataPoint.setMin(exponentialHistogramPointData.getMin()); + exponentialHistogramDataPoint.min(exponentialHistogramPointData.getMin()); } if (exponentialHistogramPointData.hasMax()) { - exponentialHistogramDataPoint.setMax(exponentialHistogramPointData.getMax()); + exponentialHistogramDataPoint.max(exponentialHistogramPointData.getMax()); } addAttributesToExponentialHistogramDataPoint( @@ -441,29 +434,27 @@ private static void addAttributesToHistogramDataPoint( private static void addAttributesToExponentialHistogramDataPoint( ExponentialHistogramPointData source, ExponentialHistogramDataPoint.Builder target) { - target.addAllAttributes(attributesToProto(source.getAttributes())); + target.attributes.addAll(attributesToProto(source.getAttributes())); } private static ExponentialHistogramDataPoint.Buckets exponentialHistogramBucketsToBuckets( ExponentialHistogramBuckets source) { ExponentialHistogramDataPoint.Buckets.Builder buckets = - ExponentialHistogramDataPoint.Buckets.newBuilder(); + new ExponentialHistogramDataPoint.Buckets.Builder(); if (source.getBucketCounts() != null) { - for (Long bucketCount : source.getBucketCounts()) { - buckets.addBucketCounts(bucketCount); - } + buckets.bucket_counts.addAll(source.getBucketCounts()); } - buckets.setOffset(source.getOffset()); + 
buckets.offset(source.getOffset()); return buckets.build(); } private static Exemplar doubleExemplarDataToExemplar(DoubleExemplarData doubleExemplarData) { - Exemplar.Builder exemplar = Exemplar.newBuilder(); + Exemplar.Builder exemplar = new Exemplar.Builder(); - exemplar.setTimeUnixNano(doubleExemplarData.getEpochNanos()); - exemplar.setAsDouble(doubleExemplarData.getValue()); + exemplar.time_unix_nano(doubleExemplarData.getEpochNanos()); + exemplar.as_double(doubleExemplarData.getValue()); addExtrasToExemplar(doubleExemplarData, exemplar); @@ -471,10 +462,10 @@ private static Exemplar doubleExemplarDataToExemplar(DoubleExemplarData doubleEx } private static Exemplar longExemplarDataToExemplar(LongExemplarData doubleExemplarData) { - Exemplar.Builder exemplar = Exemplar.newBuilder(); + Exemplar.Builder exemplar = new Exemplar.Builder(); - exemplar.setTimeUnixNano(doubleExemplarData.getEpochNanos()); - exemplar.setAsInt(doubleExemplarData.getValue()); + exemplar.time_unix_nano(doubleExemplarData.getEpochNanos()); + exemplar.as_int(doubleExemplarData.getValue()); addExtrasToExemplar(doubleExemplarData, exemplar); @@ -482,10 +473,10 @@ private static Exemplar longExemplarDataToExemplar(LongExemplarData doubleExempl } private static void addExtrasToExemplar(ExemplarData source, Exemplar.Builder target) { - target.addAllFilteredAttributes(attributesToProto(source.getFilteredAttributes())); + target.filtered_attributes.addAll(attributesToProto(source.getFilteredAttributes())); SpanContext spanContext = source.getSpanContext(); - target.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); - target.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); + target.span_id(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); + target.trace_id(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); } private static AggregationTemporality mapAggregationTemporalityToProto( @@ -500,7 +491,7 @@ private static AggregationTemporality mapAggregationTemporalityToProto( aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE; break; default: - aggregationTemporality = AggregationTemporality.UNRECOGNIZED; + aggregationTemporality = AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED; } return aggregationTemporality; @@ -508,95 +499,99 @@ private static AggregationTemporality mapAggregationTemporalityToProto( private static SummaryData mapSummaryToSdk(Summary summary) { return ImmutableSummaryData.create( - summaryDataPointListToSummaryPointDataCollection(summary.getDataPointsList())); + summaryDataPointListToSummaryPointDataCollection(summary.data_points)); } private static HistogramData mapHistogramToSdk(Histogram histogram) { return ImmutableHistogramData.create( - mapAggregationTemporalityToSdk(histogram.getAggregationTemporality()), - histogramDataPointListToHistogramPointDataCollection(histogram.getDataPointsList())); + mapAggregationTemporalityToSdk(histogram.aggregation_temporality), + histogramDataPointListToHistogramPointDataCollection(histogram.data_points)); } private static ExponentialHistogramData mapExponentialHistogramToSdk( ExponentialHistogram source) { return ImmutableExponentialHistogramData.create( - mapAggregationTemporalityToSdk(source.getAggregationTemporality()), + mapAggregationTemporalityToSdk(source.aggregation_temporality), exponentialHistogramDataPointListToExponentialHistogramPointDataCollection( - source.getDataPointsList())); + source.data_points)); } 
private static ExponentialHistogramPointData exponentialHistogramDataPointToExponentialHistogramPointData( ExponentialHistogramDataPoint source) { + double min = (source.min != null) ? source.min : 0; + double max = (source.max != null) ? source.max : 0; return ImmutableExponentialHistogramPointData.create( - source.getScale(), - source.getSum(), - source.getZeroCount(), - source.hasMin(), - source.getMin(), - source.hasMax(), - source.getMax(), - mapBucketsFromProto(source.getPositive(), source.getScale()), - mapBucketsFromProto(source.getNegative(), source.getScale()), - source.getStartTimeUnixNano(), - source.getTimeUnixNano(), - protoToAttributes(source.getAttributesList()), - exemplarListToDoubleExemplarDataList(source.getExemplarsList())); + source.scale, + source.sum, + source.zero_count, + min > 0, + min, + max > 0, + max, + mapBucketsFromProto(source.positive, source.scale), + mapBucketsFromProto(source.negative, source.scale), + source.start_time_unix_nano, + source.time_unix_nano, + protoToAttributes(source.attributes), + exemplarListToDoubleExemplarDataList(source.exemplars)); } private static HistogramPointData histogramDataPointToHistogramPointData( HistogramDataPoint source) { + double min = (source.min != null) ? source.min : 0; + double max = (source.max != null) ? source.max : 0; return ImmutableHistogramPointData.create( - source.getStartTimeUnixNano(), - source.getTimeUnixNano(), - protoToAttributes(source.getAttributesList()), - source.getSum(), - source.hasMin(), - source.getMin(), - source.hasMax(), - source.getMax(), - source.getExplicitBoundsList(), - source.getBucketCountsList(), - exemplarListToDoubleExemplarDataList(source.getExemplarsList())); + source.start_time_unix_nano, + source.time_unix_nano, + protoToAttributes(source.attributes), + source.sum, + min > 0, + min, + max > 0, + max, + source.explicit_bounds, + source.bucket_counts, + exemplarListToDoubleExemplarDataList(source.exemplars)); } private static DoubleExemplarData exemplarToDoubleExemplarData(Exemplar source) { return ImmutableDoubleExemplarData.create( - protoToAttributes(source.getFilteredAttributesList()), - source.getTimeUnixNano(), + protoToAttributes(source.filtered_attributes), + source.time_unix_nano, createForExemplar(source), - source.getAsDouble()); + source.as_double); } private static LongExemplarData exemplarToLongExemplarData(Exemplar source) { return ImmutableLongExemplarData.create( - protoToAttributes(source.getFilteredAttributesList()), - source.getTimeUnixNano(), + protoToAttributes(source.filtered_attributes), + source.time_unix_nano, createForExemplar(source), - source.getAsInt()); + source.as_int); } private static SpanContext createForExemplar(Exemplar value) { return SpanContext.create( - ByteStringMapper.getInstance().protoToString(value.getTraceId()), - ByteStringMapper.getInstance().protoToString(value.getSpanId()), + ByteStringMapper.getInstance().protoToString(value.trace_id), + ByteStringMapper.getInstance().protoToString(value.span_id), TraceFlags.getSampled(), TraceState.getDefault()); } private static SummaryPointData summaryDataPointToSummaryPointData(SummaryDataPoint source) { return ImmutableSummaryPointData.create( - source.getStartTimeUnixNano(), - source.getTimeUnixNano(), - protoToAttributes(source.getAttributesList()), - source.getCount(), - source.getSum(), - valueAtQuantileListToValueAtQuantileList(source.getQuantileValuesList())); + source.start_time_unix_nano, + source.time_unix_nano, + protoToAttributes(source.attributes), + source.count, + 
source.sum, + valueAtQuantileListToValueAtQuantileList(source.quantile_values)); } private static ValueAtQuantile mapFromSummaryValueAtQuantileProto( SummaryDataPoint.ValueAtQuantile source) { - return ImmutableValueAtQuantile.create(source.getQuantile(), source.getValue()); + return ImmutableValueAtQuantile.create(source.quantile, source.value); } private static io.opentelemetry.sdk.metrics.data.AggregationTemporality @@ -620,53 +615,53 @@ private static ValueAtQuantile mapFromSummaryValueAtQuantileProto( private static GaugeData mapLongGaugeToSdk(Gauge gauge) { return ImmutableGaugeData.create( - numberDataPointListToLongPointDataCollection(gauge.getDataPointsList())); + numberDataPointListToLongPointDataCollection(gauge.data_points)); } private static GaugeData mapDoubleGaugeToSdk(Gauge gauge) { return ImmutableGaugeData.create( - numberDataPointListToDoublePointDataCollection(gauge.getDataPointsList())); + numberDataPointListToDoublePointDataCollection(gauge.data_points)); } private static SumData mapLongSumToSdk(Sum sum) { return ImmutableSumData.create( - sum.getIsMonotonic(), - mapAggregationTemporalityToSdk(sum.getAggregationTemporality()), - numberDataPointListToLongPointDataCollection(sum.getDataPointsList())); + sum.is_monotonic, + mapAggregationTemporalityToSdk(sum.aggregation_temporality), + numberDataPointListToLongPointDataCollection(sum.data_points)); } private static SumData mapDoubleSumToSdk(Sum sum) { return ImmutableSumData.create( - sum.getIsMonotonic(), - mapAggregationTemporalityToSdk(sum.getAggregationTemporality()), - numberDataPointListToDoublePointDataCollection(sum.getDataPointsList())); + sum.is_monotonic, + mapAggregationTemporalityToSdk(sum.aggregation_temporality), + numberDataPointListToDoublePointDataCollection(sum.data_points)); } private static DoublePointData mapDoubleNumberDataPointToSdk(NumberDataPoint source) { return ImmutableDoublePointData.create( - source.getStartTimeUnixNano(), - source.getTimeUnixNano(), - protoToAttributes(source.getAttributesList()), - source.getAsDouble(), - exemplarListToDoubleExemplarDataList(source.getExemplarsList())); + source.start_time_unix_nano, + source.time_unix_nano, + protoToAttributes(source.attributes), + source.as_double, + exemplarListToDoubleExemplarDataList(source.exemplars)); } private static LongPointData mapLongNumberDataPointToSdk(NumberDataPoint source) { return ImmutableLongPointData.create( - source.getStartTimeUnixNano(), - source.getTimeUnixNano(), - protoToAttributes(source.getAttributesList()), - source.getAsInt(), - exemplarListToLongExemplarDataList(source.getExemplarsList())); + source.start_time_unix_nano, + source.time_unix_nano, + protoToAttributes(source.attributes), + source.as_int, + exemplarListToLongExemplarDataList(source.exemplars)); } private static SummaryDataPoint.ValueAtQuantile valueAtQuantileToValueAtQuantile( ValueAtQuantile valueAtQuantile) { SummaryDataPoint.ValueAtQuantile.Builder builder = - SummaryDataPoint.ValueAtQuantile.newBuilder(); + new SummaryDataPoint.ValueAtQuantile.Builder(); - builder.setQuantile(valueAtQuantile.getQuantile()); - builder.setValue(valueAtQuantile.getValue()); + builder.quantile(valueAtQuantile.getQuantile()); + builder.value(valueAtQuantile.getValue()); return builder.build(); } @@ -755,8 +750,7 @@ private static List exemplarListToLongExemplarDataList(List attributesToProto(Attributes source) { From f20b071cf14463a797f31c1e8bb4759d9c000a75 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 
Aug 2023 16:29:21 +0200 Subject: [PATCH 03/16] Adapting SpanDataMapper to Wire's generated code --- .../mapping/spans/SpanDataMapper.java | 112 +++++++++--------- 1 file changed, 56 insertions(+), 56 deletions(-) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java index 689be4d9b..34f63757a 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/SpanDataMapper.java @@ -39,26 +39,26 @@ public static SpanDataMapper getInstance() { private final ByteStringMapper byteStringMapper = ByteStringMapper.getInstance(); public Span mapToProto(SpanData source) { - Span.Builder span = Span.newBuilder(); + Span.Builder span = new Span.Builder(); - span.setStartTimeUnixNano(source.getStartEpochNanos()); - span.setEndTimeUnixNano(source.getEndEpochNanos()); + span.start_time_unix_nano(source.getStartEpochNanos()); + span.end_time_unix_nano(source.getEndEpochNanos()); if (source.getEvents() != null) { for (EventData event : source.getEvents()) { - span.addEvents(eventDataToProto(event)); + span.events.add(eventDataToProto(event)); } } if (source.getLinks() != null) { for (LinkData link : source.getLinks()) { - span.addLinks(linkDataToProto(link)); + span.links.add(linkDataToProto(link)); } } - span.setTraceId(byteStringMapper.stringToProto(source.getTraceId())); - span.setSpanId(byteStringMapper.stringToProto(source.getSpanId())); - span.setParentSpanId(byteStringMapper.stringToProto(source.getParentSpanId())); - span.setName(source.getName()); - span.setKind(mapSpanKindToProto(source.getKind())); - span.setStatus(statusDataToProto(source.getStatus())); + span.trace_id(byteStringMapper.stringToProto(source.getTraceId())); + span.span_id(byteStringMapper.stringToProto(source.getSpanId())); + span.parent_span_id(byteStringMapper.stringToProto(source.getParentSpanId())); + span.name(source.getName()); + span.kind(mapSpanKindToProto(source.getKind())); + span.status(statusDataToProto(source.getStatus())); addSpanProtoExtras(source, span); @@ -66,26 +66,26 @@ public Span mapToProto(SpanData source) { } private static void addSpanProtoExtras(SpanData source, Span.Builder target) { - target.addAllAttributes(attributesToProto(source.getAttributes())); - target.setDroppedAttributesCount( + target.attributes.addAll(attributesToProto(source.getAttributes())); + target.dropped_attributes_count( source.getTotalAttributeCount() - source.getAttributes().size()); - target.setDroppedEventsCount(source.getTotalRecordedEvents() - getListSize(source.getEvents())); - target.setDroppedLinksCount(source.getTotalRecordedLinks() - getListSize(source.getLinks())); - target.setTraceState(encodeTraceState(source.getSpanContext().getTraceState())); + target.dropped_events_count(source.getTotalRecordedEvents() - getListSize(source.getEvents())); + target.dropped_links_count(source.getTotalRecordedLinks() - getListSize(source.getLinks())); + target.trace_state(encodeTraceState(source.getSpanContext().getTraceState())); } public SpanData mapToSdk( Span source, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo) { SpanDataImpl.Builder spanData = SpanDataImpl.builder(); - 
spanData.setStartEpochNanos(source.getStartTimeUnixNano()); - spanData.setEndEpochNanos(source.getEndTimeUnixNano()); - spanData.setEvents(eventListToEventDataList(source.getEventsList())); - spanData.setLinks(linkListToLinkDataList(source.getLinksList())); - spanData.setName(source.getName()); - spanData.setKind(mapSpanKindToSdk(source.getKind())); - if (source.hasStatus()) { - spanData.setStatus(mapStatusDataToSdk(source.getStatus())); + spanData.setStartEpochNanos(source.start_time_unix_nano); + spanData.setEndEpochNanos(source.end_time_unix_nano); + spanData.setEvents(eventListToEventDataList(source.events)); + spanData.setLinks(linkListToLinkDataList(source.links)); + spanData.setName(source.name); + spanData.setKind(mapSpanKindToSdk(source.kind)); + if (source.status != null) { + spanData.setStatus(mapStatusDataToSdk(source.status)); } addSpanDataExtras(source, spanData, resource, instrumentationScopeInfo); @@ -98,40 +98,40 @@ private static void addSpanDataExtras( SpanDataImpl.Builder target, Resource resource, InstrumentationScopeInfo instrumentationScopeInfo) { - Attributes attributes = protoToAttributes(source.getAttributesList()); + Attributes attributes = protoToAttributes(source.attributes); target.setAttributes(attributes); target.setResource(resource); target.setInstrumentationScopeInfo(instrumentationScopeInfo); - String traceId = ByteStringMapper.getInstance().protoToString(source.getTraceId()); + String traceId = ByteStringMapper.getInstance().protoToString(source.trace_id); target.setSpanContext( SpanContext.create( traceId, - ByteStringMapper.getInstance().protoToString(source.getSpanId()), + ByteStringMapper.getInstance().protoToString(source.span_id), TraceFlags.getSampled(), - decodeTraceState(source.getTraceState()))); + decodeTraceState(source.trace_state))); target.setParentSpanContext( SpanContext.create( traceId, - ByteStringMapper.getInstance().protoToString(source.getParentSpanId()), + ByteStringMapper.getInstance().protoToString(source.parent_span_id), TraceFlags.getSampled(), TraceState.getDefault())); - target.setTotalAttributeCount(source.getDroppedAttributesCount() + attributes.size()); + target.setTotalAttributeCount(source.dropped_attributes_count + attributes.size()); target.setTotalRecordedEvents( - calculateRecordedItems(source.getDroppedEventsCount(), source.getEventsCount())); + calculateRecordedItems(source.dropped_events_count, source.events.size())); target.setTotalRecordedLinks( - calculateRecordedItems(source.getDroppedLinksCount(), source.getLinksCount())); + calculateRecordedItems(source.dropped_links_count, source.links.size())); } private static StatusData mapStatusDataToSdk(Status source) { - return StatusData.create(getStatusCode(source.getCodeValue()), source.getMessage()); + return StatusData.create(getStatusCode(source.code.getValue()), source.message); } private static Span.Event eventDataToProto(EventData source) { - Span.Event.Builder event = Span.Event.newBuilder(); + Span.Event.Builder event = new Span.Event.Builder(); - event.setTimeUnixNano(source.getEpochNanos()); - event.setName(source.getName()); - event.setDroppedAttributesCount(source.getDroppedAttributesCount()); + event.time_unix_nano(source.getEpochNanos()); + event.name(source.getName()); + event.dropped_attributes_count(source.getDroppedAttributesCount()); addEventProtoExtras(source, event); @@ -139,14 +139,14 @@ private static Span.Event eventDataToProto(EventData source) { } private static void addEventProtoExtras(EventData source, Span.Event.Builder target) { - 
target.addAllAttributes(attributesToProto(source.getAttributes())); + target.attributes.addAll(attributesToProto(source.getAttributes())); } private static Status statusDataToProto(StatusData source) { - Status.Builder status = Status.newBuilder(); + Status.Builder status = new Status.Builder(); - status.setMessage(source.getDescription()); - status.setCode(mapStatusCodeToProto(source.getStatusCode())); + status.message(source.getDescription()); + status.code(mapStatusCodeToProto(source.getStatusCode())); return status.build(); } @@ -198,12 +198,12 @@ private static Status.StatusCode mapStatusCodeToProto(StatusCode source) { } private static EventData eventDataToSdk(Span.Event source) { - Attributes attributes = protoToAttributes(source.getAttributesList()); + Attributes attributes = protoToAttributes(source.attributes); return EventData.create( - source.getTimeUnixNano(), - source.getName(), + source.time_unix_nano, + source.name, attributes, - attributes.size() + source.getDroppedAttributesCount()); + attributes.size() + source.dropped_attributes_count); } private static SpanKind mapSpanKindToSdk(Span.SpanKind source) { @@ -251,14 +251,14 @@ private static List linkListToLinkDataList(List list) { } private static LinkData linkDataToSdk(Span.Link source) { - Attributes attributes = protoToAttributes(source.getAttributesList()); - int totalAttrCount = source.getDroppedAttributesCount() + attributes.size(); + Attributes attributes = protoToAttributes(source.attributes); + int totalAttrCount = source.dropped_attributes_count + attributes.size(); SpanContext spanContext = SpanContext.create( - ByteStringMapper.getInstance().protoToString(source.getTraceId()), - ByteStringMapper.getInstance().protoToString(source.getSpanId()), + ByteStringMapper.getInstance().protoToString(source.trace_id), + ByteStringMapper.getInstance().protoToString(source.span_id), TraceFlags.getSampled(), - decodeTraceState(source.getTraceState())); + decodeTraceState(source.trace_state)); return LinkData.create(spanContext, attributes, totalAttrCount); } @@ -304,14 +304,14 @@ private static TraceState decodeTraceState(@Nullable String source) { } private static Span.Link linkDataToProto(LinkData source) { - Span.Link.Builder builder = Span.Link.newBuilder(); + Span.Link.Builder builder = new Span.Link.Builder(); SpanContext spanContext = source.getSpanContext(); - builder.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); - builder.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); - builder.addAllAttributes(attributesToProto(source.getAttributes())); - builder.setDroppedAttributesCount( + builder.trace_id(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); + builder.span_id(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); + builder.attributes.addAll(attributesToProto(source.getAttributes())); + builder.dropped_attributes_count( source.getTotalAttributeCount() - source.getAttributes().size()); - builder.setTraceState(encodeTraceState(spanContext.getTraceState())); + builder.trace_state(encodeTraceState(spanContext.getTraceState())); return builder.build(); } From 80dce435c582298bc3962dabde3e6a053880aef3 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:29:32 +0200 Subject: [PATCH 04/16] Adapting LogRecordDataMapper to Wire's generated code --- .../mapping/logs/LogRecordDataMapper.java | 53 +++++++++---------- 1 file changed, 26 insertions(+), 27 
deletions(-) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java index 8cedb2d4e..92af6a387 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/LogRecordDataMapper.java @@ -30,21 +30,21 @@ public static LogRecordDataMapper getInstance() { } public LogRecord mapToProto(LogRecordData source) { - LogRecord.Builder logRecord = LogRecord.newBuilder(); + LogRecord.Builder logRecord = new LogRecord.Builder(); - logRecord.setTimeUnixNano(source.getTimestampEpochNanos()); - logRecord.setObservedTimeUnixNano(source.getObservedTimestampEpochNanos()); + logRecord.time_unix_nano(source.getTimestampEpochNanos()); + logRecord.observed_time_unix_nano(source.getObservedTimestampEpochNanos()); if (source.getSeverity() != null) { - logRecord.setSeverityNumber(severityToProto(source.getSeverity())); + logRecord.severity_number(severityToProto(source.getSeverity())); } if (source.getSeverityText() != null) { - logRecord.setSeverityText(source.getSeverityText()); + logRecord.severity_text(source.getSeverityText()); } if (source.getBody() != null) { - logRecord.setBody(bodyToAnyValue(source.getBody())); + logRecord.body(bodyToAnyValue(source.getBody())); } - logRecord.setFlags(source.getSpanContext().getTraceFlags().asByte()); + logRecord.flags(source.getSpanContext().getTraceFlags().asByte()); addExtrasToProtoBuilder(source, logRecord); @@ -52,12 +52,12 @@ public LogRecord mapToProto(LogRecordData source) { } private static void addExtrasToProtoBuilder(LogRecordData source, LogRecord.Builder target) { - target.addAllAttributes( + target.attributes.addAll( AttributesMapper.getInstance().attributesToProto(source.getAttributes())); SpanContext spanContext = source.getSpanContext(); - target.setSpanId(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); - target.setTraceId(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); - target.setDroppedAttributesCount( + target.span_id(ByteStringMapper.getInstance().stringToProto(spanContext.getSpanId())); + target.trace_id(ByteStringMapper.getInstance().stringToProto(spanContext.getTraceId())); + target.dropped_attributes_count( source.getTotalAttributeCount() - source.getAttributes().size()); } @@ -65,12 +65,12 @@ public LogRecordData mapToSdk( LogRecord source, Resource resource, InstrumentationScopeInfo scopeInfo) { LogRecordDataImpl.Builder logRecordData = LogRecordDataImpl.builder(); - logRecordData.setTimestampEpochNanos(source.getTimeUnixNano()); - logRecordData.setObservedTimestampEpochNanos(source.getObservedTimeUnixNano()); - logRecordData.setSeverity(severityNumberToSdk(source.getSeverityNumber())); - logRecordData.setSeverityText(source.getSeverityText()); - if (source.hasBody()) { - logRecordData.setBody(anyValueToBody(source.getBody())); + logRecordData.setTimestampEpochNanos(source.time_unix_nano); + logRecordData.setObservedTimestampEpochNanos(source.observed_time_unix_nano); + logRecordData.setSeverity(severityNumberToSdk(source.severity_number)); + logRecordData.setSeverityText(source.severity_text); + if (source.body != null) { + logRecordData.setBody(anyValueToBody(source.body)); } 
addExtrasToSdkItemBuilder(source, logRecordData, resource, scopeInfo); @@ -83,31 +83,30 @@ private static void addExtrasToSdkItemBuilder( LogRecordDataImpl.Builder target, Resource resource, InstrumentationScopeInfo scopeInfo) { - Attributes attributes = - AttributesMapper.getInstance().protoToAttributes(source.getAttributesList()); + Attributes attributes = AttributesMapper.getInstance().protoToAttributes(source.attributes); target.setAttributes(attributes); target.setSpanContext( SpanContext.create( - ByteStringMapper.getInstance().protoToString(source.getTraceId()), - ByteStringMapper.getInstance().protoToString(source.getSpanId()), + ByteStringMapper.getInstance().protoToString(source.trace_id), + ByteStringMapper.getInstance().protoToString(source.span_id), TraceFlags.getSampled(), TraceState.getDefault())); - target.setTotalAttributeCount(source.getDroppedAttributesCount() + attributes.size()); + target.setTotalAttributeCount(source.dropped_attributes_count + attributes.size()); target.setResource(resource); target.setInstrumentationScopeInfo(scopeInfo); } private static AnyValue bodyToAnyValue(Body body) { - return AnyValue.newBuilder().setStringValue(body.asString()).build(); + return new AnyValue.Builder().string_value(body.asString()).build(); } private static SeverityNumber severityToProto(Severity severity) { - return SeverityNumber.forNumber(severity.getSeverityNumber()); + return SeverityNumber.fromValue(severity.getSeverityNumber()); } private static Body anyValueToBody(AnyValue source) { - if (source.hasStringValue()) { - return Body.string(source.getStringValue()); + if (source.string_value != null) { + return Body.string(source.string_value); } else { return Body.empty(); } @@ -115,7 +114,7 @@ private static Body anyValueToBody(AnyValue source) { private static Severity severityNumberToSdk(SeverityNumber source) { for (Severity value : Severity.values()) { - if (value.getSeverityNumber() == source.getNumber()) { + if (value.getSeverityNumber() == source.getValue()) { return value; } } From bea9675bd3838ffb2d41ed4f14d75770942eac43 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:29:52 +0200 Subject: [PATCH 05/16] Adapting mapper utils to Wire's generated code --- .../mapping/common/AttributesMapper.java | 66 +++++++++---------- .../common/BaseProtoSignalsDataMapper.java | 14 ++-- .../mapping/common/ResourceMapper.java | 8 +-- .../mapping/logs/ProtoLogsDataMapper.java | 24 +++---- .../metrics/ProtoMetricsDataMapper.java | 24 +++---- .../mapping/spans/ProtoSpansDataMapper.java | 25 ++++--- 6 files changed, 79 insertions(+), 82 deletions(-) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java index 8b4ceb05a..e017cb878 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/AttributesMapper.java @@ -32,15 +32,15 @@ public List attributesToProto(Attributes attributes) { public Attributes protoToAttributes(List values) { AttributesBuilder builder = Attributes.builder(); for (KeyValue keyValue : values) { - addValue(builder, keyValue.getKey(), keyValue.getValue()); + addValue(builder, keyValue.key, 
keyValue.value); } return builder.build(); } private static KeyValue attributeEntryToProto(AttributeKey key, Object value) { - KeyValue.Builder builder = KeyValue.newBuilder(); - builder.setKey(key.getKey()); - builder.setValue(attributeValueToProto(key.getType(), value)); + KeyValue.Builder builder = new KeyValue.Builder(); + builder.key(key.getKey()); + builder.value(attributeValueToProto(key.getType(), value)); return builder.build(); } @@ -68,37 +68,37 @@ private static AnyValue attributeValueToProto(AttributeType type, Object value) } private static AnyValue arrayToAnyValue(List value) { - return AnyValue.newBuilder() - .setArrayValue(ArrayValue.newBuilder().addAllValues(value).build()) + return new AnyValue.Builder() + .array_value(new ArrayValue.Builder().values(value).build()) .build(); } private static void addValue(AttributesBuilder builder, String key, AnyValue value) { - if (value.hasStringValue()) { - builder.put(AttributeKey.stringKey(key), value.getStringValue()); - } else if (value.hasBoolValue()) { - builder.put(AttributeKey.booleanKey(key), value.getBoolValue()); - } else if (value.hasIntValue()) { - builder.put(AttributeKey.longKey(key), value.getIntValue()); - } else if (value.hasDoubleValue()) { - builder.put(AttributeKey.doubleKey(key), value.getDoubleValue()); - } else if (value.hasArrayValue()) { - addArray(builder, key, value.getArrayValue()); + if (value.string_value != null) { + builder.put(AttributeKey.stringKey(key), value.string_value); + } else if (value.bool_value != null) { + builder.put(AttributeKey.booleanKey(key), value.bool_value); + } else if (value.int_value != null) { + builder.put(AttributeKey.longKey(key), value.int_value); + } else if (value.double_value != null) { + builder.put(AttributeKey.doubleKey(key), value.double_value); + } else if (value.array_value != null) { + addArray(builder, key, value.array_value); } else { throw new UnsupportedOperationException(); } } private static void addArray(AttributesBuilder builder, String key, ArrayValue arrayValue) { - List values = arrayValue.getValuesList(); + List values = arrayValue.values; AnyValue anyValue = values.get(0); - if (anyValue.hasStringValue()) { + if (anyValue.string_value != null) { builder.put(AttributeKey.stringArrayKey(key), anyValuesToStrings(values)); - } else if (anyValue.hasBoolValue()) { + } else if (anyValue.bool_value != null) { builder.put(AttributeKey.booleanArrayKey(key), anyValuesToBooleans(values)); - } else if (anyValue.hasIntValue()) { + } else if (anyValue.int_value != null) { builder.put(AttributeKey.longArrayKey(key), anyValuesToLongs(values)); - } else if (anyValue.hasDoubleValue()) { + } else if (anyValue.double_value != null) { builder.put(AttributeKey.doubleArrayKey(key), anyValuesToDoubles(values)); } else { throw new UnsupportedOperationException(); @@ -106,38 +106,38 @@ private static void addArray(AttributesBuilder builder, String key, ArrayValue a } private static AnyValue stringToAnyValue(String value) { - AnyValue.Builder anyValue = AnyValue.newBuilder(); + AnyValue.Builder anyValue = new AnyValue.Builder(); - anyValue.setStringValue(value); + anyValue.string_value(value); return anyValue.build(); } private static AnyValue booleanToAnyValue(Boolean value) { - AnyValue.Builder anyValue = AnyValue.newBuilder(); + AnyValue.Builder anyValue = new AnyValue.Builder(); if (value != null) { - anyValue.setBoolValue(value); + anyValue.bool_value(value); } return anyValue.build(); } private static AnyValue longToAnyValue(Long value) { - AnyValue.Builder anyValue = 
AnyValue.newBuilder(); + AnyValue.Builder anyValue = new AnyValue.Builder(); if (value != null) { - anyValue.setIntValue(value); + anyValue.int_value(value); } return anyValue.build(); } private static AnyValue doubleToAnyValue(Double value) { - AnyValue.Builder anyValue = AnyValue.newBuilder(); + AnyValue.Builder anyValue = new AnyValue.Builder(); if (value != null) { - anyValue.setDoubleValue(value); + anyValue.double_value(value); } return anyValue.build(); @@ -216,18 +216,18 @@ private static List anyValuesToDoubles(List values) { } private static String anyValueToString(AnyValue value) { - return value.getStringValue(); + return value.string_value; } private static Boolean anyValueToBoolean(AnyValue value) { - return value.getBoolValue(); + return value.bool_value; } private static Long anyValueToLong(AnyValue value) { - return value.getIntValue(); + return value.int_value; } private static Double anyValueToDouble(AnyValue value) { - return value.getDoubleValue(); + return value.double_value; } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java index ef263c53d..aff6ab560 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/BaseProtoSignalsDataMapper.java @@ -76,10 +76,10 @@ protected Resource protoToResource( protected InstrumentationScopeInfo protoToInstrumentationScopeInfo( InstrumentationScope scope, @Nullable String schemaUrl) { - InstrumentationScopeInfoBuilder builder = InstrumentationScopeInfo.builder(scope.getName()); - builder.setAttributes(protoToAttributes(scope.getAttributesList())); - if (!scope.getVersion().isEmpty()) { - builder.setVersion(scope.getVersion()); + InstrumentationScopeInfoBuilder builder = InstrumentationScopeInfo.builder(scope.name); + builder.setAttributes(protoToAttributes(scope.attributes)); + if (!scope.version.isEmpty()) { + builder.setVersion(scope.version); } if (schemaUrl != null) { builder.setSchemaUrl(schemaUrl); @@ -89,11 +89,11 @@ protected InstrumentationScopeInfo protoToInstrumentationScopeInfo( protected InstrumentationScope instrumentationScopeToProto(InstrumentationScopeInfo source) { InstrumentationScope.Builder builder = - InstrumentationScope.newBuilder().setName(source.getName()); + new InstrumentationScope.Builder().name(source.getName()); if (source.getVersion() != null) { - builder.setVersion(source.getVersion()); + builder.version(source.getVersion()); } - builder.addAllAttributes(attributesToProto(source.getAttributes())); + builder.attributes.addAll(attributesToProto(source.getAttributes())); return builder.build(); } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java index c93894cb6..702202443 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/common/ResourceMapper.java @@ 
-18,9 +18,8 @@ public static ResourceMapper getInstance() { } public Resource mapToProto(io.opentelemetry.sdk.resources.Resource sdkResource) { - return Resource.newBuilder() - .addAllAttributes( - AttributesMapper.getInstance().attributesToProto(sdkResource.getAttributes())) + return new Resource.Builder() + .attributes(AttributesMapper.getInstance().attributesToProto(sdkResource.getAttributes())) .build(); } @@ -31,8 +30,7 @@ public io.opentelemetry.sdk.resources.Resource mapToSdk( if (schemaUrl != null) { resource.setSchemaUrl(schemaUrl); } - resource.putAll( - AttributesMapper.getInstance().protoToAttributes(protoResource.getAttributesList())); + resource.putAll(AttributesMapper.getInstance().protoToAttributes(protoResource.attributes)); return resource.build(); } } diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java index 5fbaa62b7..1d11c177f 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapper.java @@ -40,7 +40,7 @@ protected LogRecordData protoToSignalItem( @Override protected List getProtoResources(LogsData logsData) { - return logsData.getResourceLogsList(); + return logsData.resource_logs; } @Override @@ -53,49 +53,49 @@ protected LogsData createProtoData( for (Map.Entry> logsByScope : instrumentationScopeInfoScopedLogsMap.entrySet()) { ScopeLogs.Builder scopeBuilder = createProtoScopeBuilder(logsByScope.getKey()); - scopeBuilder.addAllLogRecords(logsByScope.getValue()); - resourceLogsBuilder.addScopeLogs(scopeBuilder.build()); + scopeBuilder.log_records.addAll(logsByScope.getValue()); + resourceLogsBuilder.scope_logs.add(scopeBuilder.build()); } items.add(resourceLogsBuilder.build()); }); - return LogsData.newBuilder().addAllResourceLogs(items).build(); + return new LogsData.Builder().resource_logs(items).build(); } private ScopeLogs.Builder createProtoScopeBuilder(InstrumentationScopeInfo scopeInfo) { ScopeLogs.Builder builder = - ScopeLogs.newBuilder().setScope(instrumentationScopeToProto(scopeInfo)); + new ScopeLogs.Builder().scope(instrumentationScopeToProto(scopeInfo)); if (scopeInfo.getSchemaUrl() != null) { - builder.setSchemaUrl(scopeInfo.getSchemaUrl()); + builder.schema_url(scopeInfo.getSchemaUrl()); } return builder; } private ResourceLogs.Builder createProtoResourceBuilder(Resource resource) { - ResourceLogs.Builder builder = ResourceLogs.newBuilder().setResource(resourceToProto(resource)); + ResourceLogs.Builder builder = new ResourceLogs.Builder().resource(resourceToProto(resource)); if (resource.getSchemaUrl() != null) { - builder.setSchemaUrl(resource.getSchemaUrl()); + builder.schema_url(resource.getSchemaUrl()); } return builder; } @Override protected List getSignalsFromProto(ScopeLogs scopeSignals) { - return scopeSignals.getLogRecordsList(); + return scopeSignals.log_records; } @Override protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeLogs scopeSignals) { - return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl()); + return protoToInstrumentationScopeInfo(scopeSignals.scope, scopeSignals.schema_url); } @Override protected List getScopes(ResourceLogs resourceSignal) 
{ - return resourceSignal.getScopeLogsList(); + return resourceSignal.scope_logs; } @Override protected Resource getResourceFromProto(ResourceLogs resourceSignal) { - return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl()); + return protoToResource(resourceSignal.resource, resourceSignal.schema_url); } @Override diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java index 34b92f2d1..a81ab9957 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapper.java @@ -40,7 +40,7 @@ protected MetricData protoToSignalItem( @Override protected List getProtoResources(MetricsData protoData) { - return protoData.getResourceMetricsList(); + return protoData.resource_metrics; } @Override @@ -53,50 +53,50 @@ protected MetricsData createProtoData( for (Map.Entry> metricsByScope : instrumentationScopeInfoScopedMetricsMap.entrySet()) { ScopeMetrics.Builder scopeBuilder = createProtoScopeBuilder(metricsByScope.getKey()); - scopeBuilder.addAllMetrics(metricsByScope.getValue()); - resourceMetricsBuilder.addScopeMetrics(scopeBuilder.build()); + scopeBuilder.metrics.addAll(metricsByScope.getValue()); + resourceMetricsBuilder.scope_metrics.add(scopeBuilder.build()); } items.add(resourceMetricsBuilder.build()); }); - return MetricsData.newBuilder().addAllResourceMetrics(items).build(); + return new MetricsData.Builder().resource_metrics(items).build(); } private ScopeMetrics.Builder createProtoScopeBuilder(InstrumentationScopeInfo scopeInfo) { ScopeMetrics.Builder builder = - ScopeMetrics.newBuilder().setScope(instrumentationScopeToProto(scopeInfo)); + new ScopeMetrics.Builder().scope(instrumentationScopeToProto(scopeInfo)); if (scopeInfo.getSchemaUrl() != null) { - builder.setSchemaUrl(scopeInfo.getSchemaUrl()); + builder.schema_url(scopeInfo.getSchemaUrl()); } return builder; } private ResourceMetrics.Builder createProtoResourceBuilder(Resource resource) { ResourceMetrics.Builder builder = - ResourceMetrics.newBuilder().setResource(resourceToProto(resource)); + new ResourceMetrics.Builder().resource(resourceToProto(resource)); if (resource.getSchemaUrl() != null) { - builder.setSchemaUrl(resource.getSchemaUrl()); + builder.schema_url(resource.getSchemaUrl()); } return builder; } @Override protected List getSignalsFromProto(ScopeMetrics scopeSignals) { - return scopeSignals.getMetricsList(); + return scopeSignals.metrics; } @Override protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeMetrics scopeSignals) { - return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl()); + return protoToInstrumentationScopeInfo(scopeSignals.scope, scopeSignals.schema_url); } @Override protected List getScopes(ResourceMetrics resourceSignal) { - return resourceSignal.getScopeMetricsList(); + return resourceSignal.scope_metrics; } @Override protected Resource getResourceFromProto(ResourceMetrics resourceSignal) { - return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl()); + return protoToResource(resourceSignal.resource, resourceSignal.schema_url); } @Override diff --git 
a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java index cfe5c2d59..18acf3a1f 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapper.java @@ -33,7 +33,7 @@ protected Span signalItemToProto(SpanData sourceData) { @Override protected List getProtoResources(TracesData protoData) { - return protoData.getResourceSpansList(); + return protoData.resource_spans; } @Override @@ -52,32 +52,32 @@ protected TracesData createProtoData( for (Map.Entry> spansByScope : instrumentationScopeInfoScopedSpansMap.entrySet()) { ScopeSpans.Builder scopeBuilder = createProtoScopeBuilder(spansByScope.getKey()); - scopeBuilder.addAllSpans(spansByScope.getValue()); - resourceSpansBuilder.addScopeSpans(scopeBuilder.build()); + scopeBuilder.spans.addAll(spansByScope.getValue()); + resourceSpansBuilder.scope_spans.add(scopeBuilder.build()); } items.add(resourceSpansBuilder.build()); }); - return TracesData.newBuilder().addAllResourceSpans(items).build(); + return new TracesData.Builder().resource_spans(items).build(); } @Override protected List getSignalsFromProto(ScopeSpans scopeSignals) { - return scopeSignals.getSpansList(); + return scopeSignals.spans; } @Override protected InstrumentationScopeInfo getInstrumentationScopeFromProto(ScopeSpans scopeSignals) { - return protoToInstrumentationScopeInfo(scopeSignals.getScope(), scopeSignals.getSchemaUrl()); + return protoToInstrumentationScopeInfo(scopeSignals.scope, scopeSignals.schema_url); } @Override protected List getScopes(ResourceSpans resourceSignal) { - return resourceSignal.getScopeSpansList(); + return resourceSignal.scope_spans; } @Override protected Resource getResourceFromProto(ResourceSpans resourceSignal) { - return protoToResource(resourceSignal.getResource(), resourceSignal.getSchemaUrl()); + return protoToResource(resourceSignal.resource, resourceSignal.schema_url); } @Override @@ -91,10 +91,9 @@ protected InstrumentationScopeInfo getInstrumentationScopeInfo(SpanData source) } private ResourceSpans.Builder createProtoResourceBuilder(Resource resource) { - ResourceSpans.Builder builder = - ResourceSpans.newBuilder().setResource(resourceToProto(resource)); + ResourceSpans.Builder builder = new ResourceSpans.Builder().resource(resourceToProto(resource)); if (resource.getSchemaUrl() != null) { - builder.setSchemaUrl(resource.getSchemaUrl()); + builder.schema_url(resource.getSchemaUrl()); } return builder; } @@ -102,9 +101,9 @@ private ResourceSpans.Builder createProtoResourceBuilder(Resource resource) { private ScopeSpans.Builder createProtoScopeBuilder( InstrumentationScopeInfo instrumentationScopeInfo) { ScopeSpans.Builder builder = - ScopeSpans.newBuilder().setScope(instrumentationScopeToProto(instrumentationScopeInfo)); + new ScopeSpans.Builder().scope(instrumentationScopeToProto(instrumentationScopeInfo)); if (instrumentationScopeInfo.getSchemaUrl() != null) { - builder.setSchemaUrl(instrumentationScopeInfo.getSchemaUrl()); + builder.schema_url(instrumentationScopeInfo.getSchemaUrl()); } return builder; } From 73bfa0e1096ab3a900425d703fca47647ce52aed Mon Sep 17 00:00:00 2001 From: Cesar Munoz 
<56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:41:03 +0200 Subject: [PATCH 06/16] Using Wire's generated classes in tests --- .../mapping/logs/ProtoLogsDataMapperTest.java | 34 +++++++++---------- .../metrics/ProtoMetricsDataMapperTest.java | 30 ++++++++-------- .../spans/ProtoSpansDataMapperTest.java | 30 ++++++++-------- 3 files changed, 47 insertions(+), 47 deletions(-) diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java index c4c4300e2..48a563300 100644 --- a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/logs/ProtoLogsDataMapperTest.java @@ -87,10 +87,10 @@ void verifyConversionDataStructure() { LogsData result = mapToProto(signals); - List resourceLogsList = result.getResourceLogsList(); + List resourceLogsList = result.resource_logs; assertEquals(1, resourceLogsList.size()); - assertEquals(1, resourceLogsList.get(0).getScopeLogsList().size()); - assertEquals(1, resourceLogsList.get(0).getScopeLogsList().get(0).getLogRecordsList().size()); + assertEquals(1, resourceLogsList.get(0).scope_logs.size()); + assertEquals(1, resourceLogsList.get(0).scope_logs.get(0).log_records.size()); assertThat(mapFromProto(result)).containsExactlyInAnyOrderElementsOf(signals); } @@ -101,14 +101,14 @@ void verifyMultipleLogsWithSameResourceAndScope() { LogsData proto = mapToProto(signals); - List resourceLogsList = proto.getResourceLogsList(); + List resourceLogsList = proto.resource_logs; assertEquals(1, resourceLogsList.size()); - List scopeLogsList = resourceLogsList.get(0).getScopeLogsList(); + List scopeLogsList = resourceLogsList.get(0).scope_logs; assertEquals(1, scopeLogsList.size()); - List logRecords = scopeLogsList.get(0).getLogRecordsList(); + List logRecords = scopeLogsList.get(0).log_records; assertEquals(2, logRecords.size()); - assertEquals("Log body", logRecords.get(0).getBody().getStringValue()); - assertEquals("Other log body", logRecords.get(1).getBody().getStringValue()); + assertEquals("Log body", logRecords.get(0).body.string_value); + assertEquals("Other log body", logRecords.get(1).body.string_value); assertEquals(2, mapFromProto(proto).size()); @@ -122,14 +122,14 @@ void verifyMultipleLogsWithSameResourceDifferentScope() { LogsData proto = mapToProto(signals); - List resourceLogsList = proto.getResourceLogsList(); + List resourceLogsList = proto.resource_logs; assertEquals(1, resourceLogsList.size()); - List scopeLogsList = resourceLogsList.get(0).getScopeLogsList(); + List scopeLogsList = resourceLogsList.get(0).scope_logs; assertEquals(2, scopeLogsList.size()); ScopeLogs firstScope = scopeLogsList.get(0); ScopeLogs secondScope = scopeLogsList.get(1); - List firstScopeLogs = firstScope.getLogRecordsList(); - List secondScopeLogs = secondScope.getLogRecordsList(); + List firstScopeLogs = firstScope.log_records; + List secondScopeLogs = secondScope.log_records; assertEquals(1, firstScopeLogs.size()); assertEquals(1, secondScopeLogs.size()); @@ -142,18 +142,18 @@ void verifyMultipleLogsWithDifferentResource() { LogsData proto = mapToProto(signals); - List resourceLogsList = proto.getResourceLogsList(); + List resourceLogsList = 
proto.resource_logs; assertEquals(2, resourceLogsList.size()); ResourceLogs firstResourceLogs = resourceLogsList.get(0); ResourceLogs secondResourceLogs = resourceLogsList.get(1); - List firstScopeLogsList = firstResourceLogs.getScopeLogsList(); - List secondScopeLogsList = secondResourceLogs.getScopeLogsList(); + List firstScopeLogsList = firstResourceLogs.scope_logs; + List secondScopeLogsList = secondResourceLogs.scope_logs; assertEquals(1, firstScopeLogsList.size()); assertEquals(1, secondScopeLogsList.size()); ScopeLogs firstScope = firstScopeLogsList.get(0); ScopeLogs secondScope = secondScopeLogsList.get(0); - List firstScopeLogs = firstScope.getLogRecordsList(); - List secondScopeLogs = secondScope.getLogRecordsList(); + List firstScopeLogs = firstScope.log_records; + List secondScopeLogs = secondScope.log_records; assertEquals(1, firstScopeLogs.size()); assertEquals(1, secondScopeLogs.size()); diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java index b2df173ec..16812f3b6 100644 --- a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/metrics/ProtoMetricsDataMapperTest.java @@ -80,10 +80,10 @@ void verifyConversionDataStructure() { MetricsData proto = mapToProto(signals); - List resourceMetrics = proto.getResourceMetricsList(); + List resourceMetrics = proto.resource_metrics; assertEquals(1, resourceMetrics.size()); - assertEquals(1, resourceMetrics.get(0).getScopeMetricsList().size()); - assertEquals(1, resourceMetrics.get(0).getScopeMetricsList().get(0).getMetricsList().size()); + assertEquals(1, resourceMetrics.get(0).scope_metrics.size()); + assertEquals(1, resourceMetrics.get(0).scope_metrics.get(0).metrics.size()); assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); } @@ -94,11 +94,11 @@ void verifyMultipleMetricsWithSameResourceAndScope() { MetricsData proto = mapToProto(signals); - List resourceMetrics = proto.getResourceMetricsList(); + List resourceMetrics = proto.resource_metrics; assertEquals(1, resourceMetrics.size()); - List scopeMetrics = resourceMetrics.get(0).getScopeMetricsList(); + List scopeMetrics = resourceMetrics.get(0).scope_metrics; assertEquals(1, scopeMetrics.size()); - List metrics = scopeMetrics.get(0).getMetricsList(); + List metrics = scopeMetrics.get(0).metrics; assertEquals(2, metrics.size()); assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); @@ -111,14 +111,14 @@ void verifyMultipleMetricsWithSameResourceDifferentScope() { MetricsData proto = mapToProto(signals); - List resourceMetrics = proto.getResourceMetricsList(); + List resourceMetrics = proto.resource_metrics; assertEquals(1, resourceMetrics.size()); - List scopeMetrics = resourceMetrics.get(0).getScopeMetricsList(); + List scopeMetrics = resourceMetrics.get(0).scope_metrics; assertEquals(2, scopeMetrics.size()); ScopeMetrics firstScope = scopeMetrics.get(0); ScopeMetrics secondScope = scopeMetrics.get(1); - List firstScopeMetrics = firstScope.getMetricsList(); - List secondScopeMetrics = secondScope.getMetricsList(); + List firstScopeMetrics = firstScope.metrics; + List secondScopeMetrics = 
secondScope.metrics; assertEquals(1, firstScopeMetrics.size()); assertEquals(1, secondScopeMetrics.size()); @@ -132,18 +132,18 @@ void verifyMultipleMetricsWithDifferentResource() { MetricsData proto = mapToProto(signals); - List resourceMetrics = proto.getResourceMetricsList(); + List resourceMetrics = proto.resource_metrics; assertEquals(2, resourceMetrics.size()); ResourceMetrics firstResourceMetrics = resourceMetrics.get(0); ResourceMetrics secondResourceMetrics = resourceMetrics.get(1); - List firstScopeMetrics = firstResourceMetrics.getScopeMetricsList(); - List secondScopeMetrics = secondResourceMetrics.getScopeMetricsList(); + List firstScopeMetrics = firstResourceMetrics.scope_metrics; + List secondScopeMetrics = secondResourceMetrics.scope_metrics; assertEquals(1, firstScopeMetrics.size()); assertEquals(1, secondScopeMetrics.size()); ScopeMetrics firstScope = firstScopeMetrics.get(0); ScopeMetrics secondScope = secondScopeMetrics.get(0); - List firstMetrics = firstScope.getMetricsList(); - List secondMetrics = secondScope.getMetricsList(); + List firstMetrics = firstScope.metrics; + List secondMetrics = secondScope.metrics; assertEquals(1, firstMetrics.size()); assertEquals(1, secondMetrics.size()); diff --git a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java index 4aca59cf1..bdd9c053c 100644 --- a/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java +++ b/disk-buffering/src/test/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/mapping/spans/ProtoSpansDataMapperTest.java @@ -118,10 +118,10 @@ void verifyConversionDataStructure() { TracesData proto = mapToProto(signals); - List resourceSpans = proto.getResourceSpansList(); + List resourceSpans = proto.resource_spans; assertEquals(1, resourceSpans.size()); - assertEquals(1, resourceSpans.get(0).getScopeSpansList().size()); - assertEquals(1, resourceSpans.get(0).getScopeSpansList().get(0).getSpansList().size()); + assertEquals(1, resourceSpans.get(0).scope_spans.size()); + assertEquals(1, resourceSpans.get(0).scope_spans.get(0).spans.size()); assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); } @@ -132,11 +132,11 @@ void verifyMultipleSpansWithSameResourceAndScope() { TracesData proto = mapToProto(signals); - List resourceSpans = proto.getResourceSpansList(); + List resourceSpans = proto.resource_spans; assertEquals(1, resourceSpans.size()); - List scopeSpans = resourceSpans.get(0).getScopeSpansList(); + List scopeSpans = resourceSpans.get(0).scope_spans; assertEquals(1, scopeSpans.size()); - List spans = scopeSpans.get(0).getSpansList(); + List spans = scopeSpans.get(0).spans; assertEquals(2, spans.size()); assertThat(mapFromProto(proto)).containsExactlyInAnyOrderElementsOf(signals); @@ -148,14 +148,14 @@ void verifyMultipleSpansWithSameResourceDifferentScope() { TracesData proto = mapToProto(signals); - List resourceSpans = proto.getResourceSpansList(); + List resourceSpans = proto.resource_spans; assertEquals(1, resourceSpans.size()); - List scopeSpans = resourceSpans.get(0).getScopeSpansList(); + List scopeSpans = resourceSpans.get(0).scope_spans; assertEquals(2, scopeSpans.size()); ScopeSpans firstScope = scopeSpans.get(0); ScopeSpans secondScope = scopeSpans.get(1); - List 
firstScopeSpans = firstScope.getSpansList(); - List secondScopeSpans = secondScope.getSpansList(); + List firstScopeSpans = firstScope.spans; + List secondScopeSpans = secondScope.spans; assertEquals(1, firstScopeSpans.size()); assertEquals(1, secondScopeSpans.size()); @@ -168,18 +168,18 @@ void verifyMultipleSpansWithDifferentResource() { TracesData proto = mapToProto(signals); - List resourceSpans = proto.getResourceSpansList(); + List resourceSpans = proto.resource_spans; assertEquals(2, resourceSpans.size()); ResourceSpans firstResourceSpans = resourceSpans.get(0); ResourceSpans secondResourceSpans = resourceSpans.get(1); - List firstScopeSpans = firstResourceSpans.getScopeSpansList(); - List secondScopeSpans = secondResourceSpans.getScopeSpansList(); + List firstScopeSpans = firstResourceSpans.scope_spans; + List secondScopeSpans = secondResourceSpans.scope_spans; assertEquals(1, firstScopeSpans.size()); assertEquals(1, secondScopeSpans.size()); ScopeSpans firstScope = firstScopeSpans.get(0); ScopeSpans secondScope = secondScopeSpans.get(0); - List firstSpans = firstScope.getSpansList(); - List secondSpans = secondScope.getSpansList(); + List firstSpans = firstScope.spans; + List secondSpans = secondScope.spans; assertEquals(1, firstSpans.size()); assertEquals(1, secondSpans.size()); From 35d4f56037136ee5604018c87a6522a0f6a2b130 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:41:55 +0200 Subject: [PATCH 07/16] Adjusting based on lint warnings --- .../storage/files/reader/DelimitedProtoStreamReader.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java index cb9281c66..ff7e861ec 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java @@ -45,7 +45,7 @@ private int getNextItemSize() { } } - private int readRawVarint32(final int firstByte) throws IOException { + private int readRawVarint32(int firstByte) throws IOException { if ((firstByte & 0x80) == 0) { return firstByte; } @@ -53,7 +53,7 @@ private int readRawVarint32(final int firstByte) throws IOException { int result = firstByte & 0x7f; int offset = 7; for (; offset < 32; offset += 7) { - final int b = inputStream.read(); + int b = inputStream.read(); if (b == -1) { throw new IllegalStateException(); } @@ -64,7 +64,7 @@ private int readRawVarint32(final int firstByte) throws IOException { } // Keep reading up to 64 bits. 
for (; offset < 64; offset += 7) { - final int b = inputStream.read(); + int b = inputStream.read(); if (b == -1) { throw new IllegalStateException(); } From e23043934c2b1b389eadfcb781b22a668c624eae Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:08:20 +0200 Subject: [PATCH 08/16] Created ProtobufTools for common proto ops --- .../serializers/LogRecordDataSerializer.java | 4 +- .../serializers/MetricDataSerializer.java | 4 +- .../serializers/SpanDataSerializer.java | 4 +- .../reader/DelimitedProtoStreamReader.java | 33 +------------- .../internal/utils/ProtobufTools.java | 45 +++++++++++++++++++ 5 files changed, 53 insertions(+), 37 deletions(-) create mode 100644 disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java index c6dbcf786..2cd219a05 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/LogRecordDataSerializer.java @@ -5,8 +5,8 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.logs.ProtoLogsDataMapper; +import io.opentelemetry.contrib.disk.buffering.internal.utils.ProtobufTools; import io.opentelemetry.proto.logs.v1.LogsData; import io.opentelemetry.sdk.logs.data.LogRecordData; import java.io.ByteArrayOutputStream; @@ -28,7 +28,7 @@ public byte[] serialize(Collection logRecordData) { LogsData proto = ProtoLogsDataMapper.getInstance().toProto(logRecordData); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { int size = LogsData.ADAPTER.encodedSize(proto); - ProtoAdapter.UINT32.encode(out, size); + ProtobufTools.writeRawVarint32(size, out); proto.encode(out); return out.toByteArray(); } catch (IOException e) { diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java index e45c72a0f..5c317980f 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/MetricDataSerializer.java @@ -5,8 +5,8 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.metrics.ProtoMetricsDataMapper; +import io.opentelemetry.contrib.disk.buffering.internal.utils.ProtobufTools; import io.opentelemetry.proto.metrics.v1.MetricsData; import io.opentelemetry.sdk.metrics.data.MetricData; import java.io.ByteArrayOutputStream; @@ -28,7 +28,7 @@ public byte[] serialize(Collection metricData) { MetricsData proto = ProtoMetricsDataMapper.getInstance().toProto(metricData); try (ByteArrayOutputStream out = new 
ByteArrayOutputStream()) { int size = MetricsData.ADAPTER.encodedSize(proto); - ProtoAdapter.UINT32.encode(out, size); + ProtobufTools.writeRawVarint32(size, out); proto.encode(out); return out.toByteArray(); } catch (IOException e) { diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java index 36b293d52..4c1cfdefd 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SpanDataSerializer.java @@ -5,8 +5,8 @@ package io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers; -import com.squareup.wire.ProtoAdapter; import io.opentelemetry.contrib.disk.buffering.internal.serialization.mapping.spans.ProtoSpansDataMapper; +import io.opentelemetry.contrib.disk.buffering.internal.utils.ProtobufTools; import io.opentelemetry.proto.trace.v1.TracesData; import io.opentelemetry.sdk.trace.data.SpanData; import java.io.ByteArrayOutputStream; @@ -28,7 +28,7 @@ public byte[] serialize(Collection spanData) { TracesData proto = ProtoSpansDataMapper.getInstance().toProto(spanData); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { int size = TracesData.ADAPTER.encodedSize(proto); - ProtoAdapter.UINT32.encode(out, size); + ProtobufTools.writeRawVarint32(size, out); proto.encode(out); return out.toByteArray(); } catch (IOException e) { diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java index ff7e861ec..0f9723c4c 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/reader/DelimitedProtoStreamReader.java @@ -6,6 +6,7 @@ package io.opentelemetry.contrib.disk.buffering.internal.storage.files.reader; import io.opentelemetry.contrib.disk.buffering.internal.storage.files.utils.CountingInputStream; +import io.opentelemetry.contrib.disk.buffering.internal.utils.ProtobufTools; import java.io.IOException; import java.io.InputStream; import javax.annotation.Nullable; @@ -39,42 +40,12 @@ private int getNextItemSize() { if (firstByte == -1) { return 0; } - return readRawVarint32(firstByte); + return ProtobufTools.readRawVarint32(firstByte, inputStream); } catch (IOException e) { return 0; } } - private int readRawVarint32(int firstByte) throws IOException { - if ((firstByte & 0x80) == 0) { - return firstByte; - } - - int result = firstByte & 0x7f; - int offset = 7; - for (; offset < 32; offset += 7) { - int b = inputStream.read(); - if (b == -1) { - throw new IllegalStateException(); - } - result |= (b & 0x7f) << offset; - if ((b & 0x80) == 0) { - return result; - } - } - // Keep reading up to 64 bits. 
- for (; offset < 64; offset += 7) { - int b = inputStream.read(); - if (b == -1) { - throw new IllegalStateException(); - } - if ((b & 0x80) == 0) { - return result; - } - } - throw new IllegalStateException(); - } - public static class Factory implements StreamReader.Factory { private static final Factory INSTANCE = new DelimitedProtoStreamReader.Factory(); diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java new file mode 100644 index 000000000..1079b74b9 --- /dev/null +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java @@ -0,0 +1,45 @@ +package io.opentelemetry.contrib.disk.buffering.internal.utils; + +import com.squareup.wire.ProtoAdapter; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +public final class ProtobufTools { + + private ProtobufTools() {} + + public static void writeRawVarint32(int value, OutputStream out) throws IOException { + ProtoAdapter.INT32.encode(out, value); + } + + public static int readRawVarint32(int firstByte, InputStream input) throws IOException { + if ((firstByte & 0x80) == 0) { + return firstByte; + } + + int result = firstByte & 0x7f; + int offset = 7; + for (; offset < 32; offset += 7) { + int b = input.read(); + if (b == -1) { + throw new IllegalStateException(); + } + result |= (b & 0x7f) << offset; + if ((b & 0x80) == 0) { + return result; + } + } + // Keep reading up to 64 bits. + for (; offset < 64; offset += 7) { + int b = input.read(); + if (b == -1) { + throw new IllegalStateException(); + } + if ((b & 0x80) == 0) { + return result; + } + } + throw new IllegalStateException(); + } +} From f4a52eafdc872c3f29b9bca57c723097036e5242 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:12:56 +0200 Subject: [PATCH 09/16] Adding comment to method gotten from CodedInputStream --- .../contrib/disk/buffering/internal/utils/ProtobufTools.java | 1 + 1 file changed, 1 insertion(+) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java index 1079b74b9..469adec37 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java @@ -13,6 +13,7 @@ public static void writeRawVarint32(int value, OutputStream out) throws IOExcept ProtoAdapter.INT32.encode(out, value); } + /** This code has been taken from Google's protobuf CodedInputStream. 
*/ public static int readRawVarint32(int firstByte, InputStream input) throws IOException { if ((firstByte & 0x80) == 0) { return firstByte; From ef77953ac541b4cbc11bd630b926ccc0355d4e5d Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:23:32 +0200 Subject: [PATCH 10/16] Avoiding generating unused classes with wire --- disk-buffering/build.gradle.kts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index c250be2bc..3256827a3 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts @@ -54,4 +54,10 @@ wire { sourcePath { srcJar("io.opentelemetry.proto:opentelemetry-proto:0.20.0-alpha") } + + root( + "opentelemetry.proto.trace.v1.TracesData", + "opentelemetry.proto.metrics.v1.MetricsData", + "opentelemetry.proto.logs.v1.LogsData", + ) } From acece8d62a0cf454de21bf6e624e941c7fbe2421 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:25:46 +0200 Subject: [PATCH 11/16] Removing unused dependencies --- disk-buffering/build.gradle.kts | 1 - 1 file changed, 1 deletion(-) diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index 3256827a3..88158e21b 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts @@ -18,7 +18,6 @@ java { dependencies { api("io.opentelemetry:opentelemetry-sdk") - implementation("io.opentelemetry:opentelemetry-exporter-otlp-common") compileOnly("com.google.auto.value:auto-value-annotations") annotationProcessor("com.google.auto.value:auto-value") signature("com.toasttab.android:gummy-bears-api-24:0.5.1@signature") From ad177ae8396e4b9a5ad7facb2ef30e77ec4445a6 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:58:35 +0200 Subject: [PATCH 12/16] Fixing gradle issues introduced by the wire plugin --- disk-buffering/build.gradle.kts | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index 88158e21b..9e1eaf107 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts @@ -60,3 +60,16 @@ wire { "opentelemetry.proto.logs.v1.LogsData", ) } + +// The javadoc from wire's generated classes has errors that make the task that generates the "javadoc" artifact fail. This +// makes the javadoc task ignore those generated classes. +tasks.withType(Javadoc::class.java) { + exclude("io/opentelemetry/proto/*") +} + +// The task that generates the "sources" artifact fails due to a "duplicated io/opentelemetry/proto/metrics/v1/Exemplar.java" file, +// which is strange since there's only one such file generated by wire and the main "jar" task doesn't raise the same issue. +// This allows ignoring any subsequent files with the same path when creating the "sources" artifact.
+tasks.named("sourcesJar", Jar::class.java) { + duplicatesStrategy = DuplicatesStrategy.EXCLUDE +} From 60694fda30b8bc05683a9a12d93112d57c9c324d Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 18:41:33 +0200 Subject: [PATCH 13/16] Updating changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e5e738b9..fa5fb36fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Disk buffering + +- Removing protobuf dependency ([#?]()) + ## Version 1.28.0 (2023-07-14) ### AWS X-Ray SDK support From 4b08de1241a16b9f7812a0fd94dcfae05a78a2dc Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 19:23:05 +0200 Subject: [PATCH 14/16] Adding PR ID to the changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa5fb36fd..b13fb213b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,8 @@ ### Disk buffering -- Removing protobuf dependency ([#?]()) +- Removing protobuf + dependency ([#1008](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1008)) ## Version 1.28.0 (2023-07-14) From bb71c76ed6a39b4c4672b7878026e31f5df86bc5 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 19:27:15 +0200 Subject: [PATCH 15/16] Running spotlessApply --- .../contrib/disk/buffering/internal/utils/ProtobufTools.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java index 469adec37..04816e81e 100644 --- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java +++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/internal/utils/ProtobufTools.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.contrib.disk.buffering.internal.utils; import com.squareup.wire.ProtoAdapter; From a51b07891a5a7c600d52f2b3bb604d33a3849455 Mon Sep 17 00:00:00 2001 From: Cesar Munoz <56847527+LikeTheSalad@users.noreply.github.com> Date: Mon, 21 Aug 2023 19:31:48 +0200 Subject: [PATCH 16/16] Running spotlessApply --- disk-buffering/build.gradle.kts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index 9e1eaf107..be2259dbb 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts @@ -55,9 +55,9 @@ wire { } root( - "opentelemetry.proto.trace.v1.TracesData", - "opentelemetry.proto.metrics.v1.MetricsData", - "opentelemetry.proto.logs.v1.LogsData", + "opentelemetry.proto.trace.v1.TracesData", + "opentelemetry.proto.metrics.v1.MetricsData", + "opentelemetry.proto.logs.v1.LogsData", ) }