From f07bd121ce9ccacd56e502c81cb4c811f813cf29 Mon Sep 17 00:00:00 2001
From: Alex Boten <223565+codeboten@users.noreply.github.com>
Date: Fri, 30 Aug 2024 08:40:35 -0700
Subject: [PATCH] [chore] use generated test harness for groupbyattrs processor

Like https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/34940
but for the groupbyattrs processor.

Signed-off-by: Alex Boten <223565+codeboten@users.noreply.github.com>
---
 .../groupbyattrsprocessor/processor_test.go | 224 +++++++++++++--
 .../groupbyattrsprocessor/telemetry_test.go | 258 ------------------
 2 files changed, 208 insertions(+), 274 deletions(-)
 delete mode 100644 processor/groupbyattrsprocessor/telemetry_test.go

diff --git a/processor/groupbyattrsprocessor/processor_test.go b/processor/groupbyattrsprocessor/processor_test.go
index 315880f90cee..a6722ddb6f97 100644
--- a/processor/groupbyattrsprocessor/processor_test.go
+++ b/processor/groupbyattrsprocessor/processor_test.go
@@ -18,6 +18,8 @@ import (
 	"go.opentelemetry.io/collector/pdata/pmetric"
 	"go.opentelemetry.io/collector/pdata/ptrace"
 	"go.opentelemetry.io/collector/processor/processortest"
+	"go.opentelemetry.io/otel/attribute"
+	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 )

 var (
@@ -270,8 +272,8 @@ func TestComplexAttributeGrouping(t *testing.T) {
 			inputMetrics := someComplexMetrics(tt.withResourceAttrIndex, tt.inputResourceCount, tt.inputInstrumentationLibraryCount, 2)
 			inputHistogramMetrics := someComplexHistogramMetrics(tt.withResourceAttrIndex, tt.inputResourceCount, tt.inputInstrumentationLibraryCount, 2, 2)

-			tel := setupTelemetry()
-			gap, err := createGroupByAttrsProcessor(tel.NewProcessorCreateSettings(), tt.groupByKeys)
+			tel := setupTestTelemetry()
+			gap, err := createGroupByAttrsProcessor(tel.NewSettings(), tt.groupByKeys)
 			require.NoError(t, err)

 			processedLogs, err := gap.processLogs(context.Background(), inputLogs)
@@ -371,23 +373,213 @@ func TestComplexAttributeGrouping(t *testing.T) {
 					}
 				}
 			}
-
-			expected := expectedMetrics{}
+			var want []metricdata.Metrics
 			if tt.shouldMoveCommonGroupedAttr {
-				expected.mDistLogGroups = int64(tt.outputResourceCount)
-				expected.mNumGroupedLogs = int64(tt.outputTotalRecordsCount)
-
-				expected.mDistMetricGroups = int64(tt.outputResourceCount)
-				expected.mNumGroupedMetrics = 4 * int64(tt.outputTotalRecordsCount)
-
-				expected.mDistSpanGroups = int64(tt.outputResourceCount)
-				expected.mNumGroupedSpans = int64(tt.outputTotalRecordsCount)
+				want = []metricdata.Metrics{
+					{
+						Name: "otelcol_processor_groupbyattrs_num_grouped_logs",
+						Description: "Number of logs that had attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_num_grouped_metrics",
+						Description: "Number of metrics that had attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: 4 * int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_num_grouped_spans",
+						Description: "Number of spans that had attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_log_groups",
+						Description: "Distribution of groups extracted for logs",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 1,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_metric_groups",
+						Description: "Distribution of groups extracted for metrics",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 2,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: 2 * int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_span_groups",
+						Description: "Distribution of groups extracted for spans",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 1,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+				}
 			} else {
-				expected.mNumNonGroupedLogs = int64(tt.outputTotalRecordsCount)
-				expected.mNumNonGroupedMetrics = 4 * int64(tt.outputTotalRecordsCount)
-				expected.mNumNonGroupedSpans = int64(tt.outputTotalRecordsCount)
+				want = []metricdata.Metrics{
+					{
+						Name: "otelcol_processor_groupbyattrs_num_non_grouped_logs",
+						Description: "Number of logs that did not have attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_num_non_grouped_metrics",
+						Description: "Number of metrics that did not have attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: 4 * int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_num_non_grouped_spans",
+						Description: "Number of spans that did not have attributes grouped",
+						Unit: "1",
+						Data: metricdata.Sum[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							IsMonotonic: true,
+							DataPoints: []metricdata.DataPoint[int64]{
+								{
+									Value: int64(tt.outputTotalRecordsCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_log_groups",
+						Description: "Distribution of groups extracted for logs",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 1,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_metric_groups",
+						Description: "Distribution of groups extracted for metrics",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 2,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: 2 * int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+					{
+						Name: "otelcol_processor_groupbyattrs_span_groups",
+						Description: "Distribution of groups extracted for spans",
+						Unit: "1",
+						Data: metricdata.Histogram[int64]{
+							Temporality: metricdata.CumulativeTemporality,
+							DataPoints: []metricdata.HistogramDataPoint[int64]{
+								{
+									Attributes: *attribute.EmptySet(),
+									Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
+									BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+									Count: 1,
+									Min: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Max: metricdata.NewExtrema(int64(tt.outputResourceCount)),
+									Sum: int64(tt.outputResourceCount),
+								},
+							},
+						},
+					},
+				}
 			}
-			tel.assertMetrics(t, expected)
+			tel.assertMetrics(t, want)
 		})
 	}
 }
diff --git a/processor/groupbyattrsprocessor/telemetry_test.go b/processor/groupbyattrsprocessor/telemetry_test.go
deleted file mode 100644
index e875565f98f6..000000000000
--- a/processor/groupbyattrsprocessor/telemetry_test.go
+++ /dev/null
@@ -1,258 +0,0 @@
-// Copyright The OpenTelemetry Authors
-// SPDX-License-Identifier: Apache-2.0
-
-package groupbyattrsprocessor
-
-import (
-	"context"
-	"testing"
-
-	"github.com/stretchr/testify/require"
-	"go.opentelemetry.io/collector/component"
-	"go.opentelemetry.io/collector/processor"
-	"go.opentelemetry.io/collector/processor/processortest"
-	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/sdk/metric"
-	"go.opentelemetry.io/otel/sdk/metric/metricdata"
-	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
-
-	"github.com/open-telemetry/opentelemetry-collector-contrib/processor/groupbyattrsprocessor/internal/metadata"
-)
-
-type testTelemetry struct {
-	reader        *metric.ManualReader
-	meterProvider *metric.MeterProvider
-}
-
-type expectedMetrics struct {
-	mNumGroupedSpans    int64
-	mNumNonGroupedSpans int64
-	mDistSpanGroups     int64
-
-	mNumGroupedLogs    int64
-	mNumNonGroupedLogs int64
-	mDistLogGroups     int64
-
-	mNumGroupedMetrics    int64
-	mNumNonGroupedMetrics int64
-	mDistMetricGroups     int64
-}
-
-func setupTelemetry() testTelemetry {
-	reader := metric.NewManualReader()
-	return testTelemetry{
-		reader:        reader,
-		meterProvider: metric.NewMeterProvider(metric.WithReader(reader)),
-	}
-}
-
-func (tt *testTelemetry) assertMetrics(t *testing.T, expected expectedMetrics) {
-	var md metricdata.ResourceMetrics
-	require.NoError(t, tt.reader.Collect(context.Background(), &md))
-	if expected.mNumGroupedLogs > 0 {
-		name := "otelcol_processor_groupbyattrs_num_grouped_logs"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of logs that had attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumGroupedLogs,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mNumGroupedMetrics > 0 {
-		name := "otelcol_processor_groupbyattrs_num_grouped_metrics"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of metrics that had attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumGroupedMetrics,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mNumGroupedSpans > 0 {
-		name := "otelcol_processor_groupbyattrs_num_grouped_spans"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of spans that had attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumGroupedSpans,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mNumNonGroupedLogs > 0 {
-		name := "otelcol_processor_groupbyattrs_num_non_grouped_logs"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of logs that did not have attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumNonGroupedLogs,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mNumNonGroupedMetrics > 0 {
-		name := "otelcol_processor_groupbyattrs_num_non_grouped_metrics"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of metrics that did not have attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumNonGroupedMetrics,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mNumNonGroupedSpans > 0 {
-		name := "otelcol_processor_groupbyattrs_num_non_grouped_spans"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Number of spans that did not have attributes grouped",
-			Unit: "1",
-			Data: metricdata.Sum[int64]{
-				Temporality: metricdata.CumulativeTemporality,
-				IsMonotonic: true,
-				DataPoints: []metricdata.DataPoint[int64]{
-					{
-						Value: expected.mNumNonGroupedSpans,
-					},
-				},
-			},
-		}
-		metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp())
-	}
-	if expected.mDistLogGroups > 0 {
-		name := "otelcol_processor_groupbyattrs_log_groups"
-		got := tt.getMetric(name, md)
-		want := metricdata.Metrics{
-			Name: name,
-			Description: "Distribution of groups extracted for logs",
for logs", - Unit: "1", - Data: metricdata.Histogram[int64]{ - Temporality: metricdata.CumulativeTemporality, - DataPoints: []metricdata.HistogramDataPoint[int64]{ - { - Attributes: *attribute.EmptySet(), - Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000}, - BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - Count: 1, - Min: metricdata.NewExtrema(expected.mDistLogGroups), - Max: metricdata.NewExtrema(expected.mDistLogGroups), - Sum: expected.mDistLogGroups, - }, - }, - }, - } - metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp(), metricdatatest.IgnoreExemplars()) - } - if expected.mDistMetricGroups > 0 { - name := "otelcol_processor_groupbyattrs_metric_groups" - got := tt.getMetric(name, md) - want := metricdata.Metrics{ - Name: name, - Description: "Distribution of groups extracted for metrics", - Unit: "1", - Data: metricdata.Histogram[int64]{ - Temporality: metricdata.CumulativeTemporality, - DataPoints: []metricdata.HistogramDataPoint[int64]{ - { - Attributes: *attribute.EmptySet(), - Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000}, - BucketCounts: []uint64{0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - Count: 2, - Min: metricdata.NewExtrema(expected.mDistMetricGroups), - Max: metricdata.NewExtrema(expected.mDistMetricGroups), - Sum: 2 * expected.mDistMetricGroups, - }, - }, - }, - } - metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp(), metricdatatest.IgnoreExemplars()) - } - if expected.mDistSpanGroups > 0 { - name := "otelcol_processor_groupbyattrs_span_groups" - got := tt.getMetric(name, md) - want := metricdata.Metrics{ - Name: name, - Description: "Distribution of groups extracted for spans", - Unit: "1", - Data: metricdata.Histogram[int64]{ - Temporality: metricdata.CumulativeTemporality, - DataPoints: []metricdata.HistogramDataPoint[int64]{ - { - Attributes: *attribute.EmptySet(), - Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000}, - BucketCounts: []uint64{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - Count: 1, - Min: metricdata.NewExtrema(expected.mDistSpanGroups), - Max: metricdata.NewExtrema(expected.mDistSpanGroups), - Sum: expected.mDistSpanGroups, - }, - }, - }, - } - metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp(), metricdatatest.IgnoreExemplars()) - } -} - -func (tt *testTelemetry) NewProcessorCreateSettings() processor.Settings { - settings := processortest.NewNopSettings() - settings.MeterProvider = tt.meterProvider - settings.ID = component.NewID(metadata.Type) - - return settings -} - -func (tt *testTelemetry) getMetric(name string, got metricdata.ResourceMetrics) metricdata.Metrics { - for _, sm := range got.ScopeMetrics { - for _, m := range sm.Metrics { - if m.Name == name { - return m - } - } - } - - return metricdata.Metrics{} -}