diff --git a/.chloggen/ProtoFromTracesError.yaml b/.chloggen/ProtoFromTracesError.yaml new file mode 100644 index 000000000000..134bd861706d --- /dev/null +++ b/.chloggen/ProtoFromTracesError.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: pkg/translator/jaeger + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Remove error from method signature as it always returns nil + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35560] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [api] diff --git a/.chloggen/feat_add-condition-to-logdedup-processor.yaml b/.chloggen/feat_add-condition-to-logdedup-processor.yaml new file mode 100644 index 000000000000..bd7a30d6e5d9 --- /dev/null +++ b/.chloggen/feat_add-condition-to-logdedup-processor.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: logdedupprocessor + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Add a `condition` field to the Log DeDuplication Processor. + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35440] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [] diff --git a/.chloggen/gcp-0482.yaml b/.chloggen/gcp-0482.yaml new file mode 100644 index 000000000000..28293a032679 --- /dev/null +++ b/.chloggen/gcp-0482.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. 
filelogreceiver) +component: googlecloudexporter + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Google Cloud exporter is marked as mutating. + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35366] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [] diff --git a/.chloggen/gmp-0482.yaml b/.chloggen/gmp-0482.yaml new file mode 100644 index 000000000000..92e8bf7e5008 --- /dev/null +++ b/.chloggen/gmp-0482.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: googlemanagedprometheusexporter + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: GMP exporter is marked as mutating. + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35366] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. 
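For context on the two "marked as mutating" notes above: in the collector, an exporter signals that it modifies the data handed to it through its consumer capabilities, so the fanout step clones data before delivering it. A minimal sketch, assuming the exporter is built on the upstream exporterhelper/consumer packages (illustrative only, not part of this diff):

	package example

	import (
		"go.opentelemetry.io/collector/consumer"
		"go.opentelemetry.io/collector/exporter/exporterhelper"
	)

	// mutatingCapability declares that the exporter mutates incoming data,
	// so the pipeline must not share the same payload with other consumers.
	var mutatingCapability = exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true})
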
+# Default: '[user]'
+change_logs: []
diff --git a/connector/datadogconnector/go.mod b/connector/datadogconnector/go.mod
index b24e9f0a2364..4ca0a023d65b 100644
--- a/connector/datadogconnector/go.mod
+++ b/connector/datadogconnector/go.mod
@@ -105,7 +105,7 @@ require (
 	github.com/DataDog/sketches-go v1.4.6 // indirect
 	github.com/DataDog/viper v1.13.5 // indirect
 	github.com/DataDog/zstd v1.5.5 // indirect
-	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 // indirect
+	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 // indirect
 	github.com/Microsoft/go-winio v0.6.2 // indirect
 	github.com/alecthomas/participle/v2 v2.1.1 // indirect
 	github.com/antchfx/xmlquery v1.4.1 // indirect
diff --git a/connector/datadogconnector/go.sum b/connector/datadogconnector/go.sum
index 458796046cd9..3d02fa336d4a 100644
--- a/connector/datadogconnector/go.sum
+++ b/connector/datadogconnector/go.sum
@@ -228,8 +228,8 @@ github.com/DataDog/viper v1.13.5 h1:SZMcyMknYQN2jRY/40A16gUXexlNJOI8sDs1cWZnI64=
 github.com/DataDog/viper v1.13.5/go.mod h1:wDdUVJ2SHaMaPrCZrlRCObwkubsX8j5sme3LaR/SGTc=
 github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ=
 github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 h1:pB2F2JKCj1Znmp2rwxxt1J0Fg0wezTMgWYk5Mpbi1kg=
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 h1:cZpsGsWTIFKymTA0je7IIvi1O7Es7apb9CF3EQlOcfE=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k=
 github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=
 github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
 github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
diff --git a/connector/sumconnector/connector_test.go b/connector/sumconnector/connector_test.go
index 4c2d4438c692..d37129de7c02 100644
--- a/connector/sumconnector/connector_test.go
+++ b/connector/sumconnector/connector_test.go
@@ -269,3 +269,359 @@ func TestTracesToMetrics(t *testing.T) {
 		})
 	}
 }
+
+// The test input file has a repetitive structure:
+// - There are four resources, each with six metrics, each with four data points.
+// - The four resources have the following sets of attributes:
+//   - resource.required: foo, resource.optional: bar
+//   - resource.required: foo, resource.optional: notbar
+//   - resource.required: notfoo
+//   - (no attributes)
+//
+// - The six metrics have the following sets of types:
+//   - int gauge, double gauge, int sum, double sum, histogram, summary
+//
+// - The four data points on each metric have the following sets of attributes:
+//   - datapoint.required: foo, datapoint.optional: bar
+//   - datapoint.required: foo, datapoint.optional: notbar
+//   - datapoint.required: notfoo
+//   - (no attributes)
+func TestMetricsToMetrics(t *testing.T) {
+	testCases := []struct {
+		name string
+		cfg  *Config
+	}{
+		{
+			name: "one_attribute",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.by_attr": {
+						Description:     "Data point sum by attribute",
+						SourceAttribute: "beep",
+						Attributes: []AttributeConfig{
+							{
+								Key: "datapoint.required",
+							},
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "one_condition",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.if": {
+						Description:     "Data point sum if ...",
+						SourceAttribute: "beep",
+						Conditions: []string{
+							`resource.attributes["resource.optional"] != nil`,
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "multiple_conditions",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.if": {
+						Description:     "Data point sum if ...",
+						SourceAttribute: "beep",
+						Conditions: []string{
+							`resource.attributes["resource.optional"] != nil`,
+							`attributes["datapoint.optional"] != nil`,
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "multiple_metrics",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.all": {
+						Description:     "All data points sum",
+						SourceAttribute: "beep",
+					},
+					"datapoint.sum.if": {
+						Description:     "Data point sum if ...",
+						SourceAttribute: "beep",
+						Conditions: []string{
+							`resource.attributes["resource.optional"] != nil`,
+							`attributes["datapoint.optional"] != nil`,
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "multiple_attributes",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.by_attr": {
+						Description:     "Data point sum by attributes",
+						SourceAttribute: "beep",
+						Attributes: []AttributeConfig{
+							{
+								Key: "datapoint.required",
+							},
+							{
+								Key: "datapoint.optional",
+							},
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "default_attribute_value",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.by_attr": {
+						Description:     "Data point sum by attribute with default",
+						SourceAttribute: "beep",
+						Attributes: []AttributeConfig{
+							{
+								Key: "datapoint.required",
+							},
+							{
+								Key:          "datapoint.optional",
+								DefaultValue: "other",
+							},
+						},
+					},
+				},
+			},
+		},
+		{
+			name: "condition_and_attribute",
+			cfg: &Config{
+				DataPoints: map[string]MetricInfo{
+					"datapoint.sum.if.by_attr": {
+						Description:     "Data point sum by attribute if ...",
+						SourceAttribute: "beep",
+						Conditions: []string{
+							`resource.attributes["resource.optional"] != nil`,
+						},
+						Attributes: []AttributeConfig{
+							{
+								Key: "datapoint.required",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			require.NoError(t, tc.cfg.Validate())
+			factory := NewFactory()
+			sink := &consumertest.MetricsSink{}
+			conn, err := factory.CreateMetricsToMetrics(context.Background(),
+				connectortest.NewNopSettings(), tc.cfg, sink)
+			require.NoError(t, err)
+			require.NotNil(t, conn)
+			assert.False(t, conn.Capabilities().MutatesData)
+
+			require.NoError(t, conn.Start(context.Background(),
componenttest.NewNopHost())) + defer func() { + assert.NoError(t, conn.Shutdown(context.Background())) + }() + + testMetrics, err := golden.ReadMetrics(filepath.Join("testdata", "metrics", "input.yaml")) + assert.NoError(t, err) + assert.NoError(t, conn.ConsumeMetrics(context.Background(), testMetrics)) + + allMetrics := sink.AllMetrics() + assert.Len(t, allMetrics, 1) + + expected, err := golden.ReadMetrics(filepath.Join("testdata", "metrics", tc.name+".yaml")) + assert.NoError(t, err) + assert.NoError(t, pmetrictest.CompareMetrics(expected, allMetrics[0], + pmetrictest.IgnoreTimestamp(), + pmetrictest.IgnoreResourceMetricsOrder(), + pmetrictest.IgnoreMetricsOrder(), + pmetrictest.IgnoreMetricFloatPrecision(3), + pmetrictest.IgnoreMetricDataPointsOrder())) + }) + } +} + +// The test input file has a repetitive structure: +// - There are four resources, each with four logs. +// - The four resources have the following sets of attributes: +// - resource.required: foo, resource.optional: bar +// - resource.required: foo, resource.optional: notbar +// - resource.required: notfoo +// - (no attributes) +// +// - The four logs on each resource have the following sets of attributes: +// - log.required: foo, log.optional: bar +// - log.required: foo, log.optional: notbar +// - log.required: notfoo +// - (no attributes) +func TestLogsToMetrics(t *testing.T) { + testCases := []struct { + name string + cfg *Config + }{ + { + name: "one_attribute", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "log.sum.by_attr": { + Description: "Log sum by attribute", + SourceAttribute: "beep", + Attributes: []AttributeConfig{ + { + Key: "log.required", + }, + }, + }, + }, + }, + }, + { + name: "one_condition", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "sum.if": { + Description: "Sum if ...", + SourceAttribute: "beep", + Conditions: []string{ + `resource.attributes["resource.optional"] != nil`, + }, + }, + }, + }, + }, + { + name: "multiple_conditions", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "sum.if": { + Description: "Sum if ...", + SourceAttribute: "beep", + Conditions: []string{ + `resource.attributes["resource.optional"] != nil`, + `attributes["log.optional"] != nil`, + }, + }, + }, + }, + }, + { + name: "multiple_metrics", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "sum.all": { + Description: "All logs Sum", + SourceAttribute: "beep", + }, + "sum.if": { + Description: "Sum if ...", + SourceAttribute: "beep", + Conditions: []string{ + `resource.attributes["resource.optional"] != nil`, + }, + }, + }, + }, + }, + { + name: "multiple_attributes", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "log.sum.by_attr": { + Description: "Log sum by attributes", + SourceAttribute: "beep", + Attributes: []AttributeConfig{ + { + Key: "log.required", + }, + { + Key: "log.optional", + }, + }, + }, + }, + }, + }, + { + name: "default_attribute_value", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "log.sum.by_attr": { + Description: "Log sum by attribute with default", + SourceAttribute: "beep", + Attributes: []AttributeConfig{ + { + Key: "log.required", + }, + { + Key: "log.optional", + DefaultValue: "other", + }, + }, + }, + }, + }, + }, + { + name: "condition_and_attribute", + cfg: &Config{ + Logs: map[string]MetricInfo{ + "log.sum.if.by_attr": { + Description: "Log sum by attribute if ...", + SourceAttribute: "beep", + Conditions: []string{ + `resource.attributes["resource.optional"] != nil`, + }, + Attributes: []AttributeConfig{ + { + Key: "log.required", + }, + }, + }, + }, + }, + }, + } + + for 
_, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + require.NoError(t, tc.cfg.Validate()) + factory := NewFactory() + sink := &consumertest.MetricsSink{} + conn, err := factory.CreateLogsToMetrics(context.Background(), + connectortest.NewNopSettings(), tc.cfg, sink) + require.NoError(t, err) + require.NotNil(t, conn) + assert.False(t, conn.Capabilities().MutatesData) + + require.NoError(t, conn.Start(context.Background(), componenttest.NewNopHost())) + defer func() { + assert.NoError(t, conn.Shutdown(context.Background())) + }() + + testLogs, err := golden.ReadLogs(filepath.Join("testdata", "logs", "input.yaml")) + assert.NoError(t, err) + assert.NoError(t, conn.ConsumeLogs(context.Background(), testLogs)) + + allMetrics := sink.AllMetrics() + assert.Len(t, allMetrics, 1) + + expected, err := golden.ReadMetrics(filepath.Join("testdata", "logs", tc.name+".yaml")) + assert.NoError(t, err) + assert.NoError(t, pmetrictest.CompareMetrics(expected, allMetrics[0], + pmetrictest.IgnoreTimestamp(), + pmetrictest.IgnoreResourceMetricsOrder(), + pmetrictest.IgnoreMetricsOrder(), + pmetrictest.IgnoreMetricFloatPrecision(3), + pmetrictest.IgnoreMetricDataPointsOrder())) + }) + } +} diff --git a/connector/sumconnector/testdata/logs/condition_and_attribute.yaml b/connector/sumconnector/testdata/logs/condition_and_attribute.yaml new file mode 100644 index 000000000000..ecce8be30eee --- /dev/null +++ b/connector/sumconnector/testdata/logs/condition_and_attribute.yaml @@ -0,0 +1,57 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Log sum by attribute if ... + name: log.sum.if.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "2.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948399018000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948399018000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Log sum by attribute if ... 
+ name: log.sum.if.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948399021000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948399021000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/default_attribute_value.yaml b/connector/sumconnector/testdata/logs/default_attribute_value.yaml new file mode 100644 index 000000000000..e61873e8eed9 --- /dev/null +++ b/connector/sumconnector/testdata/logs/default_attribute_value.yaml @@ -0,0 +1,163 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Log sum by attribute with default + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948398365000" + - asDouble: "0" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948398365000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: notfoo + - key: log.optional + value: + stringValue: other + timeUnixNano: "1678390948398365000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Log sum by attribute with default + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948398368000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948398368000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: notfoo + - key: log.optional + value: + stringValue: other + timeUnixNano: "1678390948398368000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Log sum by attribute with default + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948398371000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948398371000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: notfoo + - key: log.optional + value: + stringValue: other + timeUnixNano: "1678390948398371000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Log sum by attribute with default + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948398373000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional 
+ value: + stringValue: notbar + timeUnixNano: "1678390948398373000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: notfoo + - key: log.optional + value: + stringValue: other + timeUnixNano: "1678390948398373000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/input.yaml b/connector/sumconnector/testdata/logs/input.yaml new file mode 100644 index 000000000000..05044f21650c --- /dev/null +++ b/connector/sumconnector/testdata/logs/input.yaml @@ -0,0 +1,223 @@ +resourceLogs: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeLogs: + - logRecords: + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + - key: beep + value: + stringValue: "astring" + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: notfoo + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + scope: {} + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeLogs: + - logRecords: + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: notfoo + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + scope: {} + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeLogs: + - logRecords: + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: notfoo + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - body: + stringValue: This is a log message + 
spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + scope: {} + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - attributes: + - key: log.required + value: + stringValue: notfoo + - key: beep + value: + doubleValue: 2 + body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + - body: + stringValue: This is a log message + spanId: "" + timeUnixNano: "1581452773000000789" + traceId: "" + scope: {} diff --git a/connector/sumconnector/testdata/logs/multiple_attributes.yaml b/connector/sumconnector/testdata/logs/multiple_attributes.yaml new file mode 100644 index 000000000000..175bf2d7f55a --- /dev/null +++ b/connector/sumconnector/testdata/logs/multiple_attributes.yaml @@ -0,0 +1,127 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Log sum by attributes + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948397879000" + - asDouble: "0" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948397879000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Log sum by attributes + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948397882000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948397882000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Log sum by attributes + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948397884000" + - asDouble: "4" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948397884000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Log sum by attributes + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.2" + attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: bar + timeUnixNano: "1678390948397886000" + - asDouble: "4" + 
attributes: + - key: log.required + value: + stringValue: foo + - key: log.optional + value: + stringValue: notbar + timeUnixNano: "1678390948397886000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/multiple_conditions.yaml b/connector/sumconnector/testdata/logs/multiple_conditions.yaml new file mode 100644 index 000000000000..c4725a2bf72b --- /dev/null +++ b/connector/sumconnector/testdata/logs/multiple_conditions.yaml @@ -0,0 +1,63 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948395853000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948395856000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948395858000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948395859000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/multiple_metrics.yaml b/connector/sumconnector/testdata/logs/multiple_metrics.yaml new file mode 100644 index 000000000000..a4f951a7a452 --- /dev/null +++ b/connector/sumconnector/testdata/logs/multiple_metrics.yaml @@ -0,0 +1,79 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: All logs Sum + name: sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948396984000" + isMonotonic: true + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948396984000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: All logs Sum + name: sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948396988000" + isMonotonic: true + - description: Sum if ... 
+ name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948396988000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: All logs Sum + name: sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948396990000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: All logs Sum + name: sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948396992000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/one_attribute.yaml b/connector/sumconnector/testdata/logs/one_attribute.yaml new file mode 100644 index 000000000000..40aebf83ebda --- /dev/null +++ b/connector/sumconnector/testdata/logs/one_attribute.yaml @@ -0,0 +1,103 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Log sum by attribute + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "2.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948397419000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948397419000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Log sum by attribute + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948397423000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948397423000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Log sum by attribute + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948397425000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948397425000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Log sum by attribute + name: log.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + attributes: + - key: log.required + value: + stringValue: foo + timeUnixNano: "1678390948397427000" + - asDouble: "2" + attributes: + - key: log.required + value: + stringValue: notfoo + timeUnixNano: "1678390948397427000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/logs/one_condition.yaml b/connector/sumconnector/testdata/logs/one_condition.yaml new file mode 100644 index 000000000000..dd26fb4d630f --- /dev/null +++ b/connector/sumconnector/testdata/logs/one_condition.yaml @@ -0,0 +1,37 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Sum if ... 
+ name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "4.1" + timeUnixNano: "1678390948395244000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Sum if ... + name: sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6.1" + timeUnixNano: "1678390948395279000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/condition_and_attribute.yaml b/connector/sumconnector/testdata/metrics/condition_and_attribute.yaml new file mode 100644 index 000000000000..414827d5557f --- /dev/null +++ b/connector/sumconnector/testdata/metrics/condition_and_attribute.yaml @@ -0,0 +1,57 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum by attribute if ... + name: datapoint.sum.if.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "22.5" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923823222000" + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923823222000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum by attribute if ... + name: datapoint.sum.if.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923823233000" + - asDouble: "24.6" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923823233000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/default_attribute_value.yaml b/connector/sumconnector/testdata/metrics/default_attribute_value.yaml new file mode 100644 index 000000000000..d0ccb3f7b790 --- /dev/null +++ b/connector/sumconnector/testdata/metrics/default_attribute_value.yaml @@ -0,0 +1,163 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum by attribute with default + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923822404000" + - asDouble: "12" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: datapoint.optional + value: + stringValue: other + timeUnixNano: "1678391923822404000" + - asDouble: "21" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923822404000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum by attribute with default + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "25.2" + attributes: + - key: 
datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923822416000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923822416000" + - asDouble: "12" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: datapoint.optional + value: + stringValue: other + timeUnixNano: "1678391923822416000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Data point sum by attribute with default + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "25.2" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923822426000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923822426000" + - asDouble: "12" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: datapoint.optional + value: + stringValue: other + timeUnixNano: "1678391923822426000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Data point sum by attribute with default + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923822435000" + - asDouble: "12" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: datapoint.optional + value: + stringValue: other + timeUnixNano: "1678391923822435000" + - asDouble: "25.2" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923822435000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/input.yaml b/connector/sumconnector/testdata/metrics/input.yaml new file mode 100644 index 000000000000..8a04ab206c3f --- /dev/null +++ b/connector/sumconnector/testdata/metrics/input.yaml @@ -0,0 +1,1390 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - gauge: + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + stringValue: "astring" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-int + unit: "1" + - gauge: + dataPoints: + - asDouble: 1.23 + attributes: + - key: 
datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 7.89 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-double + unit: "1" + - name: counter-int + sum: + aggregationTemporality: 2 + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - name: counter-double + sum: + aggregationTemporality: 2 + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - histogram: + aggregationTemporality: 2 + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - 
startTimeUnixNano: "1581452772000000321" + sum: 0 + timeUnixNano: "1581452773000000789" + name: double-histogram + unit: "1" + - name: double-summary + summary: + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + unit: "1" + scope: {} + + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - gauge: + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-int + unit: "1" + - gauge: + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 7.89 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-double + unit: "1" + - name: counter-int + sum: + aggregationTemporality: 2 + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + 
startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - name: counter-double + sum: + aggregationTemporality: 2 + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - histogram: + aggregationTemporality: 2 + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + sum: 0 + timeUnixNano: "1581452773000000789" + name: double-histogram + unit: "1" + - name: double-summary + summary: + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + unit: "1" + scope: {} + + - resource: + attributes: + - key: resource.int + value: + intValue: 1 + - key: resource.optional_int + value: + intValue: 2 + scopeMetrics: + - metrics: + - gauge: + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + 
- key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-int + unit: "1" + - gauge: + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 7.89 + attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-double + unit: "1" + - name: counter-int + sum: + aggregationTemporality: 2 + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - name: counter-double + sum: + aggregationTemporality: 2 + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - histogram: + aggregationTemporality: 2 + dataPoints: + - attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + - 
key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + sum: 0 + timeUnixNano: "1581452773000000789" + name: double-histogram + unit: "1" + - name: double-summary + summary: + dataPoints: + - attributes: + - key: datapoint.int + value: + intValue: 1 + - key: datapoint.optional_int + value: + intValue: 2 + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.int + value: + intValue: 10 + - key: datapoint.optional_int + value: + intValue: 4 + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.int + value: + intValue: 10 + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + unit: "1" + scope: {} + + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - gauge: + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-int + unit: "1" + - gauge: + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 7.89 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-double + unit: "1" + - name: counter-int 
+ sum: + aggregationTemporality: 2 + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - name: counter-double + sum: + aggregationTemporality: 2 + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - histogram: + aggregationTemporality: 2 + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + sum: 0 + timeUnixNano: "1581452773000000789" + name: double-histogram + unit: "1" + - name: double-summary + summary: + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + 
startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + unit: "1" + scope: {} + + - resource: {} + scopeMetrics: + - metrics: + - gauge: + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-int + unit: "1" + - gauge: + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 7.89 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + name: gauge-double + unit: "1" + - name: counter-int + sum: + aggregationTemporality: 2 + dataPoints: + - asInt: "123" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "456" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "789" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asInt: "0" + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - name: counter-double + sum: + aggregationTemporality: 2 + dataPoints: + - asDouble: 1.23 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + startTimeUnixNano: "1581452772000000321" + 
timeUnixNano: "1581452773000000789" + - asDouble: 4.56 + attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + - asDouble: 0 + startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + isMonotonic: true + unit: "1" + - histogram: + aggregationTemporality: 2 + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + sum: 0 + timeUnixNano: "1581452773000000789" + name: double-histogram + unit: "1" + - name: double-summary + summary: + dataPoints: + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + - key: beep + value: + doubleValue: 2.1 + count: "1" + startTimeUnixNano: "1581452772000000321" + sum: 15 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + - key: beep + value: + doubleValue: 2 + count: "2" + startTimeUnixNano: "1581452772000000321" + sum: 30 + timeUnixNano: "1581452773000000789" + - attributes: + - key: datapoint.required + value: + stringValue: notfoo + - key: beep + value: + intValue: 1 + count: "3" + startTimeUnixNano: "1581452772000000321" + sum: 45 + timeUnixNano: "1581452773000000789" + - startTimeUnixNano: "1581452772000000321" + timeUnixNano: "1581452773000000789" + unit: "1" + scope: {} diff --git a/connector/sumconnector/testdata/metrics/multiple_attributes.yaml b/connector/sumconnector/testdata/metrics/multiple_attributes.yaml new file mode 100644 index 000000000000..5d598f2add3e --- /dev/null +++ b/connector/sumconnector/testdata/metrics/multiple_attributes.yaml @@ -0,0 +1,127 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum by attributes + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "21" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923821783000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923821783000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum by attributes + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - 
asDouble: "25.2" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923821792000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923821792000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Data point sum by attributes + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "25.2" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923821800000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923821800000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Data point sum by attributes + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "25.2" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: bar + timeUnixNano: "1678391923821807000" + - asDouble: "24" + attributes: + - key: datapoint.required + value: + stringValue: foo + - key: datapoint.optional + value: + stringValue: notbar + timeUnixNano: "1678391923821807000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/multiple_conditions.yaml b/connector/sumconnector/testdata/metrics/multiple_conditions.yaml new file mode 100644 index 000000000000..b95fee6578a9 --- /dev/null +++ b/connector/sumconnector/testdata/metrics/multiple_conditions.yaml @@ -0,0 +1,63 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "28.5" + timeUnixNano: "1678391923819487000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923819499000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + timeUnixNano: "1678391923819510000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Data point sum if ... 
+ name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + timeUnixNano: "1678391923819529000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/multiple_metrics.yaml b/connector/sumconnector/testdata/metrics/multiple_metrics.yaml new file mode 100644 index 000000000000..47982780fb20 --- /dev/null +++ b/connector/sumconnector/testdata/metrics/multiple_metrics.yaml @@ -0,0 +1,113 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: All data points sum + name: datapoint.sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "28.5" + timeUnixNano: "1678391923820453000" + isMonotonic: true + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "28.5" + timeUnixNano: "1678391923820453000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: All data points sum + name: datapoint.sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923820468000" + isMonotonic: true + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923820468000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: All data points sum + name: datapoint.sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923820480000" + isMonotonic: true + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + timeUnixNano: "1678391923820480000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: All data points sum + name: datapoint.sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923820491000" + isMonotonic: true + - description: Data point sum if ... 
+ name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + timeUnixNano: "1678391923820491000" + isMonotonic: true + - resource: + attributes: + - key: resource.int + value: + intValue: 1 + - key: resource.optional_int + value: + intValue: 2 + scopeMetrics: + - metrics: + - description: All data points sum + name: datapoint.sum.all + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923820480000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/one_attribute.yaml b/connector/sumconnector/testdata/metrics/one_attribute.yaml new file mode 100644 index 000000000000..2dac193abaf2 --- /dev/null +++ b/connector/sumconnector/testdata/metrics/one_attribute.yaml @@ -0,0 +1,103 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum by attribute + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "22.5" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923821179000" + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923821179000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum by attribute + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923821189000" + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923821189000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: notfoo + scopeMetrics: + - metrics: + - description: Data point sum by attribute + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923821196000" + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923821196000" + isMonotonic: true + - resource: {} + scopeMetrics: + - metrics: + - description: Data point sum by attribute + name: datapoint.sum.by_attr + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "24.6" + attributes: + - key: datapoint.required + value: + stringValue: foo + timeUnixNano: "1678391923821203000" + - asDouble: "6" + attributes: + - key: datapoint.required + value: + stringValue: notfoo + timeUnixNano: "1678391923821203000" + isMonotonic: true diff --git a/connector/sumconnector/testdata/metrics/one_condition.yaml b/connector/sumconnector/testdata/metrics/one_condition.yaml new file mode 100644 index 000000000000..c4953f43827a --- /dev/null +++ b/connector/sumconnector/testdata/metrics/one_condition.yaml @@ -0,0 +1,37 @@ +resourceMetrics: + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: bar + scopeMetrics: + - metrics: + - description: Data point sum if ... 
+ name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "28.5" + timeUnixNano: "1678391923818482000" + isMonotonic: true + - resource: + attributes: + - key: resource.required + value: + stringValue: foo + - key: resource.optional + value: + stringValue: notbar + scopeMetrics: + - metrics: + - description: Data point sum if ... + name: datapoint.sum.if + sum: + aggregationTemporality: 1 + dataPoints: + - asDouble: "30.6" + timeUnixNano: "1678391923818549000" + isMonotonic: true diff --git a/exporter/awskinesisexporter/internal/batch/encode_jaeger.go b/exporter/awskinesisexporter/internal/batch/encode_jaeger.go index 42131ee0ea3d..50f4662a0575 100644 --- a/exporter/awskinesisexporter/internal/batch/encode_jaeger.go +++ b/exporter/awskinesisexporter/internal/batch/encode_jaeger.go @@ -6,7 +6,6 @@ package batch // import "github.com/open-telemetry/opentelemetry-collector-contr import ( "github.com/gogo/protobuf/proto" "github.com/jaegertracing/jaeger/model" - "go.opentelemetry.io/collector/consumer/consumererror" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/pdata/ptrace" @@ -30,10 +29,7 @@ type jaegerEncoder struct { var _ Encoder = (*jaegerEncoder)(nil) func (je jaegerEncoder) Traces(td ptrace.Traces) (*Batch, error) { - traces, err := jaeger.ProtoFromTraces(td) - if err != nil { - return nil, consumererror.NewTraces(err, td) - } + traces := jaeger.ProtoFromTraces(td) bt := New(je.batchOptions...) diff --git a/exporter/datadogexporter/go.mod b/exporter/datadogexporter/go.mod index bf1484d7e48f..67136dd512f3 100644 --- a/exporter/datadogexporter/go.mod +++ b/exporter/datadogexporter/go.mod @@ -37,7 +37,7 @@ require ( github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/metrics v0.20.0 github.com/DataDog/opentelemetry-mapping-go/pkg/quantile v0.20.0 github.com/DataDog/sketches-go v1.4.6 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 github.com/aws/aws-sdk-go v1.55.5 github.com/cenkalti/backoff/v4 v4.3.0 github.com/google/go-cmp v0.6.0 diff --git a/exporter/datadogexporter/go.sum b/exporter/datadogexporter/go.sum index 2e631427abed..3e107dad4464 100644 --- a/exporter/datadogexporter/go.sum +++ b/exporter/datadogexporter/go.sum @@ -236,8 +236,8 @@ github.com/DataDog/viper v1.13.5 h1:SZMcyMknYQN2jRY/40A16gUXexlNJOI8sDs1cWZnI64= github.com/DataDog/viper v1.13.5/go.mod h1:wDdUVJ2SHaMaPrCZrlRCObwkubsX8j5sme3LaR/SGTc= github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 h1:pB2F2JKCj1Znmp2rwxxt1J0Fg0wezTMgWYk5Mpbi1kg= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 h1:cZpsGsWTIFKymTA0je7IIvi1O7Es7apb9CF3EQlOcfE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= 
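The pkg/translator/jaeger change above drops the always-nil error from ProtoFromTraces, so call sites such as the awskinesisexporter encoder now simply take the returned batches. A minimal sketch of the updated caller pattern, assuming an illustrative helper name (marshalSpans) and package layout that are not part of this diff:

package example

import (
	"go.opentelemetry.io/collector/pdata/ptrace"

	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger"
)

// marshalSpans converts pdata traces into serialized Jaeger proto spans.
// ProtoFromTraces now returns only the batches; the error return was removed
// because it always returned nil.
func marshalSpans(td ptrace.Traces) ([][]byte, error) {
	batches := jaeger.ProtoFromTraces(td)
	var out [][]byte
	for _, batch := range batches {
		for _, span := range batch.Spans {
			// Attach the batch-level process to each span, as the marshalers in this diff do.
			span.Process = batch.Process
			b, err := span.Marshal()
			if err != nil {
				return nil, err
			}
			out = append(out, b)
		}
	}
	return out, nil
}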
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= diff --git a/exporter/datadogexporter/integrationtest/go.mod b/exporter/datadogexporter/integrationtest/go.mod index fe505668d783..a7ce97441553 100644 --- a/exporter/datadogexporter/integrationtest/go.mod +++ b/exporter/datadogexporter/integrationtest/go.mod @@ -116,7 +116,7 @@ require ( github.com/DataDog/sketches-go v1.4.6 // indirect github.com/DataDog/viper v1.13.5 // indirect github.com/DataDog/zstd v1.5.5 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/alecthomas/participle/v2 v2.1.1 // indirect github.com/alecthomas/units v0.0.0-20240626203959-61d1e3462e30 // indirect diff --git a/exporter/datadogexporter/integrationtest/go.sum b/exporter/datadogexporter/integrationtest/go.sum index aef972cdca24..170fb07e9523 100644 --- a/exporter/datadogexporter/integrationtest/go.sum +++ b/exporter/datadogexporter/integrationtest/go.sum @@ -234,8 +234,8 @@ github.com/DataDog/viper v1.13.5 h1:SZMcyMknYQN2jRY/40A16gUXexlNJOI8sDs1cWZnI64= github.com/DataDog/viper v1.13.5/go.mod h1:wDdUVJ2SHaMaPrCZrlRCObwkubsX8j5sme3LaR/SGTc= github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 h1:pB2F2JKCj1Znmp2rwxxt1J0Fg0wezTMgWYk5Mpbi1kg= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 h1:cZpsGsWTIFKymTA0je7IIvi1O7Es7apb9CF3EQlOcfE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= diff --git a/exporter/googlecloudexporter/factory.go b/exporter/googlecloudexporter/factory.go index 31f885097d0b..712402f83c22 100644 --- a/exporter/googlecloudexporter/factory.go +++ b/exporter/googlecloudexporter/factory.go @@ -11,6 +11,7 @@ import ( "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/exporter" "go.opentelemetry.io/collector/exporter/exporterhelper" "go.opentelemetry.io/collector/featuregate" @@ -55,7 +56,7 @@ func createLogsExporter( params exporter.Settings, cfg component.Config) (exporter.Logs, error) { eCfg := cfg.(*Config) - logsExporter, err := collector.NewGoogleCloudLogsExporter(ctx, eCfg.Config, params.TelemetrySettings.Logger, params.BuildInfo.Version) + logsExporter, err := collector.NewGoogleCloudLogsExporter(ctx, eCfg.Config, params.TelemetrySettings.Logger, params.TelemetrySettings.MeterProvider, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) if err != nil { return nil, err } @@ -69,7 +70,9 @@ func createLogsExporter( // Disable exporterhelper Timeout, since we are 
using a custom mechanism // within exporter itself exporterhelper.WithTimeout(exporterhelper.TimeoutConfig{Timeout: 0}), - exporterhelper.WithQueue(eCfg.QueueSettings)) + exporterhelper.WithQueue(eCfg.QueueSettings), + exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}), + ) } // createTracesExporter creates a trace exporter based on this config. @@ -78,7 +81,7 @@ func createTracesExporter( params exporter.Settings, cfg component.Config) (exporter.Traces, error) { eCfg := cfg.(*Config) - tExp, err := collector.NewGoogleCloudTracesExporter(ctx, eCfg.Config, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) + tExp, err := collector.NewGoogleCloudTracesExporter(ctx, eCfg.Config, params.TelemetrySettings.Logger, params.TelemetrySettings.MeterProvider, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) if err != nil { return nil, err } @@ -92,7 +95,9 @@ func createTracesExporter( // Disable exporterhelper Timeout, since we are using a custom mechanism // within exporter itself exporterhelper.WithTimeout(exporterhelper.TimeoutConfig{Timeout: 0}), - exporterhelper.WithQueue(eCfg.QueueSettings)) + exporterhelper.WithQueue(eCfg.QueueSettings), + exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}), + ) } // createMetricsExporter creates a metrics exporter based on this config. @@ -101,7 +106,7 @@ func createMetricsExporter( params exporter.Settings, cfg component.Config) (exporter.Metrics, error) { eCfg := cfg.(*Config) - mExp, err := collector.NewGoogleCloudMetricsExporter(ctx, eCfg.Config, params.TelemetrySettings.Logger, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) + mExp, err := collector.NewGoogleCloudMetricsExporter(ctx, eCfg.Config, params.TelemetrySettings.Logger, params.TelemetrySettings.MeterProvider, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) if err != nil { return nil, err } @@ -115,5 +120,7 @@ func createMetricsExporter( // Disable exporterhelper Timeout, since we are using a custom mechanism // within exporter itself exporterhelper.WithTimeout(exporterhelper.TimeoutConfig{Timeout: 0}), - exporterhelper.WithQueue(eCfg.QueueSettings)) + exporterhelper.WithQueue(eCfg.QueueSettings), + exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}), + ) } diff --git a/exporter/googlecloudexporter/go.mod b/exporter/googlecloudexporter/go.mod index cbc6a12ea425..d5477849188d 100644 --- a/exporter/googlecloudexporter/go.mod +++ b/exporter/googlecloudexporter/go.mod @@ -3,10 +3,11 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/exporter/google go 1.22.0 require ( - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.111.0 go.opentelemetry.io/collector/confmap v1.17.0 + go.opentelemetry.io/collector/consumer v0.111.0 go.opentelemetry.io/collector/exporter v0.111.0 go.opentelemetry.io/collector/featuregate v1.17.0 go.opentelemetry.io/collector/pdata v1.17.0 @@ -22,8 +23,8 @@ require ( cloud.google.com/go/longrunning v0.5.9 // indirect cloud.google.com/go/monitoring v1.20.2 // indirect cloud.google.com/go/trace v1.10.10 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect + 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -57,7 +58,6 @@ require ( go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/collector/config/configretry v1.17.0 // indirect go.opentelemetry.io/collector/config/configtelemetry v0.111.0 // indirect - go.opentelemetry.io/collector/consumer v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumertest v0.111.0 // indirect go.opentelemetry.io/collector/exporter/exporterprofiles v0.111.0 // indirect @@ -70,7 +70,7 @@ require ( go.opentelemetry.io/collector/receiver/receiverprofiles v0.111.0 // indirect go.opentelemetry.io/collector/semconv v0.111.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 // indirect go.opentelemetry.io/otel v1.30.0 // indirect go.opentelemetry.io/otel/metric v1.30.0 // indirect go.opentelemetry.io/otel/sdk v1.30.0 // indirect @@ -91,6 +91,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/grpc v1.67.1 // indirect + google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb // indirect google.golang.org/protobuf v1.34.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/exporter/googlecloudexporter/go.sum b/exporter/googlecloudexporter/go.sum index a047b50eb3cd..472b722c26b2 100644 --- a/exporter/googlecloudexporter/go.sum +++ b/exporter/googlecloudexporter/go.sum @@ -16,14 +16,14 @@ cloud.google.com/go/monitoring v1.20.2/go.mod h1:36rpg/7fdQ7NX5pG5x1FA7cXTVXusOp cloud.google.com/go/trace v1.10.10 h1:eiIFoRp1qTh2tRemTd8HIE7qZ0Ok5l7dl9pYsNWoXjk= cloud.google.com/go/trace v1.10.10/go.mod h1:5b1BiSYQO27KgGRevNFfoIQ8czwpVgnkKbTLb4wV+XM= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1 h1:55BCCek53ynaXLoMS8bdivu34TYZ4dnKzi4sOf7qtIU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1/go.mod h1:n8ee0TUmtsXm2GUWL86jkrxc8mPGRLuTJg13M/iW6Q0= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1 h1:01bHLeqkrxYSkjvyTBEZ8rxBxDhWm1snWGEW73Te4lU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1/go.mod h1:UFO9jC3njhKdD/ymLnaKi7Or5miVWq06LvRWQNFfnTU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.1 h1:oTX4vsorBZo/Zdum6OKPA4o7544hm6smoRv1QjpTwGo= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.1/go.mod h1:0wEl7vrAD8mehJyohS9HZy+WyEOaQO2mJx86Cvh93kM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 h1:8nn+rsCvTq9axyEh382S0PFLBeaFwNsT43IrPWzctRU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1/go.mod 
h1:viRWSEhtMZqz1rhwmOVKkWl6SwmVowfL9O2YR5gI2PE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2 h1:YkwqygP/gm2iuuxAFh8HnFL+ZQ/PKhYRDi3IlfLrBek= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2/go.mod h1:f4MEEOvRbw06aLAQ3g/elRal5XvUaGPUI+uYUL6W4WI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2 h1:B7ox5J7nwey9FPxobwU1wugDKgVqtFvwZRDS0YbM+tY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2/go.mod h1:VWMJ2cFLtnygvsntQ8JUNQ/VxoZiVd8ewsmyeKSK3k8= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.2 h1:ffI2ensdT33alWXmBDi/7cvCV7K3o7TF5oE44g8tiN0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.2/go.mod h1:pNP/L2wDlaQnQlFvkDKGSruDoYRpmAxB6drgsskfYwg= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2 h1:th/AQTVtV5u0WVQln/ks+jxhkZ433MeOevmka55fkeg= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2/go.mod h1:wRbFgBQUVm1YXrvWKofAEmq9HNJTDphbAaJSSX01KUI= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -178,8 +178,8 @@ go.opentelemetry.io/collector/semconv v0.111.0 h1:ELleMtLBzeZ3xhfhYPmFcLc0hJMqRx go.opentelemetry.io/collector/semconv v0.111.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 h1:vS1Ao/R55RNV4O7TA2Qopok8yN+X0LIP6RVWLFkprck= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0/go.mod h1:BMsdeOxN04K0L5FNUBfjFdvwWGNe/rkmSwH4Aelu/X0= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 h1:ZIg3ZT/aQ7AfKqdwp7ECpOK6vHqquXXuyTjIO8ZdmPs= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0/go.mod h1:DQAwmETtZV00skUwgD6+0U89g80NKsJE3DCKeLLPQMI= go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= @@ -274,6 +274,8 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb h1:i2YAt03JpcWES+tSjXp3X3SvUdcVhwa07DJP8AqjLGQ= +google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb/go.mod h1:4R1ubOs0qFA4h2ZP1CgEXfa7auTXBxA2xUh9o26WXu8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/exporter/googlemanagedprometheusexporter/factory.go b/exporter/googlemanagedprometheusexporter/factory.go index 3a0d099eefbf..b8c6f99f1b9f 100644 --- a/exporter/googlemanagedprometheusexporter/factory.go +++ b/exporter/googlemanagedprometheusexporter/factory.go @@ -12,6 +12,7 @@ import ( "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector" "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/exporter" "go.opentelemetry.io/collector/exporter/exporterhelper" @@ -50,7 +51,7 @@ func createMetricsExporter( params exporter.Settings, cfg component.Config) (exporter.Metrics, error) { eCfg := cfg.(*Config) - mExp, err := collector.NewGoogleCloudMetricsExporter(ctx, eCfg.GMPConfig.toCollectorConfig(), params.TelemetrySettings.Logger, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) + mExp, err := collector.NewGoogleCloudMetricsExporter(ctx, eCfg.GMPConfig.toCollectorConfig(), params.TelemetrySettings.Logger, params.TelemetrySettings.MeterProvider, params.BuildInfo.Version, eCfg.TimeoutSettings.Timeout) if err != nil { return nil, err } @@ -64,5 +65,7 @@ func createMetricsExporter( // Disable exporterhelper Timeout, since we are using a custom mechanism // within exporter itself exporterhelper.WithTimeout(exporterhelper.TimeoutConfig{Timeout: 0}), - exporterhelper.WithQueue(eCfg.QueueSettings)) + exporterhelper.WithQueue(eCfg.QueueSettings), + exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}), + ) } diff --git a/exporter/googlemanagedprometheusexporter/go.mod b/exporter/googlemanagedprometheusexporter/go.mod index bfbd211c7680..95dc3bd50b17 100644 --- a/exporter/googlemanagedprometheusexporter/go.mod +++ b/exporter/googlemanagedprometheusexporter/go.mod @@ -3,11 +3,12 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/exporter/google go 1.22.0 require ( - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1 - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.1 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.2 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.111.0 go.opentelemetry.io/collector/confmap v1.17.0 + go.opentelemetry.io/collector/consumer v0.111.0 go.opentelemetry.io/collector/exporter v0.111.0 go.opentelemetry.io/collector/otelcol/otelcoltest v0.111.0 go.opentelemetry.io/collector/pdata v1.17.0 @@ -23,8 +24,8 @@ require ( cloud.google.com/go/longrunning v0.5.9 // indirect cloud.google.com/go/monitoring v1.20.2 // indirect cloud.google.com/go/trace v1.10.10 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2 // indirect 
github.com/beorn7/perks v1.0.1 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -87,7 +88,6 @@ require ( go.opentelemetry.io/collector/confmap/provider/yamlprovider v1.17.0 // indirect go.opentelemetry.io/collector/connector v0.111.0 // indirect go.opentelemetry.io/collector/connector/connectorprofiles v0.111.0 // indirect - go.opentelemetry.io/collector/consumer v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumertest v0.111.0 // indirect go.opentelemetry.io/collector/exporter/exporterprofiles v0.111.0 // indirect @@ -146,6 +146,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect google.golang.org/grpc v1.67.1 // indirect + google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb // indirect google.golang.org/protobuf v1.34.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/exporter/googlemanagedprometheusexporter/go.sum b/exporter/googlemanagedprometheusexporter/go.sum index fbe293e2f04a..c8592cf0b879 100644 --- a/exporter/googlemanagedprometheusexporter/go.sum +++ b/exporter/googlemanagedprometheusexporter/go.sum @@ -16,16 +16,16 @@ cloud.google.com/go/monitoring v1.20.2/go.mod h1:36rpg/7fdQ7NX5pG5x1FA7cXTVXusOp cloud.google.com/go/trace v1.10.10 h1:eiIFoRp1qTh2tRemTd8HIE7qZ0Ok5l7dl9pYsNWoXjk= cloud.google.com/go/trace v1.10.10/go.mod h1:5b1BiSYQO27KgGRevNFfoIQ8czwpVgnkKbTLb4wV+XM= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1 h1:55BCCek53ynaXLoMS8bdivu34TYZ4dnKzi4sOf7qtIU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.1/go.mod h1:n8ee0TUmtsXm2GUWL86jkrxc8mPGRLuTJg13M/iW6Q0= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.1 h1:u/of4NZ/0vK8c9Zjt6QLQtHjzjxKvplbrU8r1kLLYTk= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.1/go.mod h1:nolt+2xPwKxTH0sQ5SKrK0kKEvVq12N/+3r6vhAmPvw= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1 h1:01bHLeqkrxYSkjvyTBEZ8rxBxDhWm1snWGEW73Te4lU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.1/go.mod h1:UFO9jC3njhKdD/ymLnaKi7Or5miVWq06LvRWQNFfnTU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.1 h1:oTX4vsorBZo/Zdum6OKPA4o7544hm6smoRv1QjpTwGo= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.1/go.mod h1:0wEl7vrAD8mehJyohS9HZy+WyEOaQO2mJx86Cvh93kM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 h1:8nn+rsCvTq9axyEh382S0PFLBeaFwNsT43IrPWzctRU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1/go.mod h1:viRWSEhtMZqz1rhwmOVKkWl6SwmVowfL9O2YR5gI2PE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2 h1:YkwqygP/gm2iuuxAFh8HnFL+ZQ/PKhYRDi3IlfLrBek= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector v0.48.2/go.mod h1:f4MEEOvRbw06aLAQ3g/elRal5XvUaGPUI+uYUL6W4WI= 
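The googlecloudexporter and googlemanagedprometheusexporter factories above now pass the MeterProvider to the underlying exporters and declare that they mutate incoming data; declaring MutatesData tells the collector pipeline to give such an exporter its own copy of the pdata rather than sharing one instance across consumers. A minimal sketch of the option list, with an illustrative helper name (exporterOptions) and the queue option omitted:

package example

import (
	"go.opentelemetry.io/collector/consumer"
	"go.opentelemetry.io/collector/exporter/exporterhelper"
)

// exporterOptions mirrors the options added in the factory changes above:
// the helper timeout is disabled because the exporter applies its own timeout,
// and the exporter declares that it mutates incoming data.
func exporterOptions() []exporterhelper.Option {
	return []exporterhelper.Option{
		exporterhelper.WithTimeout(exporterhelper.TimeoutConfig{Timeout: 0}),
		exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}),
	}
}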
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.2 h1:8UhoMZCIt8KI2Ffl5bOTUFXxECM9MSIspfBQkkOQJzM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/googlemanagedprometheus v0.48.2/go.mod h1:LRzU5QbgRr3XPKhxtTi3JmZMErXrg53O9LGK6A85IHA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2 h1:B7ox5J7nwey9FPxobwU1wugDKgVqtFvwZRDS0YbM+tY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.24.2/go.mod h1:VWMJ2cFLtnygvsntQ8JUNQ/VxoZiVd8ewsmyeKSK3k8= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.2 h1:ffI2ensdT33alWXmBDi/7cvCV7K3o7TF5oE44g8tiN0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.48.2/go.mod h1:pNP/L2wDlaQnQlFvkDKGSruDoYRpmAxB6drgsskfYwg= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2 h1:th/AQTVtV5u0WVQln/ks+jxhkZ433MeOevmka55fkeg= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.2/go.mod h1:wRbFgBQUVm1YXrvWKofAEmq9HNJTDphbAaJSSX01KUI= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= @@ -415,6 +415,8 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb h1:i2YAt03JpcWES+tSjXp3X3SvUdcVhwa07DJP8AqjLGQ= +google.golang.org/grpc/stats/opentelemetry v0.0.0-20240702152247-2da976983bbb/go.mod h1:4R1ubOs0qFA4h2ZP1CgEXfa7auTXBxA2xUh9o26WXu8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/exporter/kafkaexporter/jaeger_marshaler.go b/exporter/kafkaexporter/jaeger_marshaler.go index abc73c22f18a..d6d6beb643c3 100644 --- a/exporter/kafkaexporter/jaeger_marshaler.go +++ b/exporter/kafkaexporter/jaeger_marshaler.go @@ -22,10 +22,7 @@ type jaegerMarshaler struct { var _ TracesMarshaler = (*jaegerMarshaler)(nil) func (j jaegerMarshaler) Marshal(traces ptrace.Traces, topic string) ([]*sarama.ProducerMessage, error) { - batches, err := jaeger.ProtoFromTraces(traces) - if err != nil { - return nil, err - } + batches := jaeger.ProtoFromTraces(traces) var messages []*sarama.ProducerMessage var errs error diff --git a/exporter/kafkaexporter/jaeger_marshaler_test.go b/exporter/kafkaexporter/jaeger_marshaler_test.go index 81a310c4a353..ca4cd7e7440e 100644 --- a/exporter/kafkaexporter/jaeger_marshaler_test.go +++ b/exporter/kafkaexporter/jaeger_marshaler_test.go @@ -25,8 +25,7 @@ func TestJaegerMarshaler(t *testing.T) { span.SetEndTimestamp(pcommon.Timestamp(20)) span.SetTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) span.SetSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - batches, err 
:= jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) batches[0].Spans[0].Process = batches[0].Process jaegerProtoBytes, err := batches[0].Spans[0].Marshal() diff --git a/exporter/logzioexporter/exporter.go b/exporter/logzioexporter/exporter.go index 3254084ba8ce..297c3eafdcb3 100644 --- a/exporter/logzioexporter/exporter.go +++ b/exporter/logzioexporter/exporter.go @@ -182,10 +182,7 @@ func mergeMapEntries(maps ...pcommon.Map) pcommon.Map { func (exporter *logzioExporter) pushTraceData(ctx context.Context, traces ptrace.Traces) error { // a buffer to store logzio span and services bytes var dataBuffer bytes.Buffer - batches, err := jaeger.ProtoFromTraces(traces) - if err != nil { - return err - } + batches := jaeger.ProtoFromTraces(traces) for _, batch := range batches { for _, span := range batch.Spans { span.Process = batch.Process @@ -195,7 +192,7 @@ func (exporter *logzioExporter) pushTraceData(ctx context.Context, traces ptrace if transformErr != nil { return transformErr } - _, err = dataBuffer.Write(append(logzioSpan, '\n')) + _, err := dataBuffer.Write(append(logzioSpan, '\n')) if err != nil { return err } @@ -220,7 +217,7 @@ func (exporter *logzioExporter) pushTraceData(ctx context.Context, traces ptrace } } } - err = exporter.export(ctx, exporter.config.ClientConfig.Endpoint, dataBuffer.Bytes()) + err := exporter.export(ctx, exporter.config.ClientConfig.Endpoint, dataBuffer.Bytes()) // reset the data buffer after each export to prevent duplicated data dataBuffer.Reset() return err diff --git a/exporter/pulsarexporter/jaeger_marshaler.go b/exporter/pulsarexporter/jaeger_marshaler.go index 79a927a12f2e..786140d2c67d 100644 --- a/exporter/pulsarexporter/jaeger_marshaler.go +++ b/exporter/pulsarexporter/jaeger_marshaler.go @@ -22,10 +22,7 @@ type jaegerMarshaler struct { var _ TracesMarshaler = (*jaegerMarshaler)(nil) func (j jaegerMarshaler) Marshal(traces ptrace.Traces, _ string) ([]*pulsar.ProducerMessage, error) { - batches, err := jaeger.ProtoFromTraces(traces) - if err != nil { - return nil, err - } + batches := jaeger.ProtoFromTraces(traces) var errs error messages := make([]*pulsar.ProducerMessage, 0, len(batches)) diff --git a/exporter/pulsarexporter/jaeger_marshaler_test.go b/exporter/pulsarexporter/jaeger_marshaler_test.go index e4fb6d06e948..8b0eb9bec1ca 100644 --- a/exporter/pulsarexporter/jaeger_marshaler_test.go +++ b/exporter/pulsarexporter/jaeger_marshaler_test.go @@ -30,8 +30,7 @@ func buildTraces() ptrace.Traces { func TestJaegerJsonBatchMarshaler(t *testing.T) { ptraces := buildTraces() - batches, err := jaeger.ProtoFromTraces(ptraces) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(ptraces) jsonMarshaler := &jsonpb.Marshaler{} buffer := new(bytes.Buffer) @@ -48,8 +47,7 @@ func TestJaegerJsonBatchMarshaler(t *testing.T) { func TestJaegerProtoBatchMarshaler(t *testing.T) { ptraces := buildTraces() - batches, err := jaeger.ProtoFromTraces(ptraces) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(ptraces) jaegerProtoBytes, err := batches[0].Marshal() require.NoError(t, err) diff --git a/exporter/sapmexporter/exporter.go b/exporter/sapmexporter/exporter.go index 9893d1c189a8..305bbf956123 100644 --- a/exporter/sapmexporter/exporter.go +++ b/exporter/sapmexporter/exporter.go @@ -96,10 +96,7 @@ func (se *sapmExporter) pushTraceData(ctx context.Context, td ptrace.Traces) err accessToken := se.retrieveAccessToken(ctx, rss.At(0)) - batches, err := jaeger.ProtoFromTraces(td) - if err != nil { - return 
consumererror.NewPermanent(err) - } + batches := jaeger.ProtoFromTraces(td) // Cannot remove the access token from the pdata, because exporters required to not modify incoming pdata, // so need to remove that after conversion. diff --git a/exporter/sapmexporter/exporter_test.go b/exporter/sapmexporter/exporter_test.go index d9d863eaeb13..8d0953120ca0 100644 --- a/exporter/sapmexporter/exporter_test.go +++ b/exporter/sapmexporter/exporter_test.go @@ -92,8 +92,7 @@ func TestFilterToken(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { traces := buildTestTraces(tt.useToken) - batches, err := jaeger.ProtoFromTraces(traces) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(traces) assert.Equal(t, tt.useToken, hasToken(batches)) filterToken(batches) assert.False(t, hasToken(batches)) diff --git a/extension/encoding/jaegerencodingextension/jaeger_test.go b/extension/encoding/jaegerencodingextension/jaeger_test.go index 674b0c0ba82f..a1888a434c36 100644 --- a/extension/encoding/jaegerencodingextension/jaeger_test.go +++ b/extension/encoding/jaegerencodingextension/jaeger_test.go @@ -24,8 +24,7 @@ func TestUnmarshalJaeger(t *testing.T) { span.SetEndTimestamp(pcommon.Timestamp(20)) span.SetTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) span.SetSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - batches, err := jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) protoBytes, err := batches[0].Spans[0].Marshal() require.NoError(t, err) diff --git a/internal/coreinternal/goldendataset/resource_generator.go b/internal/coreinternal/goldendataset/resource_generator.go index 03e82dfcdb1e..0f9ae0986346 100644 --- a/internal/coreinternal/goldendataset/resource_generator.go +++ b/internal/coreinternal/goldendataset/resource_generator.go @@ -5,7 +5,7 @@ package goldendataset // import "github.com/open-telemetry/opentelemetry-collect import ( "go.opentelemetry.io/collector/pdata/pcommon" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.18.0" ) // GenerateResource generates a PData Resource object with representative attributes for the diff --git a/internal/coreinternal/goldendataset/span_generator.go b/internal/coreinternal/goldendataset/span_generator.go index a72cb327fa38..ce2d703713b1 100644 --- a/internal/coreinternal/goldendataset/span_generator.go +++ b/internal/coreinternal/goldendataset/span_generator.go @@ -10,7 +10,7 @@ import ( "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.12.0" ) var statusCodeMap = map[PICTInputStatus]ptrace.StatusCode{ diff --git a/internal/coreinternal/parseutils/uri.go b/internal/coreinternal/parseutils/uri.go index 048e90e38389..cd154b9d36cc 100644 --- a/internal/coreinternal/parseutils/uri.go +++ b/internal/coreinternal/parseutils/uri.go @@ -8,7 +8,7 @@ import ( "strconv" "strings" - semconv "go.opentelemetry.io/collector/semconv/v1.25.0" + semconv "go.opentelemetry.io/collector/semconv/v1.27.0" ) const ( diff --git a/internal/coreinternal/parseutils/uri_test.go b/internal/coreinternal/parseutils/uri_test.go index f2cab56aa3bd..26c98240ea8e 100644 --- a/internal/coreinternal/parseutils/uri_test.go +++ b/internal/coreinternal/parseutils/uri_test.go @@ -8,7 +8,7 @@ import ( "testing" "github.com/stretchr/testify/require" - semconv 
"go.opentelemetry.io/collector/semconv/v1.25.0" + semconv "go.opentelemetry.io/collector/semconv/v1.27.0" ) // Test all usecases: absolute uri, relative uri, query string diff --git a/internal/exp/metrics/metrics_test.go b/internal/exp/metrics/metrics_test.go index df49b71aae60..de6645068fc3 100644 --- a/internal/exp/metrics/metrics_test.go +++ b/internal/exp/metrics/metrics_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" - conventions "go.opentelemetry.io/collector/semconv/v1.9.0" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden" diff --git a/internal/filter/filterlog/filterlog_test.go b/internal/filter/filterlog/filterlog_test.go index b0a07f16fab1..ebfd5e483d3c 100644 --- a/internal/filter/filterlog/filterlog_test.go +++ b/internal/filter/filterlog/filterlog_test.go @@ -12,7 +12,7 @@ import ( "go.opentelemetry.io/collector/featuregate" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterconfig" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterottl" diff --git a/internal/filter/filtermatcher/filtermatcher_test.go b/internal/filter/filtermatcher/filtermatcher_test.go index 0592fccc4ba8..26a556c3814a 100644 --- a/internal/filter/filtermatcher/filtermatcher_test.go +++ b/internal/filter/filtermatcher/filtermatcher_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/pdata/pcommon" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterconfig" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterset" diff --git a/internal/filter/filterspan/filterspan.go b/internal/filter/filterspan/filterspan.go index 2ee4358991fe..734b8a01b756 100644 --- a/internal/filter/filterspan/filterspan.go +++ b/internal/filter/filterspan/filterspan.go @@ -9,7 +9,7 @@ import ( "go.opentelemetry.io/collector/featuregate" "go.opentelemetry.io/collector/pdata/pcommon" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/traceutil" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/expr" diff --git a/internal/filter/filterspan/filterspan_test.go b/internal/filter/filterspan/filterspan_test.go index 4b407eb77d71..7dd815b58f12 100644 --- a/internal/filter/filterspan/filterspan_test.go +++ b/internal/filter/filterspan/filterspan_test.go @@ -12,7 +12,7 @@ import ( "go.opentelemetry.io/collector/featuregate" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" 
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/testdata" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/traceutil" diff --git a/internal/metadataproviders/internal/internal_test.go b/internal/metadataproviders/internal/internal_test.go index b569c132efe4..48db74f8bd15 100644 --- a/internal/metadataproviders/internal/internal_test.go +++ b/internal/metadataproviders/internal/internal_test.go @@ -7,7 +7,7 @@ import ( "testing" "github.com/stretchr/testify/assert" - conventions "go.opentelemetry.io/collector/semconv/v1.18.0" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" ) func TestGOOSToOsType(t *testing.T) { diff --git a/internal/metadataproviders/system/metadata.go b/internal/metadataproviders/system/metadata.go index 6a8dda084dd7..6a394ab4a494 100644 --- a/internal/metadataproviders/system/metadata.go +++ b/internal/metadataproviders/system/metadata.go @@ -13,7 +13,7 @@ import ( "github.com/Showmax/go-fqdn" "github.com/shirou/gopsutil/v4/cpu" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/sdk/resource" diff --git a/internal/splunk/hostid.go b/internal/splunk/hostid.go index 8a9305567cbb..588a1180f50b 100644 --- a/internal/splunk/hostid.go +++ b/internal/splunk/hostid.go @@ -8,7 +8,7 @@ import ( "strings" "go.opentelemetry.io/collector/pdata/pcommon" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" ) // HostIDKey represents a host identifier. diff --git a/internal/splunk/hostid_test.go b/internal/splunk/hostid_test.go index 292888fdef5c..0da55b6b972a 100644 --- a/internal/splunk/hostid_test.go +++ b/internal/splunk/hostid_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/pdata/pcommon" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.27.0" ) var ( diff --git a/pkg/ottl/grammar.go b/pkg/ottl/grammar.go index 369eff00ecbd..2407a138c1c2 100644 --- a/pkg/ottl/grammar.go +++ b/pkg/ottl/grammar.go @@ -57,6 +57,18 @@ func (b *booleanValue) checkForCustomError() error { return nil } +func (b *booleanValue) accept(v grammarVisitor) { + if b.Comparison != nil { + b.Comparison.accept(v) + } + if b.ConstExpr != nil && b.ConstExpr.Converter != nil { + b.ConstExpr.Converter.accept(v) + } + if b.SubExpr != nil { + b.SubExpr.accept(v) + } +} + // opAndBooleanValue represents the right side of an AND boolean expression. type opAndBooleanValue struct { Operator string `parser:"@OpAnd"` @@ -67,6 +79,12 @@ func (b *opAndBooleanValue) checkForCustomError() error { return b.Value.checkForCustomError() } +func (b *opAndBooleanValue) accept(v grammarVisitor) { + if b.Value != nil { + b.Value.accept(v) + } +} + // term represents an arbitrary number of boolean values joined by AND. type term struct { Left *booleanValue `parser:"@@"` @@ -87,6 +105,17 @@ func (b *term) checkForCustomError() error { return nil } +func (b *term) accept(v grammarVisitor) { + if b.Left != nil { + b.Left.accept(v) + } + for _, r := range b.Right { + if r != nil { + r.accept(v) + } + } +} + // opOrTerm represents the right side of an OR boolean expression. 
type opOrTerm struct { Operator string `parser:"@OpOr"` @@ -97,6 +126,12 @@ func (b *opOrTerm) checkForCustomError() error { return b.Term.checkForCustomError() } +func (b *opOrTerm) accept(v grammarVisitor) { + if b.Term != nil { + b.Term.accept(v) + } +} + // booleanExpression represents a true/false decision expressed // as an arbitrary number of terms separated by OR. type booleanExpression struct { @@ -118,6 +153,17 @@ func (b *booleanExpression) checkForCustomError() error { return nil } +func (b *booleanExpression) accept(v grammarVisitor) { + if b.Left != nil { + b.Left.accept(v) + } + for _, r := range b.Right { + if r != nil { + r.accept(v) + } + } +} + // compareOp is the type of a comparison operator. type compareOp int @@ -187,6 +233,11 @@ func (c *comparison) checkForCustomError() error { return err } +func (c *comparison) accept(v grammarVisitor) { + c.Left.accept(v) + c.Right.accept(v) +} + // editor represents the function call of a statement. type editor struct { Function string `parser:"@(Lowercase(Uppercase | Lowercase)*)"` @@ -210,6 +261,13 @@ func (i *editor) checkForCustomError() error { return nil } +func (i *editor) accept(v grammarVisitor) { + v.visitEditor(i) + for _, arg := range i.Arguments { + arg.accept(v) + } +} + // converter represents a converter function call. type converter struct { Function string `parser:"@(Uppercase(Uppercase | Lowercase)*)"` @@ -217,6 +275,14 @@ type converter struct { Keys []key `parser:"( @@ )*"` } +func (c *converter) accept(v grammarVisitor) { + if c.Arguments != nil { + for _, a := range c.Arguments { + a.accept(v) + } + } +} + type argument struct { Name string `parser:"(@(Lowercase(Uppercase | Lowercase)*) Equal)?"` Value value `parser:"( @@"` @@ -227,6 +293,10 @@ func (a *argument) checkForCustomError() error { return a.Value.checkForCustomError() } +func (a *argument) accept(v grammarVisitor) { + a.Value.accept(v) +} + // value represents a part of a parsed statement which is resolved to a value of some sort. This can be a telemetry path // mathExpression, function call, or literal. type value struct { @@ -251,8 +321,27 @@ func (v *value) checkForCustomError() error { return nil } +func (v *value) accept(vis grammarVisitor) { + vis.visitValue(v) + if v.Literal != nil { + v.Literal.accept(vis) + } + if v.MathExpression != nil { + v.MathExpression.accept(vis) + } + if v.Map != nil { + v.Map.accept(vis) + } + if v.List != nil { + for _, i := range v.List.Values { + i.accept(vis) + } + } +} + // path represents a telemetry path mathExpression. type path struct { + Pos lexer.Position Context string `parser:"(@Lowercase '.')?"` Fields []field `parser:"@@ ( '.' 
@@ )*"` } @@ -276,6 +365,14 @@ type mapValue struct { Values []mapItem `parser:"'{' (@@ ','?)* '}'"` } +func (m *mapValue) accept(v grammarVisitor) { + for _, i := range m.Values { + if i.Value != nil { + i.Value.accept(v) + } + } +} + type mapItem struct { Key *string `parser:"@String ':'"` Value *value `parser:"@@"` @@ -326,6 +423,19 @@ func (m *mathExprLiteral) checkForCustomError() error { return nil } +func (m *mathExprLiteral) accept(v grammarVisitor) { + v.visitMathExprLiteral(m) + if m.Path != nil { + v.visitPath(m.Path) + } + if m.Editor != nil { + m.Editor.accept(v) + } + if m.Converter != nil { + m.Converter.accept(v) + } +} + type mathValue struct { Literal *mathExprLiteral `parser:"( @@"` SubExpression *mathExpression `parser:"| '(' @@ ')' )"` @@ -338,6 +448,15 @@ func (m *mathValue) checkForCustomError() error { return m.SubExpression.checkForCustomError() } +func (m *mathValue) accept(v grammarVisitor) { + if m.Literal != nil { + m.Literal.accept(v) + } + if m.SubExpression != nil { + m.SubExpression.accept(v) + } +} + type opMultDivValue struct { Operator mathOp `parser:"@OpMultDiv"` Value *mathValue `parser:"@@"` @@ -347,6 +466,12 @@ func (m *opMultDivValue) checkForCustomError() error { return m.Value.checkForCustomError() } +func (m *opMultDivValue) accept(v grammarVisitor) { + if m.Value != nil { + m.Value.accept(v) + } +} + type addSubTerm struct { Left *mathValue `parser:"@@"` Right []*opMultDivValue `parser:"@@*"` @@ -366,6 +491,17 @@ func (m *addSubTerm) checkForCustomError() error { return nil } +func (m *addSubTerm) accept(v grammarVisitor) { + if m.Left != nil { + m.Left.accept(v) + } + for _, r := range m.Right { + if r != nil { + r.accept(v) + } + } +} + type opAddSubTerm struct { Operator mathOp `parser:"@OpAddSub"` Term *addSubTerm `parser:"@@"` @@ -375,6 +511,12 @@ func (m *opAddSubTerm) checkForCustomError() error { return m.Term.checkForCustomError() } +func (m *opAddSubTerm) accept(v grammarVisitor) { + if m.Term != nil { + m.Term.accept(v) + } +} + type mathExpression struct { Left *addSubTerm `parser:"@@"` Right []*opAddSubTerm `parser:"@@*"` @@ -394,6 +536,19 @@ func (m *mathExpression) checkForCustomError() error { return nil } +func (m *mathExpression) accept(v grammarVisitor) { + if m.Left != nil { + m.Left.accept(v) + } + if m.Right != nil { + for _, r := range m.Right { + if r != nil { + r.accept(v) + } + } + } +} + type mathOp int const ( @@ -464,3 +619,11 @@ func buildLexer() *lexer.StatefulDefinition { {Name: "whitespace", Pattern: `\s+`}, }) } + +// grammarVisitor allows accessing the grammar AST nodes using the visitor pattern. 
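The `accept` methods added throughout the grammar wire every AST node into a visitor traversal: each node forwards the visitor to its children, and the `grammarVisitor` interface declared immediately below gets one callback per node kind it cares about. A self-contained sketch of the same shape, using hypothetical node types rather than the unexported ottl grammar structs:

```go
// Illustrative only: hypothetical node types standing in for the unexported
// ottl grammar structs, showing the same accept/visitor traversal shape.
package main

import "fmt"

// node plays the role of a grammar struct: it knows how to walk itself.
type node interface {
	accept(v visitor)
}

// visitor plays the role of grammarVisitor: one callback per node kind.
type visitor interface {
	visitPath(p *pathNode)
}

type pathNode struct{ name string }

func (p *pathNode) accept(v visitor) { v.visitPath(p) }

type exprNode struct{ children []node }

func (e *exprNode) accept(v visitor) {
	// Composite nodes just recurse, exactly like the accept methods above.
	for _, c := range e.children {
		c.accept(v)
	}
}

// pathCollector plays the role of grammarPathVisitor: it only records paths.
type pathCollector struct{ paths []string }

func (c *pathCollector) visitPath(p *pathNode) { c.paths = append(c.paths, p.name) }

func main() {
	ast := &exprNode{children: []node{
		&pathNode{name: "attributes"},
		&exprNode{children: []node{&pathNode{name: "resource.attributes"}}},
	}}
	collector := &pathCollector{}
	ast.accept(collector)
	fmt.Println(collector.paths) // [attributes resource.attributes]
}
```

The payoff is that a new analysis, such as the path extraction added in `paths.go` further down in this diff, only implements the callbacks it needs and leaves the rest as no-ops.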
+type grammarVisitor interface { + visitPath(v *path) + visitEditor(v *editor) + visitValue(v *value) + visitMathExprLiteral(v *mathExprLiteral) +} diff --git a/pkg/ottl/parser_test.go b/pkg/ottl/parser_test.go index d0a5e4b47add..8a6040741f63 100644 --- a/pkg/ottl/parser_test.go +++ b/pkg/ottl/parser_test.go @@ -12,6 +12,7 @@ import ( "testing" "time" + "github.com/alecthomas/participle/v2/lexer" "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" @@ -207,6 +208,11 @@ func Test_parse(t *testing.T) { Value: &value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 38, + Line: 1, + Column: 39, + }, Context: "bear", Fields: []field{ { @@ -267,6 +273,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 24, + Line: 1, + Column: 25, + }, Context: "bear", Fields: []field{ { @@ -298,6 +309,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Context: "foo", Fields: []field{ { @@ -337,6 +353,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Context: "", Fields: []field{ { @@ -373,6 +394,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 16, + Line: 1, + Column: 17, + }, Fields: []field{ { Name: "attributes", @@ -396,6 +422,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 51, + Line: 1, + Column: 52, + }, Fields: []field{ { Name: "attributes", @@ -431,6 +462,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 16, + Line: 1, + Column: 17, + }, Fields: []field{ { Name: "attributes", @@ -464,6 +500,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 16, + Line: 1, + Column: 17, + }, Fields: []field{ { Name: "attributes", @@ -499,6 +540,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Context: "foo", Fields: []field{ { @@ -553,6 +599,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Context: "foo", Fields: []field{ { @@ -585,6 +636,11 @@ func Test_parse(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 44, + Line: 1, + Column: 45, + }, Fields: []field{ { Name: "name", @@ -614,6 +670,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Context: "foo", Fields: []field{ { @@ -646,6 +707,11 @@ func Test_parse(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 44, + Line: 1, + Column: 45, + }, Fields: []field{ { Name: "name", @@ -675,6 +741,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 7, + Line: 1, + Column: 8, + }, Context: "foo", Fields: []field{ { @@ -707,6 +778,11 @@ func Test_parse(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ 
+ Offset: 52, + Line: 1, + Column: 53, + }, Fields: []field{ { Name: "name", @@ -797,6 +873,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -832,6 +913,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -867,6 +953,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -902,6 +993,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -939,6 +1035,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -980,6 +1081,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -1024,6 +1130,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -1095,6 +1206,11 @@ func Test_parse(t *testing.T) { { Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 70, + Line: 1, + Column: 71, + }, Fields: []field{ { Name: "attributes", @@ -1128,6 +1244,11 @@ func Test_parse(t *testing.T) { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "attributes", @@ -1213,6 +1334,11 @@ func Test_parse(t *testing.T) { Left: &mathValue{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 55, + Line: 1, + Column: 56, + }, Fields: []field{ { Name: "three", @@ -1287,6 +1413,11 @@ func Test_parseCondition_full(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 0, + Line: 1, + Column: 1, + }, Fields: []field{ { Name: "name", @@ -1314,6 +1445,11 @@ func Test_parseCondition_full(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 0, + Line: 1, + Column: 1, + }, Fields: []field{ { Name: "name", @@ -1378,6 +1514,11 @@ func Test_parseCondition_full(t *testing.T) { Left: &mathValue{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 13, + Line: 1, + Column: 14, + }, Fields: []field{ { Name: "three", @@ -1477,6 +1618,11 @@ func setNameTest(b *booleanExpression) *parsedStatement { Value: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, Fields: []field{ { Name: "name", @@ -1713,6 +1859,11 @@ func Test_parseWhere(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 24, + Line: 1, + Column: 25, + }, Fields: []field{ { Name: "name", @@ -1735,6 +1886,11 @@ func Test_parseWhere(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 42, + Line: 1, + Column: 43, + }, Fields: []field{ { Name: "name", @@ -1763,6 +1919,11 @@ func 
Test_parseWhere(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 24, + Line: 1, + Column: 25, + }, Fields: []field{ { Name: "name", @@ -1787,6 +1948,11 @@ func Test_parseWhere(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 41, + Line: 1, + Column: 42, + }, Fields: []field{ { Name: "name", @@ -1839,6 +2005,11 @@ func Test_parseWhere(t *testing.T) { Left: value{ Literal: &mathExprLiteral{ Path: &path{ + Pos: lexer.Position{ + Offset: 28, + Line: 1, + Column: 29, + }, Fields: []field{ { Name: "name", diff --git a/pkg/ottl/paths.go b/pkg/ottl/paths.go new file mode 100644 index 000000000000..dbb66ee7c994 --- /dev/null +++ b/pkg/ottl/paths.go @@ -0,0 +1,32 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package ottl // import "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + +// grammarPathVisitor is used to extract all path from a parsedStatement or booleanExpression +type grammarPathVisitor struct { + paths []path +} + +func (v *grammarPathVisitor) visitEditor(_ *editor) {} +func (v *grammarPathVisitor) visitValue(_ *value) {} +func (v *grammarPathVisitor) visitMathExprLiteral(_ *mathExprLiteral) {} + +func (v *grammarPathVisitor) visitPath(value *path) { + v.paths = append(v.paths, *value) +} + +func getParsedStatementPaths(ps *parsedStatement) []path { + visitor := &grammarPathVisitor{} + ps.Editor.accept(visitor) + if ps.WhereClause != nil { + ps.WhereClause.accept(visitor) + } + return visitor.paths +} + +func getBooleanExpressionPaths(be *booleanExpression) []path { + visitor := &grammarPathVisitor{} + be.accept(visitor) + return visitor.paths +} diff --git a/pkg/ottl/paths_test.go b/pkg/ottl/paths_test.go new file mode 100644 index 000000000000..9f31dda15718 --- /dev/null +++ b/pkg/ottl/paths_test.go @@ -0,0 +1,450 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package ottl + +import ( + "testing" + + "github.com/alecthomas/participle/v2/lexer" + "github.com/stretchr/testify/require" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/ottltest" +) + +func Test_getParsedStatementPaths(t *testing.T) { + tests := []struct { + name string + statement string + expected []path + }{ + { + name: "editor with nested map with path", + statement: `fff({"mapAttr": {"foo": "bar", "get": bear.honey, "arrayAttr":["foo", "bar"]}})`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 38, + Line: 1, + Column: 39, + }, + Context: "bear", + Fields: []field{ + { + Name: "honey", + }, + }, + }, + }, + }, + { + name: "editor with function path parameter", + statement: `set("foo", GetSomething(bear.honey))`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 24, + Line: 1, + Column: 25, + }, + Context: "bear", + Fields: []field{ + { + Name: "honey", + }, + }, + }, + }, + }, + { + name: "path with key", + statement: `set(foo.attributes["bar"].cat, "dog")`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "foo", + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("bar"), + }, + }, + }, + { + Name: "cat", + }, + }, + }, + }, + }, + { + name: "single path field segment", + statement: `set(attributes["bar"], "dog")`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "", + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + 
{ + String: ottltest.Strp("bar"), + }, + }, + }, + }, + }, + }, + }, + { + name: "converter parameters", + statement: `replace_pattern(attributes["message"], "device=*", attributes["device_name"], SHA256)`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 16, + Line: 1, + Column: 17, + }, + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("message"), + }, + }, + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 51, + Line: 1, + Column: 52, + }, + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("device_name"), + }, + }, + }, + }, + }, + }, + }, + { + name: "complex path with multiple keys", + statement: `set(foo.bar["x"]["y"].z, Test()[0]["pass"])`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "foo", + Fields: []field{ + { + Name: "bar", + Keys: []key{ + { + String: ottltest.Strp("x"), + }, + { + String: ottltest.Strp("y"), + }, + }, + }, + { + Name: "z", + }, + }, + }, + }, + }, + { + name: "where clause", + statement: `set(foo.attributes["bar"].cat, "dog") where name == "fido"`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "foo", + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("bar"), + }, + }, + }, + { + Name: "cat", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 44, + Line: 1, + Column: 45, + }, + Fields: []field{ + { + Name: "name", + }, + }, + }, + }, + }, + { + name: "where clause multiple conditions", + statement: `set(foo.attributes["bar"].cat, "dog") where name == "fido" and surname == "dido" or surname == "DIDO"`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "foo", + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("bar"), + }, + }, + }, + { + Name: "cat", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 44, + Line: 1, + Column: 45, + }, + Fields: []field{ + { + Name: "name", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 63, + Line: 1, + Column: 64, + }, + Fields: []field{ + { + Name: "surname", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 84, + Line: 1, + Column: 85, + }, + Fields: []field{ + { + Name: "surname", + }, + }, + }, + }, + }, + { + name: "where clause sub expression", + statement: `set(foo.attributes["bar"].cat, "value") where three / (1 + 1) == foo.value`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Context: "foo", + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("bar"), + }, + }, + }, + { + Name: "cat", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 46, + Line: 1, + Column: 47, + }, + Fields: []field{ + { + Name: "three", + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 65, + Line: 1, + Column: 66, + }, + Context: "foo", + Fields: []field{ + { + Name: "value", + }, + }, + }, + }, + }, + { + name: "converter with path list", + statement: `set(attributes["test"], [bear.bear, bear.honey])`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("test"), + }, + }, + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 25, + Line: 1, + Column: 26, + }, + Context: "bear", + Fields: []field{{Name: "bear"}}, + }, + { + Pos: lexer.Position{ + Offset: 36, + Line: 1, + Column: 37, + }, + Context: "bear", + Fields: 
[]field{{Name: "honey"}}, + }, + }, + }, + { + name: "converter math math expression", + statement: `set(attributes["test"], 1000 - 600) where 1 + 1 * 2 == three / One()`, + expected: []path{ + { + Pos: lexer.Position{ + Offset: 4, + Line: 1, + Column: 5, + }, + Fields: []field{ + { + Name: "attributes", + Keys: []key{ + { + String: ottltest.Strp("test"), + }, + }, + }, + }, + }, + { + Pos: lexer.Position{ + Offset: 55, + Line: 1, + Column: 56, + }, + Fields: []field{ + { + Name: "three", + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ps, err := parseStatement(tt.statement) + require.NoError(t, err) + + paths := getParsedStatementPaths(ps) + require.Equal(t, tt.expected, paths) + }) + } +} + +func Test_getBooleanExpressionPaths(t *testing.T) { + expected := []path{ + { + Pos: lexer.Position{ + Offset: 0, + Line: 1, + Column: 1, + }, + Context: "honey", + Fields: []field{{Name: "bear"}}, + }, + { + Pos: lexer.Position{ + Offset: 21, + Line: 1, + Column: 22, + }, + Context: "foo", + Fields: []field{{Name: "bar"}}, + }, + } + + c, err := parseCondition("honey.bear == 1 and (foo.bar == true or 1 == 1)") + require.NoError(t, err) + + paths := getBooleanExpressionPaths(c) + require.Equal(t, expected, paths) +} diff --git a/pkg/pdatatest/plogtest/logs_test.go b/pkg/pdatatest/plogtest/logs_test.go index 345823e1846a..c8910d24c3ef 100644 --- a/pkg/pdatatest/plogtest/logs_test.go +++ b/pkg/pdatatest/plogtest/logs_test.go @@ -142,6 +142,14 @@ func TestCompareLogs(t *testing.T) { withoutOptions: errors.New(`resource "map[]": scope "collector": log record "map[]": timestamp doesn't match expected: 11651379494838206465, actual: 11651379494838206464`), withOptions: nil, }, + { + name: "ignore-log-record-attribute-value", + compareOptions: []CompareLogsOption{ + IgnoreLogRecordAttributeValue("Key1"), + }, + withoutOptions: errors.New(`resource "map[]": scope "": missing expected log record: map[Key1:Val2]; resource "map[]": scope "": unexpected log record: map[Key1:Val1]`), + withOptions: nil, + }, } for _, tc := range tcs { diff --git a/pkg/pdatatest/plogtest/options.go b/pkg/pdatatest/plogtest/options.go index 412efd7d7eb5..632b76297c99 100644 --- a/pkg/pdatatest/plogtest/options.go +++ b/pkg/pdatatest/plogtest/options.go @@ -42,13 +42,47 @@ func (opt ignoreResourceAttributeValue) applyOnLogs(expected, actual plog.Logs) opt.maskLogsResourceAttributeValue(actual) } -func (opt ignoreResourceAttributeValue) maskLogsResourceAttributeValue(metrics plog.Logs) { - rls := metrics.ResourceLogs() +func (opt ignoreResourceAttributeValue) maskLogsResourceAttributeValue(logs plog.Logs) { + rls := logs.ResourceLogs() for i := 0; i < rls.Len(); i++ { internal.MaskResourceAttributeValue(rls.At(i).Resource(), opt.attributeName) } } +// IgnoreLogRecordAttributeValue is a CompareLogsOption that sets the value of an attribute +// to empty bytes for every log record +func IgnoreLogRecordAttributeValue(attributeName string) CompareLogsOption { + return ignoreLogRecordAttributeValue{ + attributeName: attributeName, + } +} + +type ignoreLogRecordAttributeValue struct { + attributeName string +} + +func (opt ignoreLogRecordAttributeValue) applyOnLogs(expected, actual plog.Logs) { + opt.maskLogRecordAttributeValue(expected) + opt.maskLogRecordAttributeValue(actual) +} + +func (opt ignoreLogRecordAttributeValue) maskLogRecordAttributeValue(logs plog.Logs) { + rls := logs.ResourceLogs() + for i := 0; i < logs.ResourceLogs().Len(); i++ { + sls := rls.At(i).ScopeLogs() + for j := 
0; j < sls.Len(); j++ { + lrs := sls.At(j).LogRecords() + for k := 0; k < lrs.Len(); k++ { + lr := lrs.At(k) + val, exists := lr.Attributes().Get(opt.attributeName) + if exists { + val.SetEmptyBytes() + } + } + } +} + func IgnoreTimestamp() CompareLogsOption { return compareLogsOptionFunc(func(expected, actual plog.Logs) { now := pcommon.NewTimestampFromTime(time.Now()) diff --git a/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml new file mode 100644 index 000000000000..5fed2e0e0108 --- /dev/null +++ b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml @@ -0,0 +1,12 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: Key1 + value: + stringValue: Val1 + body: {} + spanId: "" + traceId: "" + scope: {} diff --git a/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml new file mode 100644 index 000000000000..fbd1af584d58 --- /dev/null +++ b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml @@ -0,0 +1,12 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: Key1 + value: + stringValue: Val2 + body: {} + spanId: "" + traceId: "" + scope: {} diff --git a/pkg/translator/jaeger/traces_to_jaegerproto.go b/pkg/translator/jaeger/traces_to_jaegerproto.go index 5ece80c2e6b1..db79dac374b4 100644 --- a/pkg/translator/jaeger/traces_to_jaegerproto.go +++ b/pkg/translator/jaeger/traces_to_jaegerproto.go @@ -15,11 +15,11 @@ import ( // ProtoFromTraces translates internal trace data into the Jaeger Proto for GRPC. -// Returns slice of translated Jaeger batches and error if translation failed. +// Returns a slice of translated Jaeger batches.
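`ProtoFromTraces` now returns only the batches; the signature change just below drops the error, so the only conversion error left for callers is on the reverse direction (`ProtoToTraces`). A hypothetical standalone caller (not part of this change) under the new signature:

```go
// Hypothetical caller sketch; pdata and translator usage mirror the tests in this diff.
package main

import (
	"fmt"
	"log"

	"go.opentelemetry.io/collector/pdata/ptrace"

	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger"
)

func main() {
	td := ptrace.NewTraces()
	span := td.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans().AppendEmpty()
	span.SetName("example")

	// Forward conversion can no longer fail; it returns nil for empty input.
	batches := jaeger.ProtoFromTraces(td)

	// The reverse conversion still returns an error and keeps its check.
	back, err := jaeger.ProtoToTraces(batches)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(batches), back.SpanCount())
}
```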
-func ProtoFromTraces(td ptrace.Traces) ([]*model.Batch, error) { +func ProtoFromTraces(td ptrace.Traces) []*model.Batch { resourceSpans := td.ResourceSpans() if resourceSpans.Len() == 0 { - return nil, nil + return nil } batches := make([]*model.Batch, 0, resourceSpans.Len()) @@ -31,7 +31,7 @@ func ProtoFromTraces(td ptrace.Traces) ([]*model.Batch, error) { } } - return batches, nil + return batches } func resourceSpansToJaegerProto(rs ptrace.ResourceSpans) *model.Batch { diff --git a/pkg/translator/jaeger/traces_to_jaegerproto_test.go b/pkg/translator/jaeger/traces_to_jaegerproto_test.go index 96cc4bdf1982..672d97c6f320 100644 --- a/pkg/translator/jaeger/traces_to_jaegerproto_test.go +++ b/pkg/translator/jaeger/traces_to_jaegerproto_test.go @@ -220,12 +220,10 @@ func TestInternalTracesToJaegerProto(t *testing.T) { name string td ptrace.Traces jb *model.Batch - err error }{ { name: "empty", td: ptrace.NewTraces(), - err: nil, }, { @@ -234,13 +232,11 @@ func TestInternalTracesToJaegerProto(t *testing.T) { jb: &model.Batch{ Process: generateProtoProcess(), }, - err: nil, }, { name: "no-resource-attrs", td: generateTracesResourceOnlyWithNoAttrs(), - err: nil, }, { @@ -254,7 +250,6 @@ func TestInternalTracesToJaegerProto(t *testing.T) { generateProtoSpanWithTraceState(), }, }, - err: nil, }, { name: "library-info", @@ -267,7 +262,6 @@ func TestInternalTracesToJaegerProto(t *testing.T) { generateProtoSpanWithLibraryInfo("io.opentelemetry.test"), }, }, - err: nil, }, { name: "two-spans-child-parent", @@ -281,7 +275,6 @@ func TestInternalTracesToJaegerProto(t *testing.T) { generateProtoChildSpan(), }, }, - err: nil, }, { @@ -296,7 +289,6 @@ func TestInternalTracesToJaegerProto(t *testing.T) { generateProtoFollowerSpan(), }, }, - err: nil, }, { @@ -310,7 +302,6 @@ func TestInternalTracesToJaegerProto(t *testing.T) { generateJProtoSpanWithEventAttribute(), }, }, - err: nil, }, { name: "a-spans-with-two-parent", @@ -330,8 +321,7 @@ func TestInternalTracesToJaegerProto(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - jbs, err := ProtoFromTraces(test.td) - assert.EqualValues(t, test.err, err) + jbs := ProtoFromTraces(test.td) if test.jb == nil { assert.Empty(t, jbs) } else { @@ -348,8 +338,7 @@ func TestInternalTracesToJaegerProtoBatchesAndBack(t *testing.T) { "../../../internal/coreinternal/goldendataset/testdata/generated_pict_pairs_spans.txt") assert.NoError(t, err) for _, td := range tds { - protoBatches, err := ProtoFromTraces(td) - assert.NoError(t, err) + protoBatches := ProtoFromTraces(td) tdFromPB, err := ProtoToTraces(protoBatches) assert.NoError(t, err) assert.Equal(t, td.SpanCount(), tdFromPB.SpanCount()) @@ -388,7 +377,7 @@ func BenchmarkInternalTracesToJaegerProto(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - _, err := ProtoFromTraces(td) - assert.NoError(b, err) + batches := ProtoFromTraces(td) + assert.NotEmpty(b, batches) } } diff --git a/pkg/translator/opencensus/oc_to_traces.go b/pkg/translator/opencensus/oc_to_traces.go index 92c230e6f8ba..188dd2a0df91 100644 --- a/pkg/translator/opencensus/oc_to_traces.go +++ b/pkg/translator/opencensus/oc_to_traces.go @@ -12,7 +12,7 @@ import ( "go.opencensus.io/trace" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.12.0" "google.golang.org/protobuf/types/known/wrapperspb" 
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/occonventions" diff --git a/pkg/translator/opencensus/traces_to_oc.go b/pkg/translator/opencensus/traces_to_oc.go index 9d6dae3841d0..0f698ca5bf6b 100644 --- a/pkg/translator/opencensus/traces_to_oc.go +++ b/pkg/translator/opencensus/traces_to_oc.go @@ -13,7 +13,7 @@ import ( "go.opencensus.io/trace" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" - conventions "go.opentelemetry.io/collector/semconv/v1.6.1" + conventions "go.opentelemetry.io/collector/semconv/v1.12.0" "google.golang.org/protobuf/types/known/wrapperspb" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/occonventions" diff --git a/processor/logdedupprocessor/README.md b/processor/logdedupprocessor/README.md index edb5bf2a40c1..004f7862caac 100644 --- a/processor/logdedupprocessor/README.md +++ b/processor/logdedupprocessor/README.md @@ -15,7 +15,7 @@ This processor is used to deduplicate logs by detecting identical logs over a ra ## How It Works 1. The user configures the log deduplication processor in the desired logs pipeline. -2. All logs sent to the processor and aggregated over the configured `interval`. Logs are considered identical if they have the same body, resource attributes, severity, and log attributes. +2. If the processor does not provide `conditions`, all logs are considered eligible for aggregation. If the processor does have configured `conditions`, all log entries where at least one of the `conditions` evaluates `true` are considered eligible for aggregation. Eligible identical logs are aggregated over the configured `interval`. Logs are considered identical if they have the same body, resource attributes, severity, and log attributes. Logs that do not match any condition in `conditions` are passed onward in the pipeline without aggregating. 3. After the interval, the processor emits a single log with the count of logs that were deduplicated. The emitted log will have the same body, resource attributes, severity, and log attributes as the original log. The emitted log will also have the following new attributes: - `log_count`: The count of logs that were deduplicated over the interval. The name of the attribute is configurable via the `log_count_attribute` parameter. @@ -25,13 +25,17 @@ This processor is used to deduplicate logs by detecting identical logs over a ra **Note**: The `ObservedTimestamp` and `Timestamp` of the emitted log will be the time that the aggregated log was emitted and will not be the same as the `ObservedTimestamp` and `Timestamp` of the original logs. ## Configuration -| Field | Type | Default | Description | -| --- | --- | --- | --- | -| interval | duration | `10s` | The interval at which logs are aggregated. The counter will reset after each interval. | -| log_count_attribute | string | `log_count` | The name of the count attribute of deduplicated logs that will be added to the emitted aggregated log. | -| timezone | string | `UTC` | The timezone of the `first_observed_timestamp` and `last_observed_timestamp` timestamps on the emitted aggregated log. The available locations depend on the local IANA Time Zone database. [This page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) contains many examples, such as `America/New_York`. | -| exclude_fields | []string | `[]` | Fields to exclude from duplication matching. Fields can be excluded from the log `body` or `attributes`. 
These fields will not be present in the emitted aggregated log. Nested fields must be `.` delimited. If a field contains a `.` it can be escaped by using a `\` see [example config](#example-config-with-excluded-fields).

**Note**: The entire `body` cannot be excluded. If the body is a map then fields within it can be excluded. | +| Field | Type | Default | Description | +| --- | --- | --- | --- | +| interval | duration | `10s` | The interval at which logs are aggregated. The counter will reset after each interval. | +| conditions | []string | `[]` | A slice of [OTTL] expressions used to evaluate which log records are deduped. All paths in the [log context] are available to reference. All [converters] are available to use. | +| log_count_attribute | string | `log_count` | The name of the count attribute of deduplicated logs that will be added to the emitted aggregated log. | +| timezone | string | `UTC` | The timezone of the `first_observed_timestamp` and `last_observed_timestamp` timestamps on the emitted aggregated log. The available locations depend on the local IANA Time Zone database. [This page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) contains many examples, such as `America/New_York`. | +| exclude_fields | []string | `[]` | Fields to exclude from duplication matching. Fields can be excluded from the log `body` or `attributes`. These fields will not be present in the emitted aggregated log. Nested fields must be `.` delimited. If a field contains a `.` it can be escaped by using a `\` see [example config](#example-config-with-excluded-fields).

**Note**: The entire `body` cannot be excluded. If the body is a map then fields within it can be excluded. | +[OTTL]: https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/v0.109.0/pkg/ottl#readme +[converters]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.109.0/pkg/ottl/ottlfuncs/README.md#converters +[log context]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.109.0/pkg/ottl/contexts/ottllog/README.md ### Example Config The following config is an example configuration for the log deduplication processor. It is configured with an aggregation interval of `60 seconds`, a timezone of `America/Los_Angeles`, and a log count attribute of `dedup_count`. It has no fields being excluded. @@ -82,3 +86,30 @@ service: processors: [logdedup] exporters: [googlecloud] ``` + + +### Example Config with Conditions +The following config is an example configuration that only performs the deduping process on telemetry where Attribute `ID` equals `1` OR where Resource Attribute `service.name` equals `my-service`: + +```yaml +receivers: + filelog: + include: [./example/*.log] +processors: + logdedup: + conditions: + - attributes["ID"] == 1 + - resource.attributes["service.name"] == "my-service" + interval: 60s + log_count_attribute: dedup_count + timezone: 'America/Los_Angeles' +exporters: + googlecloud: + +service: + pipelines: + logs: + receivers: [filelog] + processors: [logdedup] + exporters: [googlecloud] +``` diff --git a/processor/logdedupprocessor/config.go b/processor/logdedupprocessor/config.go index eb99fcadd98d..04459d3ecbf8 100644 --- a/processor/logdedupprocessor/config.go +++ b/processor/logdedupprocessor/config.go @@ -44,6 +44,7 @@ type Config struct { Interval time.Duration `mapstructure:"interval"` Timezone string `mapstructure:"timezone"` ExcludeFields []string `mapstructure:"exclude_fields"` + Conditions []string `mapstructure:"conditions"` } // createDefaultConfig returns the default config for the processor. 
@@ -53,6 +54,7 @@ func createDefaultConfig() component.Config { Interval: defaultInterval, Timezone: defaultTimezone, ExcludeFields: []string{}, + Conditions: []string{}, } } diff --git a/processor/logdedupprocessor/config_test.go b/processor/logdedupprocessor/config_test.go index 94e3bc771922..a4e8fb120cd5 100644 --- a/processor/logdedupprocessor/config_test.go +++ b/processor/logdedupprocessor/config_test.go @@ -90,6 +90,7 @@ func TestValidateConfig(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{"body.thing", "attributes.otherthing"}, }, expectedErr: nil, diff --git a/processor/logdedupprocessor/factory.go b/processor/logdedupprocessor/factory.go index 1bcd5fc37d2f..0931e06cc176 100644 --- a/processor/logdedupprocessor/factory.go +++ b/processor/logdedupprocessor/factory.go @@ -11,6 +11,8 @@ import ( "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/processor" + "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/logdedupprocessor/internal/metadata" ) @@ -30,5 +32,25 @@ func createLogsProcessor(_ context.Context, settings processor.Settings, cfg com return nil, fmt.Errorf("invalid config type: %+v", cfg) } - return newProcessor(processorCfg, consumer, settings) + processor, err := newProcessor(processorCfg, consumer, settings) + if err != nil { + return nil, fmt.Errorf("error creating processor: %w", err) + } + + if len(processorCfg.Conditions) == 0 { + processor.conditions = nil + } else { + conditions, err := filterottl.NewBoolExprForLog( + processorCfg.Conditions, + filterottl.StandardLogFuncs(), + ottl.PropagateError, + settings.TelemetrySettings, + ) + if err != nil { + return nil, fmt.Errorf("invalid condition: %w", err) + } + processor.conditions = conditions + } + + return processor, nil } diff --git a/processor/logdedupprocessor/factory_test.go b/processor/logdedupprocessor/factory_test.go index 9dfee3556611..1db3284ca45b 100644 --- a/processor/logdedupprocessor/factory_test.go +++ b/processor/logdedupprocessor/factory_test.go @@ -37,6 +37,37 @@ func TestCreateLogs(t *testing.T) { cfg: nil, expectedErr: "invalid config type", }, + { + name: "valid custom condition", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"false"}, + }, + }, + { + name: "valid multiple conditions", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"false", `(attributes["ID"] == 1)`}, + }, + }, + { + name: "invalid condition", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"x"}, + }, + expectedErr: "invalid condition", + }, } for _, tc := range testCases { diff --git a/processor/logdedupprocessor/generated_component_telemetry_test.go b/processor/logdedupprocessor/generated_component_telemetry_test.go index f4dc8b79694b..3254ebfd89d1 100644 --- a/processor/logdedupprocessor/generated_component_telemetry_test.go +++ b/processor/logdedupprocessor/generated_component_telemetry_test.go @@ -7,13 +7,12 @@ import ( "testing" 
"github.com/stretchr/testify/require" - sdkmetric "go.opentelemetry.io/otel/sdk/metric" - "go.opentelemetry.io/otel/sdk/metric/metricdata" - "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" - "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/processor" "go.opentelemetry.io/collector/processor/processortest" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.opentelemetry.io/otel/sdk/metric/metricdata" + "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" ) type componentTestTelemetry struct { diff --git a/processor/logdedupprocessor/go.mod b/processor/logdedupprocessor/go.mod index d0ca80ca9a36..038b6efe3ab2 100644 --- a/processor/logdedupprocessor/go.mod +++ b/processor/logdedupprocessor/go.mod @@ -3,6 +3,10 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/processor/logde go 1.22.0 require ( + github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.111.0 github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.111.0 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.111.0 @@ -26,20 +30,34 @@ require ( ) require ( + github.com/alecthomas/participle/v2 v2.1.1 // indirect + github.com/antchfx/xmlquery v1.4.1 // indirect + github.com/antchfx/xpath v1.3.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/elastic/go-grok v0.3.1 // indirect + github.com/elastic/lunes v0.1.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-viper/mapstructure/v2 v2.1.0 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-json v0.10.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect github.com/knadh/koanf/v2 v2.1.1 // indirect + github.com/magefile/mage v1.15.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 // indirect + github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6 // indirect go.opentelemetry.io/collector/component/componentstatus v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 // indirect go.opentelemetry.io/collector/internal/globalsignal v0.111.0 // indirect @@ -47,15 +65,28 @@ require ( go.opentelemetry.io/collector/pdata/testdata v0.111.0 // indirect go.opentelemetry.io/collector/pipeline v0.111.0 // indirect go.opentelemetry.io/collector/processor/processorprofiles v0.111.0 // indirect + 
go.opentelemetry.io/collector/semconv v0.111.0 // indirect go.opentelemetry.io/otel v1.30.0 // indirect go.opentelemetry.io/otel/sdk v1.30.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/net v0.28.0 // indirect + golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect + golang.org/x/net v0.29.0 // indirect golang.org/x/sys v0.25.0 // indirect - golang.org/x/text v0.17.0 // indirect + golang.org/x/text v0.18.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/grpc v1.67.1 // indirect google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect ) replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil => ../../pkg/pdatautil + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest => ../../pkg/pdatatest + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden => ../../pkg/golden + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl => ../../pkg/ottl + +replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter => ../../internal/filter + +replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal => ../../internal/coreinternal diff --git a/processor/logdedupprocessor/go.sum b/processor/logdedupprocessor/go.sum index 18d96827a0f8..c2135021a81d 100644 --- a/processor/logdedupprocessor/go.sum +++ b/processor/logdedupprocessor/go.sum @@ -1,8 +1,22 @@ +github.com/alecthomas/assert/v2 v2.3.0 h1:mAsH2wmvjsuvyBvAmCtm7zFsBlb8mIHx5ySLVdDZXL0= +github.com/alecthomas/assert/v2 v2.3.0/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= +github.com/alecthomas/participle/v2 v2.1.1 h1:hrjKESvSqGHzRb4yW1ciisFJ4p3MGYih6icjJvbsmV8= +github.com/alecthomas/participle/v2 v2.1.1/go.mod h1:Y1+hAs8DHPmc3YUFzqllV+eSQ9ljPTk0ZkPMtEdAx2c= +github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= +github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/antchfx/xmlquery v1.4.1 h1:YgpSwbeWvLp557YFTi8E3z6t6/hYjmFEtiEKbDfEbl0= +github.com/antchfx/xmlquery v1.4.1/go.mod h1:lKezcT8ELGt8kW5L+ckFMTbgdR61/odpPgDv8Gvi1fI= +github.com/antchfx/xpath v1.3.1 h1:PNbFuUqHwWl0xRjvUPjJ95Agbmdj2uzzIwmQKgu4oCk= +github.com/antchfx/xpath v1.3.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elastic/go-grok v0.3.1 h1:WEhUxe2KrwycMnlvMimJXvzRa7DoByJB4PVUIE1ZD/U= +github.com/elastic/go-grok v0.3.1/go.mod h1:n38ls8ZgOboZRgKcjMY8eFeZFMmcL9n2lP0iHhIDk64= +github.com/elastic/lunes v0.1.0 h1:amRtLPjwkWtzDF/RKzcEPMvSsSseLDLW+bnhfNSLRe4= +github.com/elastic/lunes v0.1.0/go.mod h1:xGphYIt3XdZRtyWosHQTErsQTd4OP1p9wsbVoHelrd4= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod 
h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -10,13 +24,27 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= @@ -31,6 +59,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= +github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= @@ -48,8 +78,11 @@ github.com/stretchr/objx 
v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6 h1:SIKIoA4e/5Y9ZOl0DCe3eVMLPOQzJxgZpfdHHeauNTM= +github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.opentelemetry.io/collector/component v0.111.0 h1:AiDIrhkq6sbHnU9Rhq6t4DC4Gal43bryd1+NTJNojAQ= go.opentelemetry.io/collector/component v0.111.0/go.mod h1:wYwbRuhzK5bm5x1bX+ukm1tT50QXYLs4MKwzyfiVGoE= go.opentelemetry.io/collector/component/componentstatus v0.111.0 h1:DojO8TbkysTtEoxzN6fJqhgCsu0QhxgJ9R+1bitnowM= @@ -78,6 +111,8 @@ go.opentelemetry.io/collector/processor v0.111.0 h1:85Llb9ekzzvzAXgFaw/n7LHFJ5QA go.opentelemetry.io/collector/processor v0.111.0/go.mod h1:78Z4f96j9trPFZIRCiQk6nVRo6vua4cW9VYNfHTBsvo= go.opentelemetry.io/collector/processor/processorprofiles v0.111.0 h1:QxnwbqClJvS7zDWgsIaqqDs5YsmHgFvmZKQsmoLTqJM= go.opentelemetry.io/collector/processor/processorprofiles v0.111.0/go.mod h1:8qPd8Af0XX7Wlupe8JHmdhkKMiiJ5AO7OEFYW3fN0CQ= +go.opentelemetry.io/collector/semconv v0.111.0 h1:ELleMtLBzeZ3xhfhYPmFcLc0hJMqRxhOB0eY60WLivw= +go.opentelemetry.io/collector/semconv v0.111.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= @@ -97,30 +132,49 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.28.0 
h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= -golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= +golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -134,5 +188,8 @@ google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWn gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/processor/logdedupprocessor/internal/metadata/generated_telemetry.go b/processor/logdedupprocessor/internal/metadata/generated_telemetry.go index c0cdc9e39ec3..0fd0abafca48 100644 --- a/processor/logdedupprocessor/internal/metadata/generated_telemetry.go +++ b/processor/logdedupprocessor/internal/metadata/generated_telemetry.go @@ -5,12 +5,11 @@ package metadata import ( "errors" + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configtelemetry" "go.opentelemetry.io/otel/metric" "go.opentelemetry.io/otel/metric/noop" "go.opentelemetry.io/otel/trace" - - "go.opentelemetry.io/collector/component" - "go.opentelemetry.io/collector/config/configtelemetry" ) func Meter(settings component.TelemetrySettings) metric.Meter { diff --git a/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go b/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go index 49a5cbdefd75..bd8fe035313b 100644 --- a/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go +++ b/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go @@ -6,14 +6,13 @@ import ( "testing" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/otel/metric" embeddedmetric "go.opentelemetry.io/otel/metric/embedded" noopmetric "go.opentelemetry.io/otel/metric/noop" "go.opentelemetry.io/otel/trace" embeddedtrace "go.opentelemetry.io/otel/trace/embedded" nooptrace "go.opentelemetry.io/otel/trace/noop" - - "go.opentelemetry.io/collector/component" ) type mockMeter struct { diff --git a/processor/logdedupprocessor/processor.go b/processor/logdedupprocessor/processor.go index 4263da3400dd..804c312bff0d 100644 --- a/processor/logdedupprocessor/processor.go +++ b/processor/logdedupprocessor/processor.go @@ -11,16 +11,20 @@ import ( "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/processor" "go.uber.org/zap" + "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/expr" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllog" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/logdedupprocessor/internal/metadata" ) // logDedupProcessor is a logDedupProcessor that counts duplicate instances of logs. 
type logDedupProcessor struct { emitInterval time.Duration + conditions expr.BoolExpr[ottllog.TransformContext] aggregator *logAggregator remover *fieldRemover nextConsumer consumer.Logs @@ -78,7 +82,7 @@ func (p *logDedupProcessor) Shutdown(_ context.Context) error { } // ConsumeLogs processes the logs. -func (p *logDedupProcessor) ConsumeLogs(_ context.Context, pl plog.Logs) error { +func (p *logDedupProcessor) ConsumeLogs(ctx context.Context, pl plog.Logs) error { p.mux.Lock() defer p.mux.Unlock() @@ -89,21 +93,44 @@ func (p *logDedupProcessor) ConsumeLogs(_ context.Context, pl plog.Logs) error { for j := 0; j < rl.ScopeLogs().Len(); j++ { sl := rl.ScopeLogs().At(j) scope := sl.Scope() + logs := sl.LogRecords() + + logs.RemoveIf(func(logRecord plog.LogRecord) bool { + if p.conditions == nil { + p.aggregateLog(logRecord, scope, resource) + return true + } + + logCtx := ottllog.NewTransformContext(logRecord, scope, resource, sl, rl) + logMatch, err := p.conditions.Eval(ctx, logCtx) + if err != nil { + p.logger.Error("error matching conditions", zap.Error(err)) + return false + } + if logMatch { + p.aggregateLog(logRecord, scope, resource) + } + return logMatch + }) + } + } - for k := 0; k < sl.LogRecords().Len(); k++ { - logRecord := sl.LogRecords().At(k) - // Remove excluded fields if any - p.remover.RemoveFields(logRecord) - - // Add the log to the aggregator - p.aggregator.Add(resource, scope, logRecord) - } + // immediately consume any logs that didn't match any conditions + if pl.LogRecordCount() > 0 { + err := p.nextConsumer.ConsumeLogs(ctx, pl) + if err != nil { + p.logger.Error("failed to consume logs", zap.Error(err)) } } return nil } +func (p *logDedupProcessor) aggregateLog(logRecord plog.LogRecord, scope pcommon.InstrumentationScope, resource pcommon.Resource) { + p.remover.RemoveFields(logRecord) + p.aggregator.Add(resource, scope, logRecord) +} + // handleExportInterval sends metrics at the configured interval. 
func (p *logDedupProcessor) handleExportInterval(ctx context.Context) { defer p.wg.Done() diff --git a/processor/logdedupprocessor/processor_test.go b/processor/logdedupprocessor/processor_test.go index a7727d3cb81e..d5d4c1429f33 100644 --- a/processor/logdedupprocessor/processor_test.go +++ b/processor/logdedupprocessor/processor_test.go @@ -7,6 +7,7 @@ import ( "context" "errors" "fmt" + "path/filepath" "testing" "time" @@ -14,9 +15,11 @@ import ( "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/consumertest" - "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/processor/processortest" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/plogtest" ) func Test_newProcessor(t *testing.T) { @@ -31,6 +34,7 @@ func Test_newProcessor(t *testing.T) { cfg: &Config{ LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, + Conditions: []string{}, Timezone: "bad timezone", }, expected: nil, @@ -41,6 +45,7 @@ func Test_newProcessor(t *testing.T) { cfg: &Config{ LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, + Conditions: []string{}, Timezone: defaultTimezone, }, expected: &logDedupProcessor{ @@ -84,10 +89,11 @@ func TestProcessorShutdownCtxError(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) // Start then stop the processor checking for errors @@ -109,13 +115,14 @@ func TestShutdownBeforeStart(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{ fmt.Sprintf("%s.remove_me", attributeField), }, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) require.NotPanics(t, func() { err := p.Shutdown(context.Background()) @@ -130,33 +137,21 @@ func TestProcessorConsume(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{ fmt.Sprintf("%s.remove_me", attributeField), }, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) err = p.Start(context.Background(), componenttest.NewNopHost()) require.NoError(t, err) - // Create plog payload - logRecord1 := generateTestLogRecord(t, "Body of the log") - logRecord2 := generateTestLogRecord(t, "Body of the log") - - // Differ by timestamp and attribute to be removed - logRecord1.SetTimestamp(pcommon.NewTimestampFromTime(time.Now().Add(time.Minute))) - logRecord2.Attributes().PutBool("remove_me", false) - - logs := plog.NewLogs() - rl := logs.ResourceLogs().AppendEmpty() - rl.Resource().Attributes().PutInt("one", 1) - - sl := rl.ScopeLogs().AppendEmpty() - logRecord1.CopyTo(sl.LogRecords().AppendEmpty()) - logRecord2.CopyTo(sl.LogRecords().AppendEmpty()) + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "basicLogs.yaml")) + 
require.NoError(t, err) // Consume the payload err = p.ConsumeLogs(context.Background(), logs) @@ -167,22 +162,13 @@ func TestProcessorConsume(t *testing.T) { return logsSink.LogRecordCount() > 0 }, 3*time.Second, 200*time.Millisecond) + expectedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "basicLogs.yaml")) + require.NoError(t, err) + allSinkLogs := logsSink.AllLogs() require.Len(t, allSinkLogs, 1) - consumedLogs := allSinkLogs[0] - require.Equal(t, 1, consumedLogs.LogRecordCount()) - - require.Equal(t, 1, consumedLogs.ResourceLogs().Len()) - consumedRl := consumedLogs.ResourceLogs().At(0) - require.Equal(t, 1, consumedRl.ScopeLogs().Len()) - consumedSl := consumedRl.ScopeLogs().At(0) - require.Equal(t, 1, consumedSl.LogRecords().Len()) - consumedLogRecord := consumedSl.LogRecords().At(0) - - countVal, ok := consumedLogRecord.Attributes().Get(cfg.LogCountAttribute) - require.True(t, ok) - require.Equal(t, int64(2), countVal.Int()) + require.NoError(t, plogtest.CompareLogs(expectedLogs, allSinkLogs[0], plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) // Cleanup err = p.Shutdown(context.Background()) @@ -195,10 +181,11 @@ func Test_unsetLogsAreExportedOnShutdown(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, } // Create & start a processor - p, err := newProcessor(cfg, logsSink, processortest.NewNopSettings()) + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) require.NoError(t, err) err = p.Start(context.Background(), componenttest.NewNopHost()) require.NoError(t, err) @@ -221,3 +208,104 @@ func Test_unsetLogsAreExportedOnShutdown(t *testing.T) { exportedLogs := logsSink.AllLogs() require.Len(t, exportedLogs, 1) } + +func TestProcessorConsumeCondition(t *testing.T) { + logsSink := &consumertest.LogsSink{} + cfg := &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: 1 * time.Second, + Timezone: defaultTimezone, + Conditions: []string{`(attributes["ID"] == 1)`}, + ExcludeFields: []string{ + fmt.Sprintf("%s.remove_me", attributeField), + }, + } + + // Create a processor + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) + require.NoError(t, err) + + err = p.Start(context.Background(), componenttest.NewNopHost()) + require.NoError(t, err) + + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "conditionLogs.yaml")) + require.NoError(t, err) + + // Consume the payload + err = p.ConsumeLogs(context.Background(), logs) + require.NoError(t, err) + + // Wait for the logs to be emitted + require.Eventually(t, func() bool { + return logsSink.LogRecordCount() > 4 + }, 3*time.Second, 200*time.Millisecond) + + allSinkLogs := logsSink.AllLogs() + require.Len(t, allSinkLogs, 2) + + expectedConsumedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "conditionConsumedLogs.yaml")) + require.NoError(t, err) + expectedDedupedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "conditionDedupedLogs.yaml")) + require.NoError(t, err) + + consumedLogs := allSinkLogs[0] + dedupedLogs := allSinkLogs[1] + + require.NoError(t, plogtest.CompareLogs(expectedConsumedLogs, consumedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), 
plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + require.NoError(t, plogtest.CompareLogs(expectedDedupedLogs, dedupedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + + // Cleanup + err = p.Shutdown(context.Background()) + require.NoError(t, err) +} + +func TestProcessorConsumeMultipleConditions(t *testing.T) { + logsSink := &consumertest.LogsSink{} + cfg := &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: 1 * time.Second, + Timezone: defaultTimezone, + Conditions: []string{`attributes["ID"] == 1`, `attributes["ID"] == 3`}, + ExcludeFields: []string{ + fmt.Sprintf("%s.remove_me", attributeField), + }, + } + + // Create a processor + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) + require.NoError(t, err) + + err = p.Start(context.Background(), componenttest.NewNopHost()) + require.NoError(t, err) + + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "conditionLogs.yaml")) + require.NoError(t, err) + + // Consume the payload + err = p.ConsumeLogs(context.Background(), logs) + require.NoError(t, err) + + // Wait for the logs to be emitted + require.Eventually(t, func() bool { + return logsSink.LogRecordCount() > 3 + }, 3*time.Second, 200*time.Millisecond) + + allSinkLogs := logsSink.AllLogs() + require.Len(t, allSinkLogs, 2) + + consumedLogs := allSinkLogs[0] + dedupedLogs := allSinkLogs[1] + + expectedConsumedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "multipleConditionsConsumedLogs.yaml")) + require.NoError(t, err) + expectedDedupedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "multipleConditionsDedupedLogs.yaml")) + require.NoError(t, err) + + err = plogtest.CompareLogs(expectedConsumedLogs, consumedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp")) + require.NoError(t, err) + require.NoError(t, plogtest.CompareLogs(expectedDedupedLogs, dedupedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + + // Cleanup + err = p.Shutdown(context.Background()) + require.NoError(t, err) +} diff --git a/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml b/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml new file mode 100644 index 000000000000..93d3910a70f7 --- /dev/null +++ b/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml @@ -0,0 +1,69 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: 
str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/basicLogs.yaml b/processor/logdedupprocessor/testdata/expected/basicLogs.yaml new file mode 100644 index 000000000000..337aebd8429a --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/basicLogs.yaml @@ -0,0 +1,32 @@ +resourceLogs: + - resource: + attributes: + - key: one + value: + intValue: "1" + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:21:47Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:21:47Z" + body: + stringValue: Body of the log + observedTimeUnixNano: "1728069707998122000" + severityText: info + spanId: "" + timeUnixNano: "1728069708998920000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml b/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml new file mode 100644 index 000000000000..93d3910a70f7 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml @@ -0,0 +1,69 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml b/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml new file mode 100644 index 000000000000..ada5796a37aa --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml @@ -0,0 +1,31 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:40:31Z" + - key: last_observed_timestamp + value: + stringValue: 
"2024-10-04T19:40:31Z" + body: + stringValue: Body of the log1 + observedTimeUnixNano: "1728070831326144000" + severityText: info + spanId: "" + timeUnixNano: "1728070832326078000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml b/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml new file mode 100644 index 000000000000..2001fc0908e2 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml @@ -0,0 +1,37 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml b/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml new file mode 100644 index 000000000000..04c55be19df3 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml @@ -0,0 +1,57 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + body: + stringValue: Body of the log1 + observedTimeUnixNano: "1728071199778796000" + severityText: info + spanId: "" + timeUnixNano: "1728071200779450000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + body: + stringValue: Body of the log3 + observedTimeUnixNano: "1728071199778800000" + severityText: info + spanId: "" + timeUnixNano: "1728071200779466000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/input/basicLogs.yaml b/processor/logdedupprocessor/testdata/input/basicLogs.yaml new file mode 100644 index 000000000000..601a729be289 --- /dev/null +++ b/processor/logdedupprocessor/testdata/input/basicLogs.yaml @@ -0,0 +1,38 @@ +resourceLogs: + - resource: + attributes: + - key: one + value: + intValue: "1" + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + body: + stringValue: Body of the log + severityText: info + spanId: "" + timeUnixNano: "1728069266547395000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: remove_me + value: + boolValue: false + body: + stringValue: Body of the log + severityText: info + spanId: "" + timeUnixNano: "1728069206547395000" + traceId: "" + scope: {} diff --git 
a/processor/logdedupprocessor/testdata/input/conditionLogs.yaml b/processor/logdedupprocessor/testdata/input/conditionLogs.yaml new file mode 100644 index 000000000000..b353aa339351 --- /dev/null +++ b/processor/logdedupprocessor/testdata/input/conditionLogs.yaml @@ -0,0 +1,101 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + body: + stringValue: Body of the log1 + severityText: info + spanId: "" + timeUnixNano: "1728069385995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + body: + stringValue: Body of the log1 + severityText: info + spanId: "" + timeUnixNano: "1728069445995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {} diff --git a/processor/resourcedetectionprocessor/go.mod b/processor/resourcedetectionprocessor/go.mod index 81636d1e2c76..e069f9db1a12 100644 --- a/processor/resourcedetectionprocessor/go.mod +++ b/processor/resourcedetectionprocessor/go.mod @@ -4,7 +4,7 @@ go 1.22.0 require ( cloud.google.com/go/compute/metadata v0.5.2 - github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 github.com/aws/aws-sdk-go v1.55.5 github.com/google/go-cmp v0.6.0 github.com/hashicorp/consul/api v1.29.4 diff --git a/processor/resourcedetectionprocessor/go.sum b/processor/resourcedetectionprocessor/go.sum index e3fdf47ce666..3e80a0ec1851 100644 --- a/processor/resourcedetectionprocessor/go.sum +++ b/processor/resourcedetectionprocessor/go.sum @@ -35,8 +35,8 @@ github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBp github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 h1:pB2F2JKCj1Znmp2rwxxt1J0Fg0wezTMgWYk5Mpbi1kg= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= 
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2 h1:cZpsGsWTIFKymTA0je7IIvi1O7Es7apb9CF3EQlOcfE= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.2/go.mod h1:itPGVDKf9cC/ov4MdvJ2QZ0khw4bfoo9jzwTJlaxy2k= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= diff --git a/receiver/jaegerreceiver/jaeger_agent_test.go b/receiver/jaegerreceiver/jaeger_agent_test.go index 93fa35c85888..3a80c3c1804c 100644 --- a/receiver/jaegerreceiver/jaeger_agent_test.go +++ b/receiver/jaegerreceiver/jaeger_agent_test.go @@ -190,8 +190,7 @@ func testJaegerAgent(t *testing.T, agentEndpoint string, receiverConfig *configu // 3. Now finally send some spans td := generateTraceData() - batches, err := jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) for _, batch := range batches { require.NoError(t, jexp.EmitBatch(context.Background(), modelToThrift(batch))) } diff --git a/receiver/jaegerreceiver/trace_receiver_test.go b/receiver/jaegerreceiver/trace_receiver_test.go index c3571fb59f8c..dc10c767c5e2 100644 --- a/receiver/jaegerreceiver/trace_receiver_test.go +++ b/receiver/jaegerreceiver/trace_receiver_test.go @@ -95,8 +95,7 @@ func TestReception(t *testing.T) { _, port, _ := net.SplitHostPort(addr) collectorAddr := fmt.Sprintf("http://localhost:%s/api/traces", port) td := generateTraceData() - batches, err := jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) for _, batch := range batches { require.NoError(t, sendToCollector(collectorAddr, modelToThrift(batch))) } diff --git a/receiver/kafkareceiver/jaeger_unmarshaler_test.go b/receiver/kafkareceiver/jaeger_unmarshaler_test.go index 0ef0af468ed1..3af93c6ca398 100644 --- a/receiver/kafkareceiver/jaeger_unmarshaler_test.go +++ b/receiver/kafkareceiver/jaeger_unmarshaler_test.go @@ -24,8 +24,7 @@ func TestUnmarshalJaeger(t *testing.T) { span.SetEndTimestamp(pcommon.Timestamp(20)) span.SetTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) span.SetSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - batches, err := jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) protoBytes, err := batches[0].Spans[0].Marshal() require.NoError(t, err) diff --git a/receiver/prometheusremotewritereceiver/README.md b/receiver/prometheusremotewritereceiver/README.md index 8e85e04f59f2..8f851cc5a8cf 100644 --- a/receiver/prometheusremotewritereceiver/README.md +++ b/receiver/prometheusremotewritereceiver/README.md @@ -1,8 +1,10 @@ +# Prometheus Remote Write Receiver + | Status | | | ------------- |-----------| | Stability | [development]: metrics | -| Distributions | | +| Distributions | [] | | Issues | [![Open issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aopen%20label%3Areceiver%2Fprometheusremotewrite%20&label=open&color=orange&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aopen+is%3Aissue+label%3Areceiver%2Fprometheusremotewrite) [![Closed 
issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aclosed%20label%3Areceiver%2Fprometheusremotewrite%20&label=closed&color=blue&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aclosed+is%3Aissue+label%3Areceiver%2Fprometheusremotewrite) | | [Code Owners](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/CONTRIBUTING.md#becoming-a-code-owner) | [@dashpole](https://www.github.com/dashpole), [@ArthurSens](https://www.github.com/ArthurSens) | diff --git a/receiver/prometheusremotewritereceiver/doc.go b/receiver/prometheusremotewritereceiver/doc.go new file mode 100644 index 000000000000..f35cbbba6b4a --- /dev/null +++ b/receiver/prometheusremotewritereceiver/doc.go @@ -0,0 +1,6 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +//go:generate mdatagen metadata.yaml + +package prometheusremotewritereceiver // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusremotewritereceiver" diff --git a/receiver/prometheusremotewritereceiver/generated_component_test.go b/receiver/prometheusremotewritereceiver/generated_component_test.go new file mode 100644 index 000000000000..b321cd02dff1 --- /dev/null +++ b/receiver/prometheusremotewritereceiver/generated_component_test.go @@ -0,0 +1,69 @@ +// Code generated by mdatagen. DO NOT EDIT. + +package prometheusremotewritereceiver + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/confmap/confmaptest" + "go.opentelemetry.io/collector/consumer/consumertest" + "go.opentelemetry.io/collector/receiver" + "go.opentelemetry.io/collector/receiver/receivertest" +) + +func TestComponentFactoryType(t *testing.T) { + require.Equal(t, "prometheusremotewrite", NewFactory().Type().String()) +} + +func TestComponentConfigStruct(t *testing.T) { + require.NoError(t, componenttest.CheckConfigStruct(NewFactory().CreateDefaultConfig())) +} + +func TestComponentLifecycle(t *testing.T) { + factory := NewFactory() + + tests := []struct { + name string + createFn func(ctx context.Context, set receiver.Settings, cfg component.Config) (component.Component, error) + }{ + + { + name: "metrics", + createFn: func(ctx context.Context, set receiver.Settings, cfg component.Config) (component.Component, error) { + return factory.CreateMetrics(ctx, set, cfg, consumertest.NewNop()) + }, + }, + } + + cm, err := confmaptest.LoadConf("metadata.yaml") + require.NoError(t, err) + cfg := factory.CreateDefaultConfig() + sub, err := cm.Sub("tests::config") + require.NoError(t, err) + require.NoError(t, sub.Unmarshal(&cfg)) + + for _, tt := range tests { + t.Run(tt.name+"-shutdown", func(t *testing.T) { + c, err := tt.createFn(context.Background(), receivertest.NewNopSettings(), cfg) + require.NoError(t, err) + err = c.Shutdown(context.Background()) + require.NoError(t, err) + }) + t.Run(tt.name+"-lifecycle", func(t *testing.T) { + firstRcvr, err := tt.createFn(context.Background(), receivertest.NewNopSettings(), cfg) + require.NoError(t, err) + host := componenttest.NewNopHost() + require.NoError(t, err) + require.NoError(t, firstRcvr.Start(context.Background(), host)) + require.NoError(t, firstRcvr.Shutdown(context.Background())) + secondRcvr, err := tt.createFn(context.Background(), receivertest.NewNopSettings(), 
cfg) + require.NoError(t, err) + require.NoError(t, secondRcvr.Start(context.Background(), host)) + require.NoError(t, secondRcvr.Shutdown(context.Background())) + }) + } +} diff --git a/receiver/prometheusremotewritereceiver/generated_package_test.go b/receiver/prometheusremotewritereceiver/generated_package_test.go new file mode 100644 index 000000000000..3435affa17ea --- /dev/null +++ b/receiver/prometheusremotewritereceiver/generated_package_test.go @@ -0,0 +1,13 @@ +// Code generated by mdatagen. DO NOT EDIT. + +package prometheusremotewritereceiver + +import ( + "testing" + + "go.uber.org/goleak" +) + +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m) +} diff --git a/receiver/prometheusremotewritereceiver/go.mod b/receiver/prometheusremotewritereceiver/go.mod index f43f60408143..8ebded9ec08b 100644 --- a/receiver/prometheusremotewritereceiver/go.mod +++ b/receiver/prometheusremotewritereceiver/go.mod @@ -7,9 +7,11 @@ require ( go.opentelemetry.io/collector/component v0.111.0 go.opentelemetry.io/collector/component/componentstatus v0.111.0 go.opentelemetry.io/collector/config/confighttp v0.111.0 + go.opentelemetry.io/collector/confmap v1.17.0 go.opentelemetry.io/collector/consumer v0.111.0 go.opentelemetry.io/collector/consumer/consumertest v0.111.0 go.opentelemetry.io/collector/receiver v0.111.0 + go.uber.org/goleak v1.3.0 ) require ( @@ -18,11 +20,17 @@ require ( github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-viper/mapstructure/v2 v2.1.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.6.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.17.10 // indirect + github.com/knadh/koanf/maps v0.1.1 // indirect + github.com/knadh/koanf/providers/confmap v0.1.0 // indirect + github.com/knadh/koanf/v2 v2.1.1 // indirect + github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect diff --git a/receiver/prometheusremotewritereceiver/go.sum b/receiver/prometheusremotewritereceiver/go.sum index d7ed8e0b94bf..5300efd5812c 100644 --- a/receiver/prometheusremotewritereceiver/go.sum +++ b/receiver/prometheusremotewritereceiver/go.sum @@ -10,6 +10,8 @@ github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= +github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= @@ -25,10 +27,20 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod 
h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs= +github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/providers/confmap v0.1.0 h1:gOkxhHkemwG4LezxxN8DMOFopOPghxRVp7JbIvdvqzU= +github.com/knadh/koanf/providers/confmap v0.1.0/go.mod h1:2uLhxQzJnyHKfxG927awZC7+fyHFdQkd697K4MdLnIU= +github.com/knadh/koanf/v2 v2.1.1 h1:/R8eXqasSTsmDCsAyYj+81Wteg8AqrV9CP6gvsTsOmM= +github.com/knadh/koanf/v2 v2.1.1/go.mod h1:4mnTRbZCK+ALuBXHZMjDfG9y714L7TykVnZkXbMU3Es= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -66,6 +78,8 @@ go.opentelemetry.io/collector/config/configtls v1.17.0 h1:5DPgmBgpKEopLGmkjaihZH go.opentelemetry.io/collector/config/configtls v1.17.0/go.mod h1:xUV5/xAHJbwrCuT2rGurBGSUqyFFAVVBcQ5DJAENeCc= go.opentelemetry.io/collector/config/internal v0.111.0 h1:HTrN9xCpX42xlyDskWbhA/2NkSjMasxNEuGkmjjq7Q8= go.opentelemetry.io/collector/config/internal v0.111.0/go.mod h1:yC7E4h1Uj0SubxcFImh6OvBHFTjMh99+A5PuyIgDWqc= +go.opentelemetry.io/collector/confmap v1.17.0 h1:5UKHtPGtzNGaOGBsJ6aFpvsKElNUXOVuErBfC0eTWLM= +go.opentelemetry.io/collector/confmap v1.17.0/go.mod h1:GrIZ12P/9DPOuTpe2PIS51a0P/ZM6iKtByVee1Uf3+k= go.opentelemetry.io/collector/consumer v0.111.0 h1:d2kRTDnu+p0q4D5fTU+Pk59KRm5F2JRYrk30Ep5j0xI= go.opentelemetry.io/collector/consumer v0.111.0/go.mod h1:FjY9bPbVkFZLKKxnNbGsIqaz3lcFDKGf+7wxA1uCugs= go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 h1:w9kGdTaXdwD/ZtbxVOvuYQEFKBX3THQgEz/enQnMt9s= diff --git a/receiver/prometheusremotewritereceiver/internal/metadata/generated_status.go b/receiver/prometheusremotewritereceiver/internal/metadata/generated_status.go index a62550fe69aa..0a755b78d5e8 100644 --- a/receiver/prometheusremotewritereceiver/internal/metadata/generated_status.go +++ b/receiver/prometheusremotewritereceiver/internal/metadata/generated_status.go @@ -8,7 +8,7 @@ import ( var ( Type = component.MustNewType("prometheusremotewrite") - ScopeName = "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver" + ScopeName = "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusremotewritereceiver" ) const ( diff --git 
a/receiver/pulsarreceiver/jaeger_unmarshaler_test.go b/receiver/pulsarreceiver/jaeger_unmarshaler_test.go index 70849f70ba59..c8446637cf4f 100644 --- a/receiver/pulsarreceiver/jaeger_unmarshaler_test.go +++ b/receiver/pulsarreceiver/jaeger_unmarshaler_test.go @@ -25,8 +25,7 @@ func TestUnmarshalJaeger(t *testing.T) { span.SetEndTimestamp(pcommon.Timestamp(20)) span.SetTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) span.SetSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - batches, err := jaeger.ProtoFromTraces(td) - require.NoError(t, err) + batches := jaeger.ProtoFromTraces(td) protoBytes, err := batches[0].Spans[0].Marshal() require.NoError(t, err) diff --git a/testbed/datasenders/jaeger.go b/testbed/datasenders/jaeger.go index 6e3bedb76543..a8d6a49286ac 100644 --- a/testbed/datasenders/jaeger.go +++ b/testbed/datasenders/jaeger.go @@ -17,7 +17,6 @@ import ( "go.opentelemetry.io/collector/config/configretry" "go.opentelemetry.io/collector/config/configtls" "go.opentelemetry.io/collector/consumer" - "go.opentelemetry.io/collector/consumer/consumererror" "go.opentelemetry.io/collector/exporter" "go.opentelemetry.io/collector/exporter/exporterhelper" "go.opentelemetry.io/collector/exporter/exportertest" @@ -147,17 +146,14 @@ func (s *protoGRPCSender) pushTraces( td ptrace.Traces, ) error { - batches, err := jaeger.ProtoFromTraces(td) - if err != nil { - return consumererror.NewPermanent(fmt.Errorf("failed to push trace data via Jaeger exporter: %w", err)) - } + batches := jaeger.ProtoFromTraces(td) if s.metadata.Len() > 0 { ctx = metadata.NewOutgoingContext(ctx, s.metadata) } for _, batch := range batches { - _, err = s.client.PostSpans( + _, err := s.client.PostSpans( ctx, &jaegerproto.PostSpansRequest{Batch: *batch}, grpc.WaitForReady(s.waitForReady))