From a4b60c71b23adac7c770246e0deea54384d4a42c Mon Sep 17 00:00:00 2001
From: Cole Laven <82364622+colelaven@users.noreply.github.com>
Date: Tue, 8 Oct 2024 09:41:59 -0400
Subject: [PATCH] [processor/logdedup] feat: add ottl condition to logdedup processor (#35443)

**Description:** Adds OTTL Condition field to Deduplicate Logs Processor

**Link to tracking Issue:** Closes #35440

**Testing:**
- Tested functionality with BindPlane
- Added unit tests for the condition logic

**Documentation:** Added documentation to the logdedup processor README about the condition field and an example configuration with a condition.

---------

Co-authored-by: Mike Goldsmith
---
 ...t_add-condition-to-logdedup-processor.yaml | 27 +++
 pkg/pdatatest/plogtest/logs_test.go | 8 +
 pkg/pdatatest/plogtest/options.go | 38 ++++-
 .../actual.yaml | 12 ++
 .../expected.yaml | 12 ++
 processor/logdedupprocessor/README.md | 45 ++++-
 processor/logdedupprocessor/config.go | 2 +
 processor/logdedupprocessor/config_test.go | 1 +
 processor/logdedupprocessor/factory.go | 24 ++-
 processor/logdedupprocessor/factory_test.go | 31 ++++
 .../generated_component_telemetry_test.go | 7 +-
 processor/logdedupprocessor/go.mod | 35 +++-
 processor/logdedupprocessor/go.sum | 65 +++++++-
 .../internal/metadata/generated_telemetry.go | 5 +-
 .../metadata/generated_telemetry_test.go | 3 +-
 processor/logdedupprocessor/processor.go | 45 ++++-
 processor/logdedupprocessor/processor_test.go | 154 ++++++++++++++----
 .../testdata/conditionConsumedLogs.yaml | 69 ++++++++
 .../testdata/expected/basicLogs.yaml | 32 ++++
 .../expected/conditionConsumedLogs.yaml | 69 ++++++++
 .../expected/conditionDedupedLogs.yaml | 31 ++++
 .../multipleConditionsConsumedLogs.yaml | 37 +++++
 .../multipleConditionsDedupedLogs.yaml | 57 +++++++
 .../testdata/input/basicLogs.yaml | 38 +++++
 .../testdata/input/conditionLogs.yaml | 101 ++++++++++++
 25 files changed, 881 insertions(+), 67 deletions(-)
 create mode 100644 .chloggen/feat_add-condition-to-logdedup-processor.yaml
 create mode 100644 pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml
 create mode 100644 pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml
 create mode 100644 processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/expected/basicLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/input/basicLogs.yaml
 create mode 100644 processor/logdedupprocessor/testdata/input/conditionLogs.yaml

diff --git a/.chloggen/feat_add-condition-to-logdedup-processor.yaml b/.chloggen/feat_add-condition-to-logdedup-processor.yaml
new file mode 100644
index 000000000000..bd7a30d6e5d9
--- /dev/null
+++ b/.chloggen/feat_add-condition-to-logdedup-processor.yaml
@@ -0,0 +1,27 @@
+# Use this changelog template to create an entry for release notes.
+
+# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
+change_type: enhancement
+
+# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
+component: logdedupprocessor
+
+# A brief description of the change.
Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Add a `condition` field to the Log DeDuplication Processor. + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35440] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [] diff --git a/pkg/pdatatest/plogtest/logs_test.go b/pkg/pdatatest/plogtest/logs_test.go index 345823e1846a..c8910d24c3ef 100644 --- a/pkg/pdatatest/plogtest/logs_test.go +++ b/pkg/pdatatest/plogtest/logs_test.go @@ -142,6 +142,14 @@ func TestCompareLogs(t *testing.T) { withoutOptions: errors.New(`resource "map[]": scope "collector": log record "map[]": timestamp doesn't match expected: 11651379494838206465, actual: 11651379494838206464`), withOptions: nil, }, + { + name: "ignore-log-record-attribute-value", + compareOptions: []CompareLogsOption{ + IgnoreLogRecordAttributeValue("Key1"), + }, + withoutOptions: errors.New(`resource "map[]": scope "": missing expected log record: map[Key1:Val2]; resource "map[]": scope "": unexpected log record: map[Key1:Val1]`), + withOptions: nil, + }, } for _, tc := range tcs { diff --git a/pkg/pdatatest/plogtest/options.go b/pkg/pdatatest/plogtest/options.go index 412efd7d7eb5..632b76297c99 100644 --- a/pkg/pdatatest/plogtest/options.go +++ b/pkg/pdatatest/plogtest/options.go @@ -42,13 +42,47 @@ func (opt ignoreResourceAttributeValue) applyOnLogs(expected, actual plog.Logs) opt.maskLogsResourceAttributeValue(actual) } -func (opt ignoreResourceAttributeValue) maskLogsResourceAttributeValue(metrics plog.Logs) { - rls := metrics.ResourceLogs() +func (opt ignoreResourceAttributeValue) maskLogsResourceAttributeValue(logs plog.Logs) { + rls := logs.ResourceLogs() for i := 0; i < rls.Len(); i++ { internal.MaskResourceAttributeValue(rls.At(i).Resource(), opt.attributeName) } } +// IgnoreLogRecordAttributeValue is a CompareLogsOption that sets the value of an attribute +// to empty bytes for every log record +func IgnoreLogRecordAttributeValue(attributeName string) CompareLogsOption { + return ignoreLogRecordAttributeValue{ + attributeName: attributeName, + } +} + +type ignoreLogRecordAttributeValue struct { + attributeName string +} + +func (opt ignoreLogRecordAttributeValue) applyOnLogs(expected, actual plog.Logs) { + opt.maskLogRecordAttributeValue(expected) + opt.maskLogRecordAttributeValue(actual) +} + +func (opt ignoreLogRecordAttributeValue) maskLogRecordAttributeValue(logs plog.Logs) { + rls := logs.ResourceLogs() + for i := 0; i < logs.ResourceLogs().Len(); i++ { + sls := rls.At(i).ScopeLogs() + for j := 0; j < sls.Len(); j++ { + lrs := sls.At(j).LogRecords() + for k := 0; k < lrs.Len(); k++ { + lr := lrs.At(k) + val, exists := lr.Attributes().Get(opt.attributeName) + if exists { + val.SetEmptyBytes() + } + } + } + } +} + func IgnoreTimestamp() CompareLogsOption { return compareLogsOptionFunc(func(expected, actual 
plog.Logs) { now := pcommon.NewTimestampFromTime(time.Now()) diff --git a/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml new file mode 100644 index 000000000000..5fed2e0e0108 --- /dev/null +++ b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/actual.yaml @@ -0,0 +1,12 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: Key1 + value: + stringValue: Val1 + body: {} + spanId: "" + traceId: "" + scope: {} diff --git a/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml new file mode 100644 index 000000000000..fbd1af584d58 --- /dev/null +++ b/pkg/pdatatest/plogtest/testdata/ignore-log-record-attribute-value/expected.yaml @@ -0,0 +1,12 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: Key1 + value: + stringValue: Val2 + body: {} + spanId: "" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/README.md b/processor/logdedupprocessor/README.md index edb5bf2a40c1..004f7862caac 100644 --- a/processor/logdedupprocessor/README.md +++ b/processor/logdedupprocessor/README.md @@ -15,7 +15,7 @@ This processor is used to deduplicate logs by detecting identical logs over a ra ## How It Works 1. The user configures the log deduplication processor in the desired logs pipeline. -2. All logs sent to the processor and aggregated over the configured `interval`. Logs are considered identical if they have the same body, resource attributes, severity, and log attributes. +2. If the processor does not provide `conditions`, all logs are considered eligible for aggregation. If the processor does have configured `conditions`, all log entries where at least one of the `conditions` evaluates `true` are considered eligible for aggregation. Eligible identical logs are aggregated over the configured `interval`. Logs are considered identical if they have the same body, resource attributes, severity, and log attributes. Logs that do not match any condition in `conditions` are passed onward in the pipeline without aggregating. 3. After the interval, the processor emits a single log with the count of logs that were deduplicated. The emitted log will have the same body, resource attributes, severity, and log attributes as the original log. The emitted log will also have the following new attributes: - `log_count`: The count of logs that were deduplicated over the interval. The name of the attribute is configurable via the `log_count_attribute` parameter. @@ -25,13 +25,17 @@ This processor is used to deduplicate logs by detecting identical logs over a ra **Note**: The `ObservedTimestamp` and `Timestamp` of the emitted log will be the time that the aggregated log was emitted and will not be the same as the `ObservedTimestamp` and `Timestamp` of the original logs. ## Configuration -| Field | Type | Default | Description | -| --- | --- | --- | --- | -| interval | duration | `10s` | The interval at which logs are aggregated. The counter will reset after each interval. | -| log_count_attribute | string | `log_count` | The name of the count attribute of deduplicated logs that will be added to the emitted aggregated log. | -| timezone | string | `UTC` | The timezone of the `first_observed_timestamp` and `last_observed_timestamp` timestamps on the emitted aggregated log. 
The available locations depend on the local IANA Time Zone database. [This page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) contains many examples, such as `America/New_York`. | -| exclude_fields | []string | `[]` | Fields to exclude from duplication matching. Fields can be excluded from the log `body` or `attributes`. These fields will not be present in the emitted aggregated log. Nested fields must be `.` delimited. If a field contains a `.` it can be escaped by using a `\` see [example config](#example-config-with-excluded-fields).

**Note**: The entire `body` cannot be excluded. If the body is a map then fields within it can be excluded. |
+| Field | Type | Default | Description |
+| --- | --- | --- | --- |
+| interval | duration | `10s` | The interval at which logs are aggregated. The counter will reset after each interval. |
+| conditions | []string | `[]` | A slice of [OTTL] expressions used to evaluate which log records are deduped. All paths in the [log context] are available to reference. All [converters] are available to use. |
+| log_count_attribute | string | `log_count` | The name of the count attribute of deduplicated logs that will be added to the emitted aggregated log. |
+| timezone | string | `UTC` | The timezone of the `first_observed_timestamp` and `last_observed_timestamp` timestamps on the emitted aggregated log. The available locations depend on the local IANA Time Zone database. [This page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) contains many examples, such as `America/New_York`. |
+| exclude_fields | []string | `[]` | Fields to exclude from duplication matching. Fields can be excluded from the log `body` or `attributes`. These fields will not be present in the emitted aggregated log. Nested fields must be `.` delimited. If a field contains a `.`, it can be escaped by using a `\`; see the [example config](#example-config-with-excluded-fields).

**Note**: The entire `body` cannot be excluded. If the body is a map then fields within it can be excluded. | +[OTTL]: https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/v0.109.0/pkg/ottl#readme +[converters]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.109.0/pkg/ottl/ottlfuncs/README.md#converters +[log context]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.109.0/pkg/ottl/contexts/ottllog/README.md ### Example Config The following config is an example configuration for the log deduplication processor. It is configured with an aggregation interval of `60 seconds`, a timezone of `America/Los_Angeles`, and a log count attribute of `dedup_count`. It has no fields being excluded. @@ -82,3 +86,30 @@ service: processors: [logdedup] exporters: [googlecloud] ``` + + +### Example Config with Conditions +The following config is an example configuration that only performs the deduping process on telemetry where Attribute `ID` equals `1` OR where Resource Attribute `service.name` equals `my-service`: + +```yaml +receivers: + filelog: + include: [./example/*.log] +processors: + logdedup: + conditions: + - attributes["ID"] == 1 + - resource.attributes["service.name"] == "my-service" + interval: 60s + log_count_attribute: dedup_count + timezone: 'America/Los_Angeles' +exporters: + googlecloud: + +service: + pipelines: + logs: + receivers: [filelog] + processors: [logdedup] + exporters: [googlecloud] +``` diff --git a/processor/logdedupprocessor/config.go b/processor/logdedupprocessor/config.go index eb99fcadd98d..04459d3ecbf8 100644 --- a/processor/logdedupprocessor/config.go +++ b/processor/logdedupprocessor/config.go @@ -44,6 +44,7 @@ type Config struct { Interval time.Duration `mapstructure:"interval"` Timezone string `mapstructure:"timezone"` ExcludeFields []string `mapstructure:"exclude_fields"` + Conditions []string `mapstructure:"conditions"` } // createDefaultConfig returns the default config for the processor. 
@@ -53,6 +54,7 @@ func createDefaultConfig() component.Config { Interval: defaultInterval, Timezone: defaultTimezone, ExcludeFields: []string{}, + Conditions: []string{}, } } diff --git a/processor/logdedupprocessor/config_test.go b/processor/logdedupprocessor/config_test.go index 94e3bc771922..a4e8fb120cd5 100644 --- a/processor/logdedupprocessor/config_test.go +++ b/processor/logdedupprocessor/config_test.go @@ -90,6 +90,7 @@ func TestValidateConfig(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{"body.thing", "attributes.otherthing"}, }, expectedErr: nil, diff --git a/processor/logdedupprocessor/factory.go b/processor/logdedupprocessor/factory.go index 1bcd5fc37d2f..0931e06cc176 100644 --- a/processor/logdedupprocessor/factory.go +++ b/processor/logdedupprocessor/factory.go @@ -11,6 +11,8 @@ import ( "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/processor" + "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/filterottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/logdedupprocessor/internal/metadata" ) @@ -30,5 +32,25 @@ func createLogsProcessor(_ context.Context, settings processor.Settings, cfg com return nil, fmt.Errorf("invalid config type: %+v", cfg) } - return newProcessor(processorCfg, consumer, settings) + processor, err := newProcessor(processorCfg, consumer, settings) + if err != nil { + return nil, fmt.Errorf("error creating processor: %w", err) + } + + if len(processorCfg.Conditions) == 0 { + processor.conditions = nil + } else { + conditions, err := filterottl.NewBoolExprForLog( + processorCfg.Conditions, + filterottl.StandardLogFuncs(), + ottl.PropagateError, + settings.TelemetrySettings, + ) + if err != nil { + return nil, fmt.Errorf("invalid condition: %w", err) + } + processor.conditions = conditions + } + + return processor, nil } diff --git a/processor/logdedupprocessor/factory_test.go b/processor/logdedupprocessor/factory_test.go index 9dfee3556611..1db3284ca45b 100644 --- a/processor/logdedupprocessor/factory_test.go +++ b/processor/logdedupprocessor/factory_test.go @@ -37,6 +37,37 @@ func TestCreateLogs(t *testing.T) { cfg: nil, expectedErr: "invalid config type", }, + { + name: "valid custom condition", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"false"}, + }, + }, + { + name: "valid multiple conditions", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"false", `(attributes["ID"] == 1)`}, + }, + }, + { + name: "invalid condition", + cfg: &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: defaultInterval, + Timezone: defaultTimezone, + ExcludeFields: []string{}, + Conditions: []string{"x"}, + }, + expectedErr: "invalid condition", + }, } for _, tc := range testCases { diff --git a/processor/logdedupprocessor/generated_component_telemetry_test.go b/processor/logdedupprocessor/generated_component_telemetry_test.go index f4dc8b79694b..3254ebfd89d1 100644 --- a/processor/logdedupprocessor/generated_component_telemetry_test.go +++ b/processor/logdedupprocessor/generated_component_telemetry_test.go @@ -7,13 +7,12 @@ import ( "testing" 
"github.com/stretchr/testify/require" - sdkmetric "go.opentelemetry.io/otel/sdk/metric" - "go.opentelemetry.io/otel/sdk/metric/metricdata" - "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" - "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/processor" "go.opentelemetry.io/collector/processor/processortest" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.opentelemetry.io/otel/sdk/metric/metricdata" + "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" ) type componentTestTelemetry struct { diff --git a/processor/logdedupprocessor/go.mod b/processor/logdedupprocessor/go.mod index d0ca80ca9a36..038b6efe3ab2 100644 --- a/processor/logdedupprocessor/go.mod +++ b/processor/logdedupprocessor/go.mod @@ -3,6 +3,10 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/processor/logde go 1.22.0 require ( + github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl v0.111.0 + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.111.0 github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.111.0 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/collector/component v0.111.0 @@ -26,20 +30,34 @@ require ( ) require ( + github.com/alecthomas/participle/v2 v2.1.1 // indirect + github.com/antchfx/xmlquery v1.4.1 // indirect + github.com/antchfx/xpath v1.3.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/elastic/go-grok v0.3.1 // indirect + github.com/elastic/lunes v0.1.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-viper/mapstructure/v2 v2.1.0 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/goccy/go-json v0.10.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect github.com/knadh/koanf/v2 v2.1.1 // indirect + github.com/magefile/mage v1.15.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 // indirect + github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6 // indirect go.opentelemetry.io/collector/component/componentstatus v0.111.0 // indirect go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.0 // indirect go.opentelemetry.io/collector/internal/globalsignal v0.111.0 // indirect @@ -47,15 +65,28 @@ require ( go.opentelemetry.io/collector/pdata/testdata v0.111.0 // indirect go.opentelemetry.io/collector/pipeline v0.111.0 // indirect go.opentelemetry.io/collector/processor/processorprofiles v0.111.0 // indirect + 
go.opentelemetry.io/collector/semconv v0.111.0 // indirect go.opentelemetry.io/otel v1.30.0 // indirect go.opentelemetry.io/otel/sdk v1.30.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/net v0.28.0 // indirect + golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect + golang.org/x/net v0.29.0 // indirect golang.org/x/sys v0.25.0 // indirect - golang.org/x/text v0.17.0 // indirect + golang.org/x/text v0.18.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect google.golang.org/grpc v1.67.1 // indirect google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect ) replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil => ../../pkg/pdatautil + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest => ../../pkg/pdatatest + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden => ../../pkg/golden + +replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl => ../../pkg/ottl + +replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter => ../../internal/filter + +replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal => ../../internal/coreinternal diff --git a/processor/logdedupprocessor/go.sum b/processor/logdedupprocessor/go.sum index 18d96827a0f8..c2135021a81d 100644 --- a/processor/logdedupprocessor/go.sum +++ b/processor/logdedupprocessor/go.sum @@ -1,8 +1,22 @@ +github.com/alecthomas/assert/v2 v2.3.0 h1:mAsH2wmvjsuvyBvAmCtm7zFsBlb8mIHx5ySLVdDZXL0= +github.com/alecthomas/assert/v2 v2.3.0/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= +github.com/alecthomas/participle/v2 v2.1.1 h1:hrjKESvSqGHzRb4yW1ciisFJ4p3MGYih6icjJvbsmV8= +github.com/alecthomas/participle/v2 v2.1.1/go.mod h1:Y1+hAs8DHPmc3YUFzqllV+eSQ9ljPTk0ZkPMtEdAx2c= +github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= +github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/antchfx/xmlquery v1.4.1 h1:YgpSwbeWvLp557YFTi8E3z6t6/hYjmFEtiEKbDfEbl0= +github.com/antchfx/xmlquery v1.4.1/go.mod h1:lKezcT8ELGt8kW5L+ckFMTbgdR61/odpPgDv8Gvi1fI= +github.com/antchfx/xpath v1.3.1 h1:PNbFuUqHwWl0xRjvUPjJ95Agbmdj2uzzIwmQKgu4oCk= +github.com/antchfx/xpath v1.3.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elastic/go-grok v0.3.1 h1:WEhUxe2KrwycMnlvMimJXvzRa7DoByJB4PVUIE1ZD/U= +github.com/elastic/go-grok v0.3.1/go.mod h1:n38ls8ZgOboZRgKcjMY8eFeZFMmcL9n2lP0iHhIDk64= +github.com/elastic/lunes v0.1.0 h1:amRtLPjwkWtzDF/RKzcEPMvSsSseLDLW+bnhfNSLRe4= +github.com/elastic/lunes v0.1.0/go.mod h1:xGphYIt3XdZRtyWosHQTErsQTd4OP1p9wsbVoHelrd4= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod 
h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -10,13 +24,27 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= @@ -31,6 +59,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= +github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= @@ -48,8 +78,11 @@ github.com/stretchr/objx 
v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6 h1:SIKIoA4e/5Y9ZOl0DCe3eVMLPOQzJxgZpfdHHeauNTM= +github.com/ua-parser/uap-go v0.0.0-20240611065828-3a4781585db6/go.mod h1:BUbeWZiieNxAuuADTBNb3/aeje6on3DhU3rpWsQSB1E= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.opentelemetry.io/collector/component v0.111.0 h1:AiDIrhkq6sbHnU9Rhq6t4DC4Gal43bryd1+NTJNojAQ= go.opentelemetry.io/collector/component v0.111.0/go.mod h1:wYwbRuhzK5bm5x1bX+ukm1tT50QXYLs4MKwzyfiVGoE= go.opentelemetry.io/collector/component/componentstatus v0.111.0 h1:DojO8TbkysTtEoxzN6fJqhgCsu0QhxgJ9R+1bitnowM= @@ -78,6 +111,8 @@ go.opentelemetry.io/collector/processor v0.111.0 h1:85Llb9ekzzvzAXgFaw/n7LHFJ5QA go.opentelemetry.io/collector/processor v0.111.0/go.mod h1:78Z4f96j9trPFZIRCiQk6nVRo6vua4cW9VYNfHTBsvo= go.opentelemetry.io/collector/processor/processorprofiles v0.111.0 h1:QxnwbqClJvS7zDWgsIaqqDs5YsmHgFvmZKQsmoLTqJM= go.opentelemetry.io/collector/processor/processorprofiles v0.111.0/go.mod h1:8qPd8Af0XX7Wlupe8JHmdhkKMiiJ5AO7OEFYW3fN0CQ= +go.opentelemetry.io/collector/semconv v0.111.0 h1:ELleMtLBzeZ3xhfhYPmFcLc0hJMqRxhOB0eY60WLivw= +go.opentelemetry.io/collector/semconv v0.111.0/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A= go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= @@ -97,30 +132,49 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.28.0 
h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= -golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= +golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -134,5 +188,8 @@ google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWn gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/processor/logdedupprocessor/internal/metadata/generated_telemetry.go b/processor/logdedupprocessor/internal/metadata/generated_telemetry.go index c0cdc9e39ec3..0fd0abafca48 100644 --- a/processor/logdedupprocessor/internal/metadata/generated_telemetry.go +++ b/processor/logdedupprocessor/internal/metadata/generated_telemetry.go @@ -5,12 +5,11 @@ package metadata import ( "errors" + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configtelemetry" "go.opentelemetry.io/otel/metric" "go.opentelemetry.io/otel/metric/noop" "go.opentelemetry.io/otel/trace" - - "go.opentelemetry.io/collector/component" - "go.opentelemetry.io/collector/config/configtelemetry" ) func Meter(settings component.TelemetrySettings) metric.Meter { diff --git a/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go b/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go index 49a5cbdefd75..bd8fe035313b 100644 --- a/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go +++ b/processor/logdedupprocessor/internal/metadata/generated_telemetry_test.go @@ -6,14 +6,13 @@ import ( "testing" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/otel/metric" embeddedmetric "go.opentelemetry.io/otel/metric/embedded" noopmetric "go.opentelemetry.io/otel/metric/noop" "go.opentelemetry.io/otel/trace" embeddedtrace "go.opentelemetry.io/otel/trace/embedded" nooptrace "go.opentelemetry.io/otel/trace/noop" - - "go.opentelemetry.io/collector/component" ) type mockMeter struct { diff --git a/processor/logdedupprocessor/processor.go b/processor/logdedupprocessor/processor.go index 4263da3400dd..804c312bff0d 100644 --- a/processor/logdedupprocessor/processor.go +++ b/processor/logdedupprocessor/processor.go @@ -11,16 +11,20 @@ import ( "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/processor" "go.uber.org/zap" + "github.com/open-telemetry/opentelemetry-collector-contrib/internal/filter/expr" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllog" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/logdedupprocessor/internal/metadata" ) // logDedupProcessor is a logDedupProcessor that counts duplicate instances of logs. 
type logDedupProcessor struct { emitInterval time.Duration + conditions expr.BoolExpr[ottllog.TransformContext] aggregator *logAggregator remover *fieldRemover nextConsumer consumer.Logs @@ -78,7 +82,7 @@ func (p *logDedupProcessor) Shutdown(_ context.Context) error { } // ConsumeLogs processes the logs. -func (p *logDedupProcessor) ConsumeLogs(_ context.Context, pl plog.Logs) error { +func (p *logDedupProcessor) ConsumeLogs(ctx context.Context, pl plog.Logs) error { p.mux.Lock() defer p.mux.Unlock() @@ -89,21 +93,44 @@ func (p *logDedupProcessor) ConsumeLogs(_ context.Context, pl plog.Logs) error { for j := 0; j < rl.ScopeLogs().Len(); j++ { sl := rl.ScopeLogs().At(j) scope := sl.Scope() + logs := sl.LogRecords() + + logs.RemoveIf(func(logRecord plog.LogRecord) bool { + if p.conditions == nil { + p.aggregateLog(logRecord, scope, resource) + return true + } + + logCtx := ottllog.NewTransformContext(logRecord, scope, resource, sl, rl) + logMatch, err := p.conditions.Eval(ctx, logCtx) + if err != nil { + p.logger.Error("error matching conditions", zap.Error(err)) + return false + } + if logMatch { + p.aggregateLog(logRecord, scope, resource) + } + return logMatch + }) + } + } - for k := 0; k < sl.LogRecords().Len(); k++ { - logRecord := sl.LogRecords().At(k) - // Remove excluded fields if any - p.remover.RemoveFields(logRecord) - - // Add the log to the aggregator - p.aggregator.Add(resource, scope, logRecord) - } + // immediately consume any logs that didn't match any conditions + if pl.LogRecordCount() > 0 { + err := p.nextConsumer.ConsumeLogs(ctx, pl) + if err != nil { + p.logger.Error("failed to consume logs", zap.Error(err)) } } return nil } +func (p *logDedupProcessor) aggregateLog(logRecord plog.LogRecord, scope pcommon.InstrumentationScope, resource pcommon.Resource) { + p.remover.RemoveFields(logRecord) + p.aggregator.Add(resource, scope, logRecord) +} + // handleExportInterval sends metrics at the configured interval. 
func (p *logDedupProcessor) handleExportInterval(ctx context.Context) { defer p.wg.Done() diff --git a/processor/logdedupprocessor/processor_test.go b/processor/logdedupprocessor/processor_test.go index a7727d3cb81e..d5d4c1429f33 100644 --- a/processor/logdedupprocessor/processor_test.go +++ b/processor/logdedupprocessor/processor_test.go @@ -7,6 +7,7 @@ import ( "context" "errors" "fmt" + "path/filepath" "testing" "time" @@ -14,9 +15,11 @@ import ( "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/consumer/consumertest" - "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/processor/processortest" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/golden" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/plogtest" ) func Test_newProcessor(t *testing.T) { @@ -31,6 +34,7 @@ func Test_newProcessor(t *testing.T) { cfg: &Config{ LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, + Conditions: []string{}, Timezone: "bad timezone", }, expected: nil, @@ -41,6 +45,7 @@ func Test_newProcessor(t *testing.T) { cfg: &Config{ LogCountAttribute: defaultLogCountAttribute, Interval: defaultInterval, + Conditions: []string{}, Timezone: defaultTimezone, }, expected: &logDedupProcessor{ @@ -84,10 +89,11 @@ func TestProcessorShutdownCtxError(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) // Start then stop the processor checking for errors @@ -109,13 +115,14 @@ func TestShutdownBeforeStart(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{ fmt.Sprintf("%s.remove_me", attributeField), }, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) require.NotPanics(t, func() { err := p.Shutdown(context.Background()) @@ -130,33 +137,21 @@ func TestProcessorConsume(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, ExcludeFields: []string{ fmt.Sprintf("%s.remove_me", attributeField), }, } // Create a processor - p, err := newProcessor(cfg, logsSink, settings) + p, err := createLogsProcessor(context.Background(), settings, cfg, logsSink) require.NoError(t, err) err = p.Start(context.Background(), componenttest.NewNopHost()) require.NoError(t, err) - // Create plog payload - logRecord1 := generateTestLogRecord(t, "Body of the log") - logRecord2 := generateTestLogRecord(t, "Body of the log") - - // Differ by timestamp and attribute to be removed - logRecord1.SetTimestamp(pcommon.NewTimestampFromTime(time.Now().Add(time.Minute))) - logRecord2.Attributes().PutBool("remove_me", false) - - logs := plog.NewLogs() - rl := logs.ResourceLogs().AppendEmpty() - rl.Resource().Attributes().PutInt("one", 1) - - sl := rl.ScopeLogs().AppendEmpty() - logRecord1.CopyTo(sl.LogRecords().AppendEmpty()) - logRecord2.CopyTo(sl.LogRecords().AppendEmpty()) + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "basicLogs.yaml")) + 
require.NoError(t, err) // Consume the payload err = p.ConsumeLogs(context.Background(), logs) @@ -167,22 +162,13 @@ func TestProcessorConsume(t *testing.T) { return logsSink.LogRecordCount() > 0 }, 3*time.Second, 200*time.Millisecond) + expectedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "basicLogs.yaml")) + require.NoError(t, err) + allSinkLogs := logsSink.AllLogs() require.Len(t, allSinkLogs, 1) - consumedLogs := allSinkLogs[0] - require.Equal(t, 1, consumedLogs.LogRecordCount()) - - require.Equal(t, 1, consumedLogs.ResourceLogs().Len()) - consumedRl := consumedLogs.ResourceLogs().At(0) - require.Equal(t, 1, consumedRl.ScopeLogs().Len()) - consumedSl := consumedRl.ScopeLogs().At(0) - require.Equal(t, 1, consumedSl.LogRecords().Len()) - consumedLogRecord := consumedSl.LogRecords().At(0) - - countVal, ok := consumedLogRecord.Attributes().Get(cfg.LogCountAttribute) - require.True(t, ok) - require.Equal(t, int64(2), countVal.Int()) + require.NoError(t, plogtest.CompareLogs(expectedLogs, allSinkLogs[0], plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) // Cleanup err = p.Shutdown(context.Background()) @@ -195,10 +181,11 @@ func Test_unsetLogsAreExportedOnShutdown(t *testing.T) { LogCountAttribute: defaultLogCountAttribute, Interval: 1 * time.Second, Timezone: defaultTimezone, + Conditions: []string{}, } // Create & start a processor - p, err := newProcessor(cfg, logsSink, processortest.NewNopSettings()) + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) require.NoError(t, err) err = p.Start(context.Background(), componenttest.NewNopHost()) require.NoError(t, err) @@ -221,3 +208,104 @@ func Test_unsetLogsAreExportedOnShutdown(t *testing.T) { exportedLogs := logsSink.AllLogs() require.Len(t, exportedLogs, 1) } + +func TestProcessorConsumeCondition(t *testing.T) { + logsSink := &consumertest.LogsSink{} + cfg := &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: 1 * time.Second, + Timezone: defaultTimezone, + Conditions: []string{`(attributes["ID"] == 1)`}, + ExcludeFields: []string{ + fmt.Sprintf("%s.remove_me", attributeField), + }, + } + + // Create a processor + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) + require.NoError(t, err) + + err = p.Start(context.Background(), componenttest.NewNopHost()) + require.NoError(t, err) + + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "conditionLogs.yaml")) + require.NoError(t, err) + + // Consume the payload + err = p.ConsumeLogs(context.Background(), logs) + require.NoError(t, err) + + // Wait for the logs to be emitted + require.Eventually(t, func() bool { + return logsSink.LogRecordCount() > 4 + }, 3*time.Second, 200*time.Millisecond) + + allSinkLogs := logsSink.AllLogs() + require.Len(t, allSinkLogs, 2) + + expectedConsumedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "conditionConsumedLogs.yaml")) + require.NoError(t, err) + expectedDedupedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "conditionDedupedLogs.yaml")) + require.NoError(t, err) + + consumedLogs := allSinkLogs[0] + dedupedLogs := allSinkLogs[1] + + require.NoError(t, plogtest.CompareLogs(expectedConsumedLogs, consumedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), 
plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + require.NoError(t, plogtest.CompareLogs(expectedDedupedLogs, dedupedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + + // Cleanup + err = p.Shutdown(context.Background()) + require.NoError(t, err) +} + +func TestProcessorConsumeMultipleConditions(t *testing.T) { + logsSink := &consumertest.LogsSink{} + cfg := &Config{ + LogCountAttribute: defaultLogCountAttribute, + Interval: 1 * time.Second, + Timezone: defaultTimezone, + Conditions: []string{`attributes["ID"] == 1`, `attributes["ID"] == 3`}, + ExcludeFields: []string{ + fmt.Sprintf("%s.remove_me", attributeField), + }, + } + + // Create a processor + p, err := createLogsProcessor(context.Background(), processortest.NewNopSettings(), cfg, logsSink) + require.NoError(t, err) + + err = p.Start(context.Background(), componenttest.NewNopHost()) + require.NoError(t, err) + + logs, err := golden.ReadLogs(filepath.Join("testdata", "input", "conditionLogs.yaml")) + require.NoError(t, err) + + // Consume the payload + err = p.ConsumeLogs(context.Background(), logs) + require.NoError(t, err) + + // Wait for the logs to be emitted + require.Eventually(t, func() bool { + return logsSink.LogRecordCount() > 3 + }, 3*time.Second, 200*time.Millisecond) + + allSinkLogs := logsSink.AllLogs() + require.Len(t, allSinkLogs, 2) + + consumedLogs := allSinkLogs[0] + dedupedLogs := allSinkLogs[1] + + expectedConsumedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "multipleConditionsConsumedLogs.yaml")) + require.NoError(t, err) + expectedDedupedLogs, err := golden.ReadLogs(filepath.Join("testdata", "expected", "multipleConditionsDedupedLogs.yaml")) + require.NoError(t, err) + + err = plogtest.CompareLogs(expectedConsumedLogs, consumedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp")) + require.NoError(t, err) + require.NoError(t, plogtest.CompareLogs(expectedDedupedLogs, dedupedLogs, plogtest.IgnoreObservedTimestamp(), plogtest.IgnoreTimestamp(), plogtest.IgnoreLogRecordAttributeValue("first_observed_timestamp"), plogtest.IgnoreLogRecordAttributeValue("last_observed_timestamp"))) + + // Cleanup + err = p.Shutdown(context.Background()) + require.NoError(t, err) +} diff --git a/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml b/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml new file mode 100644 index 000000000000..93d3910a70f7 --- /dev/null +++ b/processor/logdedupprocessor/testdata/conditionConsumedLogs.yaml @@ -0,0 +1,69 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: 
str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/basicLogs.yaml b/processor/logdedupprocessor/testdata/expected/basicLogs.yaml new file mode 100644 index 000000000000..337aebd8429a --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/basicLogs.yaml @@ -0,0 +1,32 @@ +resourceLogs: + - resource: + attributes: + - key: one + value: + intValue: "1" + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:21:47Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:21:47Z" + body: + stringValue: Body of the log + observedTimeUnixNano: "1728069707998122000" + severityText: info + spanId: "" + timeUnixNano: "1728069708998920000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml b/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml new file mode 100644 index 000000000000..93d3910a70f7 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/conditionConsumedLogs.yaml @@ -0,0 +1,69 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml b/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml new file mode 100644 index 000000000000..ada5796a37aa --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/conditionDedupedLogs.yaml @@ -0,0 +1,31 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:40:31Z" + - key: last_observed_timestamp + value: + stringValue: 
"2024-10-04T19:40:31Z" + body: + stringValue: Body of the log1 + observedTimeUnixNano: "1728070831326144000" + severityText: info + spanId: "" + timeUnixNano: "1728070832326078000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml b/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml new file mode 100644 index 000000000000..2001fc0908e2 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/multipleConditionsConsumedLogs.yaml @@ -0,0 +1,37 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml b/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml new file mode 100644 index 000000000000..04c55be19df3 --- /dev/null +++ b/processor/logdedupprocessor/testdata/expected/multipleConditionsDedupedLogs.yaml @@ -0,0 +1,57 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + body: + stringValue: Body of the log1 + observedTimeUnixNano: "1728071199778796000" + severityText: info + spanId: "" + timeUnixNano: "1728071200779450000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + - key: log_count + value: + intValue: "2" + - key: first_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + - key: last_observed_timestamp + value: + stringValue: "2024-10-04T19:46:39Z" + body: + stringValue: Body of the log3 + observedTimeUnixNano: "1728071199778800000" + severityText: info + spanId: "" + timeUnixNano: "1728071200779466000" + traceId: "" + scope: {} diff --git a/processor/logdedupprocessor/testdata/input/basicLogs.yaml b/processor/logdedupprocessor/testdata/input/basicLogs.yaml new file mode 100644 index 000000000000..601a729be289 --- /dev/null +++ b/processor/logdedupprocessor/testdata/input/basicLogs.yaml @@ -0,0 +1,38 @@ +resourceLogs: + - resource: + attributes: + - key: one + value: + intValue: "1" + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + body: + stringValue: Body of the log + severityText: info + spanId: "" + timeUnixNano: "1728069266547395000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: remove_me + value: + boolValue: false + body: + stringValue: Body of the log + severityText: info + spanId: "" + timeUnixNano: "1728069206547395000" + traceId: "" + scope: {} diff --git 
a/processor/logdedupprocessor/testdata/input/conditionLogs.yaml b/processor/logdedupprocessor/testdata/input/conditionLogs.yaml new file mode 100644 index 000000000000..b353aa339351 --- /dev/null +++ b/processor/logdedupprocessor/testdata/input/conditionLogs.yaml @@ -0,0 +1,101 @@ +resourceLogs: + - resource: {} + scopeLogs: + - logRecords: + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + body: + stringValue: Body of the log1 + severityText: info + spanId: "" + timeUnixNano: "1728069385995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069505995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "1" + body: + stringValue: Body of the log1 + severityText: info + spanId: "" + timeUnixNano: "1728069445995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "2" + body: + stringValue: Body of the log2 + severityText: info + spanId: "" + timeUnixNano: "1728069565995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069625995028000" + traceId: "" + - attributes: + - key: bool + value: + boolValue: true + - key: str + value: + stringValue: attr str + - key: ID + value: + intValue: "3" + body: + stringValue: Body of the log3 + severityText: info + spanId: "" + timeUnixNano: "1728069685995028000" + traceId: "" + scope: {}