Commit
Merge branch 'main' into feat/s3-registry-cleanup
Kavindu-Dodan authored Dec 3, 2024
2 parents 0f0eee1 + a1acad5 commit b17324a
Showing 82 changed files with 1,572 additions and 567 deletions.
7 changes: 6 additions & 1 deletion CHANGELOG.next.asciidoc
@@ -50,8 +50,8 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Remove deprecated awscloudwatch field from Filebeat. {pull}41089[41089]
- The performance of ingesting SQS data with the S3 input has improved by up to 60x for queues with many small events. The `max_number_of_messages` config for SQS mode is now ignored, as the new design no longer needs a manual cap on messages. Instead, use `number_of_workers` to scale the ingestion rate in both S3 and SQS modes. The increased efficiency may increase network bandwidth consumption, which can be throttled by lowering `number_of_workers`. It may also increase the number of events stored in memory, which can be throttled by lowering the configured size of the internal queue (see the tuning sketch below). {pull}40699[40699]
- Fixes filestream logging the error "filestream input with ID 'ID' already exists, this will lead to data duplication[...]" on Kubernetes when using autodiscover. {pull}41585[41585]

- Add kafka compression support for ZSTD.
- Filebeat fails to start if there is any input with a duplicated ID. It logs the duplicated IDs and the offending input configurations. {pull}41731[41731]

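For orientation, the scaling and throttling knobs named in the SQS/S3 entry above map onto configuration roughly as follows. This is a hedged sketch, not part of the diff: the queue URL and sizes are placeholders, and defaults may differ by version.

```yaml
filebeat.inputs:
  - type: aws-s3
    queue_url: https://sqs.us-east-1.amazonaws.com/123456789012/my-queue # placeholder
    # Scales the ingestion rate in both S3 and SQS modes; lower it to
    # throttle network bandwidth. max_number_of_messages is now ignored
    # in SQS mode.
    number_of_workers: 16

# Lowering the internal memory queue size caps how many events are held
# in memory at once.
queue.mem:
  events: 2048
```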
*Heartbeat*

@@ -184,6 +184,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Fix the "No such input type exist: 'salesforce'" error on the Windows/AIX platform. {pull}41664[41664]
- Fix missing key in streaming input logging. {pull}41600[41600]
- Improve S3 object size metric calculation to support situations where Content-Length is not available. {pull}41755[41755]
- Fix handling of http_endpoint request exceeding memory limits. {issue}41764[41764] {pull}41765[41765]

*Heartbeat*

@@ -216,6 +217,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Fix Kubernetes metadata sometimes not being present after startup {pull}41216[41216]
- Do not report non-existent 0 values for RSS metrics in docker/memory {pull}41449[41449]
- Log Cisco Meraki `getDevicePerformanceScores` errors without stopping metrics collection. {pull}41622[41622]
- Don't skip first bucket value in GCP metrics metricset for distribution type metrics {pull}41822[41822]


*Osquerybeat*
@@ -345,7 +347,9 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Add support for Journald in the System module. {pull}41555[41555]
- Add ability to remove request trace logs from http_endpoint input. {pull}40005[40005]
- Add ability to remove request trace logs from entityanalytics input. {pull}40004[40004]
- Refactor & cleanup with updates to default values and documentation. {pull}41834[41834]
- Update CEL mito extensions to v1.16.0. {pull}41727[41727]
- Add evaluation state dump debugging option to CEL input. {pull}41335[41335]
- AWS S3 input registry cleanup for untracked s3 objects. {pull}41694[41694]

*Auditbeat*
@@ -397,6 +401,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Bump aerospike-client-go to version v7.7.1 and add support for basic auth in Aerospike module {pull}41233[41233]
- Only watch metadata for ReplicaSets in metricbeat k8s module {pull}41289[41289]
- Add support for region/zone for Vertex AI service in GCP module {pull}41551[41551]
- Add support for location label as an optional configuration parameter in GCP metrics metricset. {issue}41550[41550] {pull}41626[41626]

*Metricbeat*

361 changes: 180 additions & 181 deletions NOTICE.txt

Large diffs are not rendered by default.

54 changes: 27 additions & 27 deletions catalog-info.yaml
@@ -31,7 +31,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -81,7 +81,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -128,7 +128,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -175,7 +175,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -222,7 +222,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -269,7 +269,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -316,7 +316,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -363,7 +363,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -410,7 +410,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -456,7 +456,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -503,7 +503,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -550,7 +550,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -597,7 +597,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -644,7 +644,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -690,7 +690,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -725,7 +725,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -761,7 +761,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -808,7 +808,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -855,7 +855,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -902,7 +902,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -949,7 +949,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -996,7 +996,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -1043,7 +1043,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -1092,7 +1092,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -1128,7 +1128,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -1162,7 +1162,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
@@ -1205,7 +1205,7 @@ metadata:
 spec:
   type: buildkite-pipeline
   owner: group:ingest-fp
-  system: buildkite
+  system: platform-ingest
   implementation:
     apiVersion: buildkite.elastic.dev/v1
     kind: Pipeline
2 changes: 2 additions & 0 deletions docs/devguide/testing.asciidoc
@@ -25,6 +25,8 @@ Integration tests are labelled with the `//go:build integration` build tag and u

To run the integration tests use the `mage goIntegTest` target, which will start the required services using https://docs.docker.com/compose/[docker-compose] and run all integration tests.

It is also possible to run module-specific integration tests. For example, to run only the kafka module's tests, use `MODULE=kafka mage integTest -v`.

It is possible to start the `docker-compose` services manually to allow selecting which specific tests should be run. An example follows for filebeat:

[source,bash]
6 changes: 6 additions & 0 deletions filebeat/beater/filebeat.go
@@ -32,6 +32,7 @@ import (
	"github.com/elastic/beats/v7/filebeat/fileset"
	_ "github.com/elastic/beats/v7/filebeat/include"
	"github.com/elastic/beats/v7/filebeat/input"
	"github.com/elastic/beats/v7/filebeat/input/filestream"
	"github.com/elastic/beats/v7/filebeat/input/filestream/takeover"
	v2 "github.com/elastic/beats/v7/filebeat/input/v2"
	"github.com/elastic/beats/v7/filebeat/input/v2/compat"
@@ -291,6 +292,11 @@ func (fb *Filebeat) Run(b *beat.Beat) error {
	}
	defer stateStore.Close()

	err = filestream.ValidateInputIDs(config.Inputs, logp.NewLogger("input.filestream"))
	if err != nil {
		logp.Err("invalid filestream configuration: %+v", err)
		return err
	}
	err = processLogInputTakeOver(stateStore, config)
	if err != nil {
		logp.Err("Failed to attempt filestream state take over: %+v", err)
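To illustrate the fail-fast behavior added above, a configuration along these lines, with two filestream inputs sharing an ID, now prevents Filebeat from starting (a hedged sketch; IDs and paths are placeholders):

```yaml
filebeat.inputs:
  - type: filestream
    id: app-logs            # duplicated below
    paths:
      - /var/log/app-a/*.log
  - type: filestream
    id: app-logs            # same ID: startup fails with
                            # "filestream inputs validation error: ..."
    paths:
      - /var/log/app-b/*.log
```

Previously such duplicates were only logged and could lead to data duplication; failing at startup surfaces the mistake immediately.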
59 changes: 59 additions & 0 deletions filebeat/input/filestream/config.go
@@ -19,6 +19,7 @@ package filestream

import (
	"fmt"
	"strings"
	"time"

	"github.com/dustin/go-humanize"
@@ -27,6 +28,7 @@ import (
	"github.com/elastic/beats/v7/libbeat/reader/parser"
	"github.com/elastic/beats/v7/libbeat/reader/readfile"
	conf "github.com/elastic/elastic-agent-libs/config"
	"github.com/elastic/elastic-agent-libs/logp"
)

// Config stores the options of a file stream.
@@ -142,3 +144,60 @@ func (c *config) Validate() error {

	return nil
}

// ValidateInputIDs checks all filestream inputs to ensure all input IDs are
// unique. If there is a duplicated ID, it logs an error containing the offending
// input configurations and returns an error containing the duplicated IDs.
// A single empty ID is valid, as it is unique; however, multiple empty IDs are
// not unique and are therefore treated like any other duplicated ID.
func ValidateInputIDs(inputs []*conf.C, logger *logp.Logger) error {
	duplicatedConfigs := make(map[string][]*conf.C)
	var duplicates []string
	for _, input := range inputs {
		fsInput := struct {
			ID   string `config:"id"`
			Type string `config:"type"`
		}{}
		err := input.Unpack(&fsInput)
		if err != nil {
			return fmt.Errorf("failed to unpack filestream input configuration: %w", err)
		}
		if fsInput.Type == "filestream" {
			duplicatedConfigs[fsInput.ID] = append(duplicatedConfigs[fsInput.ID], input)
			// we just need to collect the duplicated IDs once, therefore collect
			// it only the first time we see a duplicated ID.
			if len(duplicatedConfigs[fsInput.ID]) == 2 {
				duplicates = append(duplicates, fsInput.ID)
			}
		}
	}

	if len(duplicates) != 0 {
		jsonDupCfg := collectOffendingInputs(duplicates, duplicatedConfigs)
		logger.Errorw("filestream inputs with duplicated IDs", "inputs", jsonDupCfg)
		var quotedDuplicates []string
		for _, dup := range duplicates {
			quotedDuplicates = append(quotedDuplicates, fmt.Sprintf("%q", dup))
		}
		return fmt.Errorf("filestream inputs validation error: filestream inputs with duplicated IDs: %v", strings.Join(quotedDuplicates, ","))
	}

	return nil
}

func collectOffendingInputs(duplicates []string, ids map[string][]*conf.C) []map[string]interface{} {
	var cfgs []map[string]interface{}

	for _, id := range duplicates {
		for _, dupcfgs := range ids[id] {
			toJson := map[string]interface{}{}
			err := dupcfgs.Unpack(&toJson)
			if err != nil {
				toJson[id] = fmt.Sprintf("failed to unpack config: %v", err)
			}
			cfgs = append(cfgs, toJson)
		}
	}

	return cfgs
}
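Below is a minimal sketch of how `ValidateInputIDs` behaves, written as a hypothetical test that is not part of this diff; it assumes `conf.NewConfigFrom` from `elastic-agent-libs` for building input configurations:

```go
package filestream

import (
	"testing"

	conf "github.com/elastic/elastic-agent-libs/config"
	"github.com/elastic/elastic-agent-libs/logp"
)

func TestValidateInputIDsSketch(t *testing.T) {
	// mk builds a minimal filestream input config with the given ID.
	mk := func(id string) *conf.C {
		c, err := conf.NewConfigFrom(map[string]interface{}{"type": "filestream", "id": id})
		if err != nil {
			t.Fatal(err)
		}
		return c
	}
	logger := logp.NewLogger("test")

	// Distinct IDs (including a single empty ID) pass validation.
	if err := ValidateInputIDs([]*conf.C{mk("a"), mk("")}, logger); err != nil {
		t.Fatalf("expected no error, got %v", err)
	}

	// A shared ID fails, and the error names the duplicated ID once.
	if err := ValidateInputIDs([]*conf.C{mk("dup"), mk("dup")}, logger); err == nil {
		t.Fatal("expected a duplicated-ID validation error")
	}
}
```

Note that the validator runs in `Filebeat.Run` before any input is started, so a bad configuration is rejected up front instead of producing duplicated ingestion.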
