diff --git a/CHANGELOG.md b/CHANGELOG.md
index ff0c277f15..ff3e74db5c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,29 @@ This document contains a historical list of changes between releases. Only
 changes that impact end-user behavior are listed; changes to documentation or
 internal API changes are not present.
 
+v1.4.2
+-----------------
+
+### Bugfixes
+
+- Update windows_exporter from v0.27.2 to v0.27.3: (@jkroepke)
+  - Fixes a bug where scraping Windows services crashes Alloy
+
+- Update yet-another-cloudwatch-exporter from v0.60.0 to v0.61.0: (@morremeyer)
+  - Fixes a bug where CloudWatch S3 metrics are reported as `0`
+
+- Issue 1687: `otelcol.exporter.awss3` fails to configure (@cydergoth)
+  - Fix parsing of the Level configuration attribute in the `debug_metrics` config block
+  - Ensure the "optional" `debug_metrics` config block really is optional
+
+- Fixed an issue with `loki.process` where `stage.luhn` and `stage.timestamp` would not apply
+  default configuration settings correctly (@thampiotr)
+
+- Fixed an issue with `loki.process` where configuration could be reloaded even if there
+  were no changes. (@ptodev, @thampiotr)
+
+- Fixed an issue where `loki.source.kubernetes` used all target labels instead of a specific set of log-related labels to compute the clustering shard key, resulting in duplicated logs. (@mattdurham)
+
 v1.4.1
 -----------------
diff --git a/VERSION b/VERSION
index 92d0fee589..eb44c421bf 100644
--- a/VERSION
+++ b/VERSION
@@ -20,4 +20,4 @@
 #
 # Lines starting with "#" and blank lines are ignored.
 
-v1.4.1
+v1.4.2
diff --git a/docs/sources/_index.md b/docs/sources/_index.md
index f7ce81a068..68d72358ba 100644
--- a/docs/sources/_index.md
+++ b/docs/sources/_index.md
@@ -4,7 +4,7 @@ title: Grafana Alloy
 description: Grafana Alloy is a a vendor-neutral distribution of the OTel Collector
 weight: 350
 cascade:
-  ALLOY_RELEASE: v1.4.1
+  ALLOY_RELEASE: v1.4.2
   OTEL_VERSION: v0.105.0
   FULL_PRODUCT_NAME: Grafana Alloy
   PRODUCT_NAME: Alloy
diff --git a/docs/sources/reference/components/loki/loki.source.kubernetes.md b/docs/sources/reference/components/loki/loki.source.kubernetes.md
index f9417be63e..6205ec5dbb 100644
--- a/docs/sources/reference/components/loki/loki.source.kubernetes.md
+++ b/docs/sources/reference/components/loki/loki.source.kubernetes.md
@@ -145,6 +145,20 @@ If {{< param "PRODUCT_NAME" >}} is _not_ running in clustered mode, then the blo
 `loki.source.kubernetes` collects logs from every target it receives in its arguments.
 
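For illustration, a minimal `loki.source.kubernetes` setup with clustering enabled might look like the sketch below; the component labels and the `loki.write.default` receiver are assumptions for this example.

```alloy
// Discover Kubernetes pods to tail.
discovery.kubernetes "pods" {
  role = "pod"
}

// Tail logs from the discovered pods. With clustering enabled, each Alloy
// instance only tails the targets that hash to it via the shard key.
loki.source.kubernetes "pods" {
  targets    = discovery.kubernetes.pods.targets
  forward_to = [loki.write.default.receiver]

  clustering {
    enabled = true
  }
}
```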
+Clustering only looks at the following labels for determining the shard key: +* `__pod_namespace__` +* `__pod_name__` +* `__pod_container_name__` +* `__pod_uid__` +* `__meta_kubernetes_namespace` +* `__meta_kubernetes_pod_name` +* `__meta_kubernetes_pod_container_name` +* `__meta_kubernetes_pod_uid` +* `container` +* `pod` +* `job` +* `namespace` + [using clustering]: ../../../../get-started/clustering/ ## Exported fields diff --git a/go.mod b/go.mod index fe5a3f8ab1..d55448cec2 100644 --- a/go.mod +++ b/go.mod @@ -100,7 +100,7 @@ require ( github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f github.com/natefinch/atomic v1.0.1 github.com/ncabatoff/process-exporter v0.7.10 - github.com/nerdswords/yet-another-cloudwatch-exporter v0.60.0 + github.com/nerdswords/yet-another-cloudwatch-exporter v0.61.0 github.com/oklog/run v1.1.0 github.com/olekukonko/tablewriter v0.0.5 github.com/oliver006/redis_exporter v1.54.0 @@ -149,7 +149,7 @@ require ( github.com/prometheus-community/elasticsearch_exporter v1.5.0 github.com/prometheus-community/postgres_exporter v0.11.1 github.com/prometheus-community/stackdriver_exporter v0.15.1 - github.com/prometheus-community/windows_exporter v0.27.2 + github.com/prometheus-community/windows_exporter v0.27.3 github.com/prometheus-operator/prometheus-operator v0.66.0 github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring v0.66.0 github.com/prometheus-operator/prometheus-operator/pkg/client v0.66.0 @@ -298,7 +298,7 @@ require ( github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect - github.com/BurntSushi/toml v1.3.2 // indirect + github.com/BurntSushi/toml v1.3.2 github.com/ClickHouse/clickhouse-go v1.5.4 // indirect github.com/Code-Hex/go-generics-cache v1.5.1 // indirect github.com/DataDog/agent-payload/v5 v5.0.131 // indirect @@ -398,23 +398,23 @@ require ( github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.16 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.0 // indirect - github.com/aws/aws-sdk-go-v2/service/amp v1.25.5 // indirect - github.com/aws/aws-sdk-go-v2/service/apigateway v1.23.7 // indirect - github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.20.5 // indirect - github.com/aws/aws-sdk-go-v2/service/autoscaling v1.40.6 // indirect - github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.38.1 // indirect - github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.38.5 // indirect - github.com/aws/aws-sdk-go-v2/service/ec2 v1.161.1 // indirect + github.com/aws/aws-sdk-go-v2/service/amp v1.26.1 // indirect + github.com/aws/aws-sdk-go-v2/service/apigateway v1.24.1 // indirect + github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.21.1 // indirect + github.com/aws/aws-sdk-go-v2/service/autoscaling v1.41.1 // indirect + github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.39.1 // indirect + github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.39.1 // indirect + github.com/aws/aws-sdk-go-v2/service/ec2 v1.165.1 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.0 // indirect - github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.21.5 // indirect 
+ github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.22.1 // indirect github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.27.0 // indirect - github.com/aws/aws-sdk-go-v2/service/shield v1.25.5 // indirect + github.com/aws/aws-sdk-go-v2/service/shield v1.26.1 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.22.5 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5 // indirect - github.com/aws/aws-sdk-go-v2/service/storagegateway v1.27.5 // indirect + github.com/aws/aws-sdk-go-v2/service/storagegateway v1.30.1 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.30.4 // indirect github.com/aws/smithy-go v1.20.4 // indirect github.com/axiomhq/hyperloglog v0.0.0-20240124082744-24bca3a5b39b // indirect @@ -813,6 +813,7 @@ require ( ) require ( + github.com/aws/aws-sdk-go-v2/service/iam v1.33.1 // indirect github.com/checkpoint-restore/go-criu/v6 v6.3.0 // indirect github.com/containerd/platforms v0.2.1 // indirect ) diff --git a/go.sum b/go.sum index 92e8705aec..2b41690948 100644 --- a/go.sum +++ b/go.sum @@ -503,21 +503,23 @@ github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvK github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.0 h1:TkbRExyKSVHELwG9gz2+gql37jjec2R5vus9faTomwE= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.0/go.mod h1:T3/9xMKudHhnj8it5EqIrhvv11tVZqWYkKcot+BFStc= -github.com/aws/aws-sdk-go-v2/service/amp v1.25.5 h1:OV/xhdkvG4rY7lcEBPS9pPbT83ezxXE+gM9nVA1OHWU= -github.com/aws/aws-sdk-go-v2/service/amp v1.25.5/go.mod h1:i5BA2ACkXa8Pzqinz/xEukdVJnMdfQLRcx7ftb5g0pk= -github.com/aws/aws-sdk-go-v2/service/apigateway v1.23.7 h1:VOV21NHMzI0OgywTq2iY9UnXIpH4j4s3pa4ensk8Hh8= -github.com/aws/aws-sdk-go-v2/service/apigateway v1.23.7/go.mod h1:3h9BDpayKgNNrpHZBvL7gCIeikqiE7oBxGGcrzmtLAM= -github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.20.5 h1:nk9qRsqcLik5FycE6+y16Xj46oCnoMc0Gp8Q2RHOCpg= -github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.20.5/go.mod h1:PkfhkgYj7XKPO/kGyF7s4DC5ZVrxfHoWDD+rrxobLMg= +github.com/aws/aws-sdk-go-v2/service/amp v1.26.1 h1:svGkgUKZDc5SNSiP6BgcOe/6sPmwBniltU6uHmxrjqo= +github.com/aws/aws-sdk-go-v2/service/amp v1.26.1/go.mod h1:mBtHxQRTrzQB0G5oap7IcgP9Ny5p9BJSGhWnuQ+35EY= +github.com/aws/aws-sdk-go-v2/service/apigateway v1.24.1 h1:20jy3+l7bQA6tJ1nJxg8+l+1Xm9KrDslNVvc8HvLoEI= +github.com/aws/aws-sdk-go-v2/service/apigateway v1.24.1/go.mod h1:gcUX4UmbhON3+0ij28aojD7vaMPqsRYFefqwxCLbUnw= +github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.21.1 h1:qbbxz47vQdGzvLeHS8xmvp3shV0n8cAnmred+Ehj6qQ= +github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.21.1/go.mod h1:3tgssMs7RA6rZoI6K9w6Nc6gCmhadTFAOU+OXh4tPkQ= github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY= -github.com/aws/aws-sdk-go-v2/service/autoscaling v1.40.6 h1:IDoEdCkKRy7iPlRVSuDATGE57xUjrk5i1M9eWPYwr/Y= -github.com/aws/aws-sdk-go-v2/service/autoscaling v1.40.6/go.mod h1:ZErgk/bPaaZIpj+lUWGlwI1A0UFhSIscgnCPzTLnb2s= -github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.38.1 h1:Lrq1Tuj+tA569WQzuESkm/rUfhIQMmNoZW6rRuZVHVI= -github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.38.1/go.mod h1:U12sr6Lt14X96f16t+rR52+2BdqtydwN7DjEEHRMjO0= -github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.38.5 h1:V97n9sqRIMhQP4GuB3xOBOTsg/41uLo3jyewvHSjxwE= -github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.38.5/go.mod h1:hTZS15Gghi40UxU03Cv09Qr2tXgoQrZOSGY6oaNUNAg= 
-github.com/aws/aws-sdk-go-v2/service/ec2 v1.161.1 h1:NbjXshriDs5bGeqKvrOF70L41X0aCMC60ImN2vkcQAc= -github.com/aws/aws-sdk-go-v2/service/ec2 v1.161.1/go.mod h1:xejKuuRDjz6z5OqyeLsz01MlOqqW7CqpAB4PabNvpu8= +github.com/aws/aws-sdk-go-v2/service/autoscaling v1.41.1 h1:ZNokD9M3On22Qscssyi3iQAzkoeOJxnE5NANNCzPzIA= +github.com/aws/aws-sdk-go-v2/service/autoscaling v1.41.1/go.mod h1:5XY8CFGBv6dZp/thbk8FRIAWjqNckM7PsL848KHdzjI= +github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.39.1 h1:U2qFeD0atfYsNMX7pVPvTG+vI7jCoelcWomOK7F8b34= +github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.39.1/go.mod h1:6cstKfQIguQDuWrHKYhjod025+J7n0AR+azv5t9HYBY= +github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.39.1 h1:4NU35PwZufWYLQV1JD43Z2ZGmTSv35250dNYEgrZaRs= +github.com/aws/aws-sdk-go-v2/service/databasemigrationservice v1.39.1/go.mod h1:vKjaBNIcIPOlhBc4+lei/hFnPl7GNDCnkSnGFuKQ0Xg= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.165.1 h1:LkSnU1c9JKJyXYcwpWgQGuwctwv3pDenMUgH2CmLd1A= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.165.1/go.mod h1:Wv7N3iFOKVsZNIaw9MOBUmwCkX6VMmQQRFhMrHtNGno= +github.com/aws/aws-sdk-go-v2/service/iam v1.33.1 h1:0dcMo3330L9LIckl+4iujMoq0AdR8LMK0TtgrjHUi6M= +github.com/aws/aws-sdk-go-v2/service/iam v1.33.1/go.mod h1:sX/naR5tYtlGFN0Bjg9VPNgYNg/rqiDUuKTW9peFnZk= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4 h1:KypMCbLPPHEmf9DgMGw51jMj77VfGPAN2Kv4cfhlfgI= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4/go.mod h1:Vz1JQXliGcQktFTN/LN6uGppAIRoLBR2bMvIMP0gOjc= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.0 h1:UiSyK6ent6OKpkMJN3+k5HZ4sk4UfchEaaW5wv7SblQ= @@ -527,23 +529,23 @@ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18 h1:tJ5RnkHC github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18/go.mod h1:++NHzT+nAF7ZPrHPsA+ENvsXkOO8wEu+C6RXltAG4/c= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.0 h1:l5puwOHr7IxECuPMIuZG7UKOzAnF24v6t4l+Z5Moay4= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.0/go.mod h1:Oov79flWa/n7Ni+lQC3z+VM7PoRM47omRqbJU9B5Y7E= -github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.21.5 h1:GR0vFRc5TpN36ppQJjd+gjRRC9vMAHN5C2W53oMWCJU= -github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.21.5/go.mod h1:FWw+Jnx+SlpsrU/NQ/f7f+1RdixTApZiU2o9FOubiDQ= +github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.22.1 h1:73im9DnuBD4+G8hHsbqb0NSA+n6QJ5ApFk6/YeOz8k8= +github.com/aws/aws-sdk-go-v2/service/resourcegroupstaggingapi v1.22.1/go.mod h1:p5FuKT8Rj4fnlT84Pzy7itV11NZ39Fwm/Y52S8Lg1Oc= github.com/aws/aws-sdk-go-v2/service/s3 v1.49.0 h1:VfU15izXQjz4m9y1DkbY79iylIiuPwWtrram4cSpWEI= github.com/aws/aws-sdk-go-v2/service/s3 v1.49.0/go.mod h1:1o/W6JFUuREj2ExoQ21vHJgO7wakvjhol91M9eknFgs= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.27.0 h1:64jRTsqBcIqlA4N7ZFYy+ysGPE7Rz/nJgU2fwv2cymk= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.27.0/go.mod h1:JsJDZFHwLGZu6dxhV9EV1gJrMnCeE4GEXubSZA59xdA= github.com/aws/aws-sdk-go-v2/service/servicediscovery v1.31.4 h1:YEY+Y4Lf3TuFrw8keb8NZ5nsbo/YplxEgZWbqnDlq+Y= github.com/aws/aws-sdk-go-v2/service/servicediscovery v1.31.4/go.mod h1:5autx6GwAtQVv8S/qTwBKfxzAAwe8hOlzVuTtLdliVw= -github.com/aws/aws-sdk-go-v2/service/shield v1.25.5 h1:4fTqvsBpHhPA9ngalsvdLPRir22WQNhFDFmeGSKchQQ= -github.com/aws/aws-sdk-go-v2/service/shield v1.25.5/go.mod h1:KizNr+ORjXFVELwvx3ubt49LMeTeBXm9EbhUcDXvHa8= +github.com/aws/aws-sdk-go-v2/service/shield v1.26.1 h1:vlqoPRFrhs/djRKnrPNJvzzVLIsMWITGgP4gHIzprSU= 
+github.com/aws/aws-sdk-go-v2/service/shield v1.26.1/go.mod h1:1aUTOI7FTFp3ng7NH3C0UqDkbofoLb7NLcd/ufvlHdY= github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk= github.com/aws/aws-sdk-go-v2/service/sso v1.22.5 h1:zCsFCKvbj25i7p1u94imVoO447I/sFv8qq+lGJhRN0c= github.com/aws/aws-sdk-go-v2/service/sso v1.22.5/go.mod h1:ZeDX1SnKsVlejeuz41GiajjZpRSWR7/42q/EyA/QEiM= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5 h1:SKvPgvdvmiTWoi0GAJ7AsJfOz3ngVkD/ERbs5pUnHNI= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5/go.mod h1:20sz31hv/WsPa3HhU3hfrIet2kxM4Pe0r20eBZ20Tac= -github.com/aws/aws-sdk-go-v2/service/storagegateway v1.27.5 h1:5t0w6FzW65K9nX+7YEKPLvsuOSOMl9HkJ01rG5VXsmQ= -github.com/aws/aws-sdk-go-v2/service/storagegateway v1.27.5/go.mod h1:gCrKxQ0omX8dIo7jQbXW5typwg8Z4rdVXYndHVoJ4XM= +github.com/aws/aws-sdk-go-v2/service/storagegateway v1.30.1 h1:/teUr5AA4/AUaw8A1wF6wcki4oc//lxonloUq1bl1VU= +github.com/aws/aws-sdk-go-v2/service/storagegateway v1.30.1/go.mod h1:LigoGatDhnWionzCxyHIQ96cQhwmLgTEkQDOzZg1Q3E= github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g= github.com/aws/aws-sdk-go-v2/service/sts v1.30.4 h1:iAckBT2OeEK/kBDyN/jDtpEExhjeeA/Im2q4X0rJZT8= github.com/aws/aws-sdk-go-v2/service/sts v1.30.4/go.mod h1:vmSqFK+BVIwVpDAGZB3CoCXHzurt4qBE8lf+I/kRTh0= @@ -1849,8 +1851,8 @@ github.com/ncabatoff/go-seq v0.0.0-20180805175032-b08ef85ed833 h1:t4WWQ9I797y7QU github.com/ncabatoff/go-seq v0.0.0-20180805175032-b08ef85ed833/go.mod h1:0CznHmXSjMEqs5Tezj/w2emQoM41wzYM9KpDKUHPYag= github.com/ncabatoff/process-exporter v0.7.10 h1:+Ere7+3se6QqP54gg7aBRagWcL8bq3u5zNi/GRSWeKQ= github.com/ncabatoff/process-exporter v0.7.10/go.mod h1:DHZRZjqxw9LCOpLlX0DjBuyn6d5plh41Jv6Tmttj7Ek= -github.com/nerdswords/yet-another-cloudwatch-exporter v0.60.0 h1:+027WNpx6sqn1kuhl4fPPz65TaF7kzG9ymAOJlasZjg= -github.com/nerdswords/yet-another-cloudwatch-exporter v0.60.0/go.mod h1:EXA9yqANHYmkbpe9a41X7iFJbK8/WNhp+Ph/+5DvZo4= +github.com/nerdswords/yet-another-cloudwatch-exporter v0.61.0 h1:aZIz1Dh+dXoesIvv56uReOpvDE21RvRgADhyTgEdNXw= +github.com/nerdswords/yet-another-cloudwatch-exporter v0.61.0/go.mod h1:n/wLEzpw3i44nWQ5UydQBEvPMxeKd2kYqfGt1GFcuKk= github.com/newrelic/newrelic-telemetry-sdk-go v0.2.0/go.mod h1:G9MqE/cHGv3Hx3qpYhfuyFUsGx2DpVcGi1iJIqTg+JQ= github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2 h1:BQ1HW7hr4IVovMwWg0E0PYcyW8CzqDcVmaew9cujU4s= github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2/go.mod h1:TLb2Sg7HQcgGdloNxkrmtgDNR9uVYF3lfdFIN4Ro6Sk= @@ -2122,8 +2124,8 @@ github.com/prometheus-community/prom-label-proxy v0.6.0 h1:vRY29tUex8qI2MEimovTz github.com/prometheus-community/prom-label-proxy v0.6.0/go.mod h1:XyAyskjjhqEx0qnbGUVeAkYSz3Wm9gStT7/wXFxD8n0= github.com/prometheus-community/stackdriver_exporter v0.15.1 h1:+k26zeBy8BlG+eDKPtYA7IKXrQCI7ISLfktjIUu7wBQ= github.com/prometheus-community/stackdriver_exporter v0.15.1/go.mod h1:UmmIgnrVQqDAeM8pSeYntBcUxPhp8oqb8W3nvRYzsSg= -github.com/prometheus-community/windows_exporter v0.27.2 h1:/tdRTouPMVsC4qt8+s9NOPEm7L/9qdDxmasiETlx+Wk= -github.com/prometheus-community/windows_exporter v0.27.2/go.mod h1:8+T6hfv71nvgVIzguouXkIGoa15ni+uXHHULBOA2bZo= +github.com/prometheus-community/windows_exporter v0.27.3 h1:L5Dc4gqc3477Y6jaVHhkm25jysqbxg1ajMyPbmnqScw= +github.com/prometheus-community/windows_exporter v0.27.3/go.mod h1:8+T6hfv71nvgVIzguouXkIGoa15ni+uXHHULBOA2bZo= github.com/prometheus-operator/prometheus-operator v0.66.0 
h1:Jj4mbGAkfBbTih6ait03f2vUjEHB7Kb4gnlAmWu7AJ0=
 github.com/prometheus-operator/prometheus-operator v0.66.0/go.mod h1:U7S3+u6YTxwCTMNIQxZWttEq70qBA4Qps7/c5mUZOpQ=
 github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring v0.66.0 h1:PPW01FLVjJHMNcbAL1DDD9EZceSQKMOU/VpK0irrxrI=
diff --git a/internal/component/discovery/discovery.go b/internal/component/discovery/discovery.go
index 91dbcb35ff..726b2a0552 100644
--- a/internal/component/discovery/discovery.go
+++ b/internal/component/discovery/discovery.go
@@ -2,6 +2,7 @@ package discovery
 
 import (
 	"context"
+	"slices"
 	"sort"
 	"strings"
 	"sync"
@@ -41,6 +42,17 @@ func (t Target) NonMetaLabels() labels.Labels {
 	return lset
 }
 
+func (t Target) SpecificLabels(lbls []string) labels.Labels {
+	var lset labels.Labels
+	for k, v := range t {
+		if slices.Contains(lbls, k) {
+			lset = append(lset, labels.Label{Name: k, Value: v})
+		}
+	}
+	sort.Sort(lset)
+	return lset
+}
+
 // Exports holds values which are exported by all discovery components.
 type Exports struct {
 	Targets []Target `alloy:"targets,attr"`
diff --git a/internal/component/discovery/distributed_targets.go b/internal/component/discovery/distributed_targets.go
index c0b2831b8d..5e776f6b40 100644
--- a/internal/component/discovery/distributed_targets.go
+++ b/internal/component/discovery/distributed_targets.go
@@ -19,6 +19,13 @@ type DistributedTargets struct {
 // NewDistributedTargets creates the abstraction that allows components to
 // dynamically shard targets between components.
 func NewDistributedTargets(clusteringEnabled bool, cluster cluster.Cluster, allTargets []Target) *DistributedTargets {
+	return NewDistributedTargetsWithCustomLabels(clusteringEnabled, cluster, allTargets, nil)
+}
+
+// NewDistributedTargetsWithCustomLabels creates the abstraction that allows components to
+// dynamically shard targets between components. Passing labels limits the sharding to use only
+// those labels when computing the hash key. Passing a nil or empty slice means all non-meta labels are used.
+func NewDistributedTargetsWithCustomLabels(clusteringEnabled bool, cluster cluster.Cluster, allTargets []Target, labels []string) *DistributedTargets {
 	if !clusteringEnabled || cluster == nil {
 		cluster = disabledCluster{}
 	}
@@ -32,8 +39,20 @@ func NewDistributedTargets(clusteringEnabled bool, cluster cluster.Cluster, allT
 	localTargetKeys := make([]shard.Key, 0, localCap)
 	remoteTargetKeys := make(map[shard.Key]struct{}, len(allTargets)-localCap)
 
+	// Need to handle duplicate entries: several targets can map to the same key.
+	singular := make(map[shard.Key]struct{})
 	for _, tgt := range allTargets {
-		targetKey := keyFor(tgt)
+		var targetKey shard.Key
+		// If we have no custom labels, check all non-meta labels.
+		if len(labels) == 0 {
+			targetKey = keyFor(tgt)
+		} else {
+			targetKey = keyForLabels(tgt, labels)
+		}
+		if _, ok := singular[targetKey]; ok {
+			continue
+		}
+		singular[targetKey] = struct{}{}
 		peers, err := cluster.Lookup(targetKey, 1, shard.OpReadWrite)
 		belongsToLocal := err != nil || len(peers) == 0 || peers[0].Self
@@ -57,6 +76,10 @@ func (dt *DistributedTargets) LocalTargets() []Target {
 	return dt.localTargets
 }
 
+func (dt *DistributedTargets) TargetCount() int {
+	return len(dt.localTargetKeys) + len(dt.remoteTargetKeys)
+}
+
 // MovedToRemoteInstance returns the set of local targets from prev
 // that are no longer local in dt, indicating an active target has moved.
 // Only targets which exist in both prev and dt are returned.
If prev @@ -79,6 +102,10 @@ func keyFor(tgt Target) shard.Key { return shard.Key(tgt.NonMetaLabels().Hash()) } +func keyForLabels(tgt Target, lbls []string) shard.Key { + return shard.Key(tgt.SpecificLabels(lbls).Hash()) +} + type disabledCluster struct{} var _ cluster.Cluster = disabledCluster{} diff --git a/internal/component/loki/process/stages/drop.go b/internal/component/loki/process/stages/drop.go index a6c2ca2da5..67b23754f4 100644 --- a/internal/component/loki/process/stages/drop.go +++ b/internal/component/loki/process/stages/drop.go @@ -10,8 +10,9 @@ import ( "github.com/alecthomas/units" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/runtime/logging/level" "github.com/prometheus/client_golang/prometheus" + + "github.com/grafana/alloy/internal/runtime/logging/level" ) const ( @@ -37,50 +38,49 @@ type DropConfig struct { Expression string `alloy:"expression,attr,optional"` OlderThan time.Duration `alloy:"older_than,attr,optional"` LongerThan units.Base2Bytes `alloy:"longer_than,attr,optional"` - regex *regexp.Regexp } // validateDropConfig validates the DropConfig for the dropStage -func validateDropConfig(cfg *DropConfig) error { +func validateDropConfig(cfg *DropConfig) (*regexp.Regexp, error) { if cfg == nil || (cfg.Source == "" && cfg.Expression == "" && cfg.OlderThan == emptyDuration && cfg.LongerThan == emptySize) { - - return errors.New(ErrDropStageEmptyConfig) + return nil, errors.New(ErrDropStageEmptyConfig) } if cfg.DropReason == "" { cfg.DropReason = defaultDropReason } if cfg.Value != "" && cfg.Expression != "" { - return errors.New(ErrDropStageInvalidConfig) + return nil, errors.New(ErrDropStageInvalidConfig) } if cfg.Separator == "" { cfg.Separator = defaultSeparator } if cfg.Value != "" && cfg.Source == "" { - return errors.New(ErrDropStageNoSourceWithValue) + return nil, errors.New(ErrDropStageNoSourceWithValue) } + var ( + expr *regexp.Regexp + err error + ) if cfg.Expression != "" { - expr, err := regexp.Compile(cfg.Expression) - if err != nil { - return fmt.Errorf(ErrDropStageInvalidRegex, err) + if expr, err = regexp.Compile(cfg.Expression); err != nil { + return nil, fmt.Errorf(ErrDropStageInvalidRegex, err) } - cfg.regex = expr } // The first step to exclude `value` and fully replace it with the `expression`. // It will simplify code and less confusing for the end-user on which option to choose. 
if cfg.Value != "" { - expr, err := regexp.Compile(fmt.Sprintf("^%s$", regexp.QuoteMeta(cfg.Value))) + expr, err = regexp.Compile(fmt.Sprintf("^%s$", regexp.QuoteMeta(cfg.Value))) if err != nil { - return fmt.Errorf(ErrDropStageInvalidRegex, err) + return nil, fmt.Errorf(ErrDropStageInvalidRegex, err) } - cfg.regex = expr } - return nil + return expr, nil } // newDropStage creates a DropStage from config func newDropStage(logger log.Logger, config DropConfig, registerer prometheus.Registerer) (Stage, error) { - err := validateDropConfig(&config) + regex, err := validateDropConfig(&config) if err != nil { return nil, err } @@ -88,6 +88,7 @@ func newDropStage(logger log.Logger, config DropConfig, registerer prometheus.Re return &dropStage{ logger: log.With(logger, "component", "stage", "type", "drop"), cfg: &config, + regex: regex, dropCount: getDropCountMetric(registerer), }, nil } @@ -96,6 +97,7 @@ func newDropStage(logger log.Logger, config DropConfig, registerer prometheus.Re type dropStage struct { logger log.Logger cfg *DropConfig + regex *regexp.Regexp dropCount *prometheus.CounterVec } @@ -144,7 +146,7 @@ func (m *dropStage) shouldDrop(e Entry) bool { return false } } - if m.cfg.Source != "" && m.cfg.regex == nil { + if m.cfg.Source != "" && m.regex == nil { var match bool match = true for _, src := range splitSource(m.cfg.Source) { @@ -165,8 +167,8 @@ func (m *dropStage) shouldDrop(e Entry) bool { } } - if m.cfg.Source == "" && m.cfg.regex != nil { - if !m.cfg.regex.MatchString(e.Line) { + if m.cfg.Source == "" && m.regex != nil { + if !m.regex.MatchString(e.Line) { // Not a match to the regex, don't drop if Debug { level.Debug(m.logger).Log("msg", "line will not be dropped, the provided regular expression did not match the log line") @@ -178,7 +180,7 @@ func (m *dropStage) shouldDrop(e Entry) bool { } } - if m.cfg.Source != "" && m.cfg.regex != nil { + if m.cfg.Source != "" && m.regex != nil { var extractedData []string for _, src := range splitSource(m.cfg.Source) { if e, ok := e.Extracted[src]; ok { @@ -192,7 +194,7 @@ func (m *dropStage) shouldDrop(e Entry) bool { extractedData = append(extractedData, s) } } - if !m.cfg.regex.MatchString(strings.Join(extractedData, m.cfg.Separator)) { + if !m.regex.MatchString(strings.Join(extractedData, m.cfg.Separator)) { // Not a match to the regex, don't drop if Debug { level.Debug(m.logger).Log("msg", "line will not be dropped, the provided regular expression did not match the log line") diff --git a/internal/component/loki/process/stages/drop_test.go b/internal/component/loki/process/stages/drop_test.go index 736ddfe5d1..e77293f45d 100644 --- a/internal/component/loki/process/stages/drop_test.go +++ b/internal/component/loki/process/stages/drop_test.go @@ -7,12 +7,13 @@ import ( "time" "github.com/alecthomas/units" - "github.com/grafana/alloy/internal/util" dskit "github.com/grafana/dskit/server" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/grafana/alloy/internal/util" ) // Not all these are tested but are here to make sure the different types marshal without error @@ -411,7 +412,7 @@ func Test_dropStage_Process(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - err := validateDropConfig(tt.config) + _, err := validateDropConfig(tt.config) if err != nil { t.Error(err) } @@ -465,7 +466,7 @@ func Test_validateDropConfig(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, 
func(t *testing.T) { - if err := validateDropConfig(tt.config); ((err != nil) && (err.Error() != tt.wantErr.Error())) || (err == nil && tt.wantErr != nil) { + if _, err := validateDropConfig(tt.config); ((err != nil) && (err.Error() != tt.wantErr.Error())) || (err == nil && tt.wantErr != nil) { t.Errorf("validateDropConfig() error = %v, wantErr = %v", err, tt.wantErr) } }) diff --git a/internal/component/loki/process/stages/geoip.go b/internal/component/loki/process/stages/geoip.go index 47994acf7c..02bebe7d71 100644 --- a/internal/component/loki/process/stages/geoip.go +++ b/internal/component/loki/process/stages/geoip.go @@ -7,15 +7,15 @@ import ( "reflect" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/runtime/logging/level" "github.com/jmespath/go-jmespath" "github.com/oschwald/geoip2-golang" "github.com/oschwald/maxminddb-golang" "github.com/prometheus/common/model" + + "github.com/grafana/alloy/internal/runtime/logging/level" ) var ( - ErrEmptyGeoIPStageConfig = errors.New("geoip stage config cannot be empty") ErrEmptyDBPathGeoIPStageConfig = errors.New("db path cannot be empty") ErrEmptySourceGeoIPStageConfig = errors.New("source cannot be empty") ErrEmptyDBTypeGeoIPStageConfig = errors.New("db type should be either city or asn") diff --git a/internal/component/loki/process/stages/labels.go b/internal/component/loki/process/stages/labels.go index e47517c329..4764017184 100644 --- a/internal/component/loki/process/stages/labels.go +++ b/internal/component/loki/process/stages/labels.go @@ -7,8 +7,9 @@ import ( "time" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/runtime/logging/level" "github.com/prometheus/common/model" + + "github.com/grafana/alloy/internal/runtime/logging/level" ) const ( @@ -22,53 +23,56 @@ type LabelsConfig struct { } // validateLabelsConfig validates the Label stage configuration -func validateLabelsConfig(c LabelsConfig) error { +func validateLabelsConfig(c LabelsConfig) (map[string]string, error) { + // We must not mutate the c.Values, create a copy with changes we need. 
+ ret := map[string]string{} if c.Values == nil { - return errors.New(ErrEmptyLabelStageConfig) + return nil, errors.New(ErrEmptyLabelStageConfig) } for labelName, labelSrc := range c.Values { if !model.LabelName(labelName).IsValid() { - return fmt.Errorf(ErrInvalidLabelName, labelName) + return nil, fmt.Errorf(ErrInvalidLabelName, labelName) } // If no label source was specified, use the key name if labelSrc == nil || *labelSrc == "" { - lName := labelName - c.Values[labelName] = &lName + ret[labelName] = labelName + } else { + ret[labelName] = *labelSrc } } - return nil + return ret, nil } // newLabelStage creates a new label stage to set labels from extracted data func newLabelStage(logger log.Logger, configs LabelsConfig) (Stage, error) { - err := validateLabelsConfig(configs) + labelsConfig, err := validateLabelsConfig(configs) if err != nil { return nil, err } return toStage(&labelStage{ - cfgs: configs, - logger: logger, + labelsConfig: labelsConfig, + logger: logger, }), nil } // labelStage sets labels from extracted data type labelStage struct { - cfgs LabelsConfig - logger log.Logger + labelsConfig map[string]string + logger log.Logger } // Process implements Stage func (l *labelStage) Process(labels model.LabelSet, extracted map[string]interface{}, _ *time.Time, _ *string) { - processLabelsConfigs(l.logger, extracted, l.cfgs, func(labelName model.LabelName, labelValue model.LabelValue) { + processLabelsConfigs(l.logger, extracted, l.labelsConfig, func(labelName model.LabelName, labelValue model.LabelValue) { labels[labelName] = labelValue }) } type labelsConsumer func(labelName model.LabelName, labelValue model.LabelValue) -func processLabelsConfigs(logger log.Logger, extracted map[string]interface{}, configs LabelsConfig, consumer labelsConsumer) { - for lName, lSrc := range configs.Values { - if lValue, ok := extracted[*lSrc]; ok { +func processLabelsConfigs(logger log.Logger, extracted map[string]interface{}, labelsConfig map[string]string, consumer labelsConsumer) { + for lName, lSrc := range labelsConfig { + if lValue, ok := extracted[lSrc]; ok { s, err := getString(lValue) if err != nil { if Debug { diff --git a/internal/component/loki/process/stages/labels_test.go b/internal/component/loki/process/stages/labels_test.go index 201ab06b99..8cb0ac7cf1 100644 --- a/internal/component/loki/process/stages/labels_test.go +++ b/internal/component/loki/process/stages/labels_test.go @@ -72,10 +72,8 @@ func TestLabelsPipelineWithMissingKey_Labels(t *testing.T) { } var ( - lv1 = "lv1" - lv2c = "l2" - lv3 = "" - lv3c = "l3" + lv1 = "lv1" + lv3 = "" ) var emptyLabelsConfig = LabelsConfig{nil} @@ -84,19 +82,19 @@ func TestLabels(t *testing.T) { tests := map[string]struct { config LabelsConfig err error - expectedCfgs LabelsConfig + expectedCfgs map[string]string }{ "missing config": { config: emptyLabelsConfig, err: errors.New(ErrEmptyLabelStageConfig), - expectedCfgs: emptyLabelsConfig, + expectedCfgs: nil, }, "invalid label name": { config: LabelsConfig{ Values: map[string]*string{"#*FDDS*": nil}, }, err: fmt.Errorf(ErrInvalidLabelName, "#*FDDS*"), - expectedCfgs: emptyLabelsConfig, + expectedCfgs: nil, }, "label value is set from name": { config: LabelsConfig{Values: map[string]*string{ @@ -105,18 +103,18 @@ func TestLabels(t *testing.T) { "l3": &lv3, }}, err: nil, - expectedCfgs: LabelsConfig{Values: map[string]*string{ - "l1": &lv1, - "l2": &lv2c, - "l3": &lv3c, - }}, + expectedCfgs: map[string]string{ + "l1": lv1, + "l2": "l2", + "l3": "l3", + }, }, } for name, test := range tests 
{ test := test t.Run(name, func(t *testing.T) { t.Parallel() - err := validateLabelsConfig(test.config) + actual, err := validateLabelsConfig(test.config) if (err != nil) != (test.err != nil) { t.Errorf("validateLabelsConfig() expected error = %v, actual error = %v", test.err, err) return @@ -125,8 +123,8 @@ func TestLabels(t *testing.T) { t.Errorf("validateLabelsConfig() expected error = %v, actual error = %v", test.err, err) return } - if test.expectedCfgs.Values != nil { - assert.Equal(t, test.expectedCfgs, test.config) + if test.expectedCfgs != nil { + assert.Equal(t, test.expectedCfgs, actual) } }) } diff --git a/internal/component/loki/process/stages/luhn.go b/internal/component/loki/process/stages/luhn.go index 14cf8b6fef..3640af0c5c 100644 --- a/internal/component/loki/process/stages/luhn.go +++ b/internal/component/loki/process/stages/luhn.go @@ -17,7 +17,7 @@ type LuhnFilterConfig struct { } // validateLuhnFilterConfig validates the LuhnFilterConfig. -func validateLuhnFilterConfig(c LuhnFilterConfig) error { +func validateLuhnFilterConfig(c *LuhnFilterConfig) error { if c.Replacement == "" { c.Replacement = "**REDACTED**" } @@ -32,7 +32,7 @@ func validateLuhnFilterConfig(c LuhnFilterConfig) error { // newLuhnFilterStage creates a new LuhnFilterStage. func newLuhnFilterStage(config LuhnFilterConfig) (Stage, error) { - if err := validateLuhnFilterConfig(config); err != nil { + if err := validateLuhnFilterConfig(&config); err != nil { return nil, err } return toStage(&luhnFilterStage{ diff --git a/internal/component/loki/process/stages/luhn_test.go b/internal/component/loki/process/stages/luhn_test.go index ef618aa863..9ac622ce44 100644 --- a/internal/component/loki/process/stages/luhn_test.go +++ b/internal/component/loki/process/stages/luhn_test.go @@ -2,6 +2,8 @@ package stages import ( "testing" + + "github.com/stretchr/testify/require" ) // Test cases for the Luhn algorithm validation @@ -52,3 +54,81 @@ func TestReplaceLuhnValidNumbers(t *testing.T) { } } } + +func TestValidateConfig(t *testing.T) { + source := ".*" + emptySource := "" + cases := []struct { + name string + input LuhnFilterConfig + expected LuhnFilterConfig + errorContainsStr string + }{ + { + name: "successful validation", + input: LuhnFilterConfig{ + Replacement: "ABC", + Source: &source, + MinLength: 10, + }, + expected: LuhnFilterConfig{ + Replacement: "ABC", + Source: &source, + MinLength: 10, + }, + }, + { + name: "nil source", + input: LuhnFilterConfig{ + Replacement: "ABC", + Source: nil, + MinLength: 10, + }, + expected: LuhnFilterConfig{ + Replacement: "ABC", + Source: nil, + MinLength: 10, + }, + }, + { + name: "empty source error", + input: LuhnFilterConfig{ + Replacement: "ABC", + Source: &emptySource, + MinLength: 11, + }, + expected: LuhnFilterConfig{ + Replacement: "ABC", + Source: &emptySource, + MinLength: 11, + }, + errorContainsStr: "empty source", + }, + { + name: "defaults update", + input: LuhnFilterConfig{ + Replacement: "", + Source: &source, + MinLength: -10, + }, + expected: LuhnFilterConfig{ + Replacement: "**REDACTED**", + Source: &source, + MinLength: 13, + }, + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + err := validateLuhnFilterConfig(&c.input) + if c.errorContainsStr == "" { + require.NoError(t, err) + } else { + require.ErrorContains(t, err, c.errorContainsStr) + } + require.Equal(t, c.expected, c.input) + }) + } + +} diff --git a/internal/component/loki/process/stages/match.go b/internal/component/loki/process/stages/match.go index 
4f07a976da..8cb71e2e27 100644 --- a/internal/component/loki/process/stages/match.go +++ b/internal/component/loki/process/stages/match.go @@ -13,13 +13,11 @@ import ( // Configuration errors. var ( - ErrEmptyMatchStageConfig = errors.New("match stage config cannot be empty") - ErrPipelineNameRequired = errors.New("match stage pipeline name can be omitted but cannot be an empty string") - ErrSelectorRequired = errors.New("selector statement required for match stage") - ErrMatchRequiresStages = errors.New("match stage requires at least one additional stage to be defined in '- stages'") - ErrSelectorSyntax = errors.New("invalid selector syntax for match stage") - ErrStagesWithDropLine = errors.New("match stage configured to drop entries cannot contains stages") - ErrUnknownMatchAction = errors.New("match stage action should be 'keep' or 'drop'") + ErrSelectorRequired = errors.New("selector statement required for match stage") + ErrMatchRequiresStages = errors.New("match stage requires at least one additional stage to be defined in '- stages'") + ErrSelectorSyntax = errors.New("invalid selector syntax for match stage") + ErrStagesWithDropLine = errors.New("match stage configured to drop entries cannot contains stages") + ErrUnknownMatchAction = errors.New("match stage action should be 'keep' or 'drop'") MatchActionKeep = "keep" MatchActionDrop = "drop" diff --git a/internal/component/loki/process/stages/match_test.go b/internal/component/loki/process/stages/match_test.go index d1f5e05027..3a67c064be 100644 --- a/internal/component/loki/process/stages/match_test.go +++ b/internal/component/loki/process/stages/match_test.go @@ -5,9 +5,11 @@ import ( "testing" "time" - "github.com/grafana/alloy/internal/util" "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/grafana/alloy/internal/util" ) var testMatchAlloy = ` @@ -191,19 +193,26 @@ func TestValidateMatcherConfig(t *testing.T) { emptyStages := []StageConfig{} defaultStage := []StageConfig{{MatchConfig: &MatchConfig{}}} tests := []struct { - name string - cfg *MatchConfig - wantErr bool + name string + cfg *MatchConfig + wantErr bool + expected *MatchConfig }{ - {"pipeline name required", &MatchConfig{}, true}, - {"selector required", &MatchConfig{Selector: ""}, true}, - {"nil stages without dropping", &MatchConfig{PipelineName: "", Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: nil}, true}, - {"empty stages without dropping", &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: emptyStages}, true}, - {"stages with dropping", &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionDrop, Stages: defaultStage}, true}, - {"empty stages dropping", &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionDrop, Stages: emptyStages}, false}, - {"stages without dropping", &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: defaultStage}, false}, - {"bad selector", &MatchConfig{Selector: `{app="foo}`, Action: MatchActionKeep, Stages: defaultStage}, true}, - {"bad action", &MatchConfig{Selector: `{app="foo}`, Action: "nope", Stages: emptyStages}, true}, + {name: "pipeline name required", cfg: &MatchConfig{}, wantErr: true}, + {name: "selector required", cfg: &MatchConfig{Selector: ""}, wantErr: true}, + {name: "nil stages without dropping", cfg: &MatchConfig{PipelineName: "", Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: nil}, wantErr: true}, + {name: "empty stages without dropping", cfg: 
&MatchConfig{Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: emptyStages}, wantErr: true}, + {name: "stages with dropping", cfg: &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionDrop, Stages: defaultStage}, wantErr: true}, + {name: "empty stages dropping", cfg: &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionDrop, Stages: emptyStages}}, + {name: "stages without dropping", cfg: &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: defaultStage}}, + {name: "bad selector", cfg: &MatchConfig{Selector: `{app="foo}`, Action: MatchActionKeep, Stages: defaultStage}, wantErr: true}, + {name: "bad action", cfg: &MatchConfig{Selector: `{app="foo}`, Action: "nope", Stages: emptyStages}, wantErr: true}, + { + name: "sets default action to keep", + cfg: &MatchConfig{Selector: `{app="foo"}`, Stages: defaultStage}, + wantErr: false, + expected: &MatchConfig{Selector: `{app="foo"}`, Action: MatchActionKeep, Stages: defaultStage}, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -212,6 +221,9 @@ func TestValidateMatcherConfig(t *testing.T) { t.Errorf("validateMatcherConfig() error = %v, wantErr %v", err, tt.wantErr) return } + if tt.expected != nil { + require.Equal(t, tt.expected, tt.cfg) + } }) } } diff --git a/internal/component/loki/process/stages/metric.go b/internal/component/loki/process/stages/metric.go index 632e918b16..1d4a0cf936 100644 --- a/internal/component/loki/process/stages/metric.go +++ b/internal/component/loki/process/stages/metric.go @@ -1,7 +1,6 @@ package stages import ( - "errors" "fmt" "math" "reflect" @@ -9,31 +8,19 @@ import ( "time" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/component/loki/process/metric" - "github.com/grafana/alloy/internal/runtime/logging/level" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" + + "github.com/grafana/alloy/internal/component/loki/process/metric" + "github.com/grafana/alloy/internal/runtime/logging/level" ) // Metric types. const ( - MetricTypeCounter = "counter" - MetricTypeGauge = "gauge" - MetricTypeHistogram = "histogram" - defaultMetricsPrefix = "loki_process_custom_" ) -// Configuration errors. -var ( - ErrEmptyMetricsStageConfig = errors.New("empty metric stage configuration") - ErrMetricsStageInvalidType = errors.New("invalid metric type: must be one of 'counter', 'gauge', or 'histogram'") - ErrInvalidIdleDur = errors.New("max_idle_duration could not be parsed as a time.Duration") - ErrSubSecIdleDur = errors.New("max_idle_duration less than 1s not allowed") -) - // MetricConfig is a single metrics configuration. -// TODO(@tpaschalis) Rework once Alloy squashing is implemented. 
type MetricConfig struct { Counter *metric.CounterConfig `alloy:"counter,block,optional"` Gauge *metric.GaugeConfig `alloy:"gauge,block,optional"` @@ -106,7 +93,6 @@ func newMetricStage(logger log.Logger, config MetricsConfig, registry prometheus } return &metricStage{ logger: logger, - cfg: config, metrics: metrics, }, nil } @@ -114,7 +100,6 @@ func newMetricStage(logger log.Logger, config MetricsConfig, registry prometheus // metricStage creates and updates prometheus metrics based on extracted pipeline data type metricStage struct { logger log.Logger - cfg MetricsConfig metrics map[string]cfgCollector } @@ -132,7 +117,7 @@ func (m *metricStage) Run(in chan Entry) chan Entry { } // Process implements Stage -func (m *metricStage) Process(labels model.LabelSet, extracted map[string]interface{}, t *time.Time, entry *string) { +func (m *metricStage) Process(labels model.LabelSet, extracted map[string]interface{}, _ *time.Time, entry *string) { for name, cc := range m.metrics { // There is a special case for counters where we count even if there is no match in the extracted map. if c, ok := cc.collector.(*metric.Counters); ok { diff --git a/internal/component/loki/process/stages/multiline.go b/internal/component/loki/process/stages/multiline.go index f754909754..99c9416e93 100644 --- a/internal/component/loki/process/stages/multiline.go +++ b/internal/component/loki/process/stages/multiline.go @@ -18,9 +18,8 @@ import ( // Configuration errors. var ( - ErrMultilineStageEmptyConfig = errors.New("multiline stage config must define `firstline` regular expression") - ErrMultilineStageInvalidRegex = errors.New("multiline stage first line regex compilation error") - ErrMultilineStageInvalidMaxWaitTime = errors.New("multiline stage `max_wait_time` parse error") + ErrMultilineStageEmptyConfig = errors.New("multiline stage config must define `firstline` regular expression") + ErrMultilineStageInvalidRegex = errors.New("multiline stage first line regex compilation error") ) // MultilineConfig contains the configuration for a Multiline stage. @@ -28,7 +27,6 @@ type MultilineConfig struct { Expression string `alloy:"firstline,attr"` MaxLines uint64 `alloy:"max_lines,attr,optional"` MaxWaitTime time.Duration `alloy:"max_wait_time,attr,optional"` - regex *regexp.Regexp } // DefaultMultilineConfig applies the default values on @@ -51,24 +49,24 @@ func (args *MultilineConfig) Validate() error { return nil } -func validateMultilineConfig(cfg *MultilineConfig) error { +func validateMultilineConfig(cfg MultilineConfig) (*regexp.Regexp, error) { if cfg.Expression == "" { - return ErrMultilineStageEmptyConfig + return nil, ErrMultilineStageEmptyConfig } expr, err := regexp.Compile(cfg.Expression) if err != nil { - return fmt.Errorf("%v: %w", ErrMultilineStageInvalidRegex, err) + return nil, fmt.Errorf("%v: %w", ErrMultilineStageInvalidRegex, err) } - cfg.regex = expr - return nil + return expr, nil } // multilineStage matches lines to determine whether the following lines belong to a block and should be collapsed type multilineStage struct { logger log.Logger cfg MultilineConfig + regex *regexp.Regexp } // multilineState captures the internal state of a running multiline stage. 
@@ -80,7 +78,7 @@ type multilineState struct { // newMultilineStage creates a MulitlineStage from config func newMultilineStage(logger log.Logger, config MultilineConfig) (Stage, error) { - err := validateMultilineConfig(&config) + regex, err := validateMultilineConfig(config) if err != nil { return nil, err } @@ -88,6 +86,7 @@ func newMultilineStage(logger log.Logger, config MultilineConfig) (Stage, error) return &multilineStage{ logger: log.With(logger, "component", "stage", "type", "multiline"), cfg: config, + regex: regex, }, nil } @@ -96,7 +95,7 @@ func (m *multilineStage) Run(in chan Entry) chan Entry { go func() { defer close(out) - streams := make(map[model.Fingerprint](chan Entry)) + streams := make(map[model.Fingerprint]chan Entry) wg := new(sync.WaitGroup) for e := range in { @@ -104,7 +103,7 @@ func (m *multilineStage) Run(in chan Entry) chan Entry { s, ok := streams[key] if !ok { // Pass through entries until we hit first start line. - if !m.cfg.regex.MatchString(e.Line) { + if !m.regex.MatchString(e.Line) { level.Debug(m.logger).Log("msg", "pass through entry", "stream", key) out <- e continue @@ -152,7 +151,7 @@ func (m *multilineStage) runMultiline(in chan Entry, out chan Entry, wg *sync.Wa return } - isFirstLine := m.cfg.regex.MatchString(e.Line) + isFirstLine := m.regex.MatchString(e.Line) if isFirstLine { level.Debug(m.logger).Log("msg", "flush multiline block because new start line", "block", state.buffer.String(), "stream", e.Labels.FastFingerprint()) m.flush(out, state) diff --git a/internal/component/loki/process/stages/multiline_test.go b/internal/component/loki/process/stages/multiline_test.go index dda55f08c6..bdad1743cc 100644 --- a/internal/component/loki/process/stages/multiline_test.go +++ b/internal/component/loki/process/stages/multiline_test.go @@ -17,11 +17,12 @@ import ( func TestMultilineStageProcess(t *testing.T) { logger := util.TestAlloyLogger(t) mcfg := MultilineConfig{Expression: "^START", MaxWaitTime: 3 * time.Second} - err := validateMultilineConfig(&mcfg) + regex, err := validateMultilineConfig(mcfg) require.NoError(t, err) stage := &multilineStage{ cfg: mcfg, + regex: regex, logger: logger, } @@ -44,11 +45,12 @@ func TestMultilineStageProcess(t *testing.T) { func TestMultilineStageMultiStreams(t *testing.T) { logger := util.TestAlloyLogger(t) mcfg := MultilineConfig{Expression: "^START", MaxWaitTime: 3 * time.Second} - err := validateMultilineConfig(&mcfg) + regex, err := validateMultilineConfig(mcfg) require.NoError(t, err) stage := &multilineStage{ cfg: mcfg, + regex: regex, logger: logger, } @@ -84,11 +86,12 @@ func TestMultilineStageMultiStreams(t *testing.T) { func TestMultilineStageMaxWaitTime(t *testing.T) { logger := util.TestAlloyLogger(t) mcfg := MultilineConfig{Expression: "^START", MaxWaitTime: 100 * time.Millisecond} - err := validateMultilineConfig(&mcfg) + regex, err := validateMultilineConfig(mcfg) require.NoError(t, err) stage := &multilineStage{ cfg: mcfg, + regex: regex, logger: logger, } diff --git a/internal/component/loki/process/stages/pipeline.go b/internal/component/loki/process/stages/pipeline.go index 043e16cce6..e6583b4514 100644 --- a/internal/component/loki/process/stages/pipeline.go +++ b/internal/component/loki/process/stages/pipeline.go @@ -6,16 +6,16 @@ import ( "sync" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/component/common/loki" "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" + + "github.com/grafana/alloy/internal/component/common/loki" ) // StageConfig defines a 
single stage in a processing pipeline.
 // We define these as pointers types so we can use reflection to check that
 // exactly one is set.
 type StageConfig struct {
-	//TODO(thampiotr): sync these with new stages
 	CRIConfig                *CRIConfig                `alloy:"cri,block,optional"`
 	DecolorizeConfig         *DecolorizeConfig         `alloy:"decolorize,block,optional"`
 	DockerConfig             *DockerConfig             `alloy:"docker,block,optional"`
@@ -86,24 +86,8 @@ func RunWith(input chan Entry, process func(e Entry) Entry) chan Entry {
 	return out
 }
 
-// RunWithSkip same as RunWith, except it skip sending it to output channel, if `process` functions returns `skip` true.
-func RunWithSkip(input chan Entry, process func(e Entry) (Entry, bool)) chan Entry {
-	out := make(chan Entry)
-	go func() {
-		defer close(out)
-		for e := range input {
-			ee, skip := process(e)
-			if skip {
-				continue
-			}
-			out <- ee
-		}
-	}()
-
-	return out
-}
-
-// RunWithSkiporSendMany same as RunWithSkip, except it can either skip sending it to output channel, if `process` functions returns `skip` true. Or send many entries.
+// RunWithSkipOrSendMany is the same as RunWith, except that it can send multiple entries at once and it will skip
+// sending the batch to the output channel if the `process` function returns `skip` as true.
 func RunWithSkipOrSendMany(input chan Entry, process func(e Entry) ([]Entry, bool)) chan Entry {
 	out := make(chan Entry)
 	go func() {
diff --git a/internal/component/loki/process/stages/replace.go b/internal/component/loki/process/stages/replace.go
index ed34927a73..a4c2ddfd2e 100644
--- a/internal/component/loki/process/stages/replace.go
+++ b/internal/component/loki/process/stages/replace.go
@@ -2,7 +2,6 @@ package stages
 
 import (
 	"bytes"
-	"errors"
 	"fmt"
 	"reflect"
 	"regexp"
@@ -10,14 +9,9 @@ import (
 	"time"
 
 	"github.com/go-kit/log"
-	"github.com/grafana/alloy/internal/runtime/logging/level"
 	"github.com/prometheus/common/model"
-)
 
-// Config Errors
-var (
-	ErrEmptyReplaceStageConfig = errors.New("empty replace stage configuration")
-	ErrEmptyReplaceStageSource = errors.New("empty source in replace stage")
+	"github.com/grafana/alloy/internal/runtime/logging/level"
 )
 
 func init() {
diff --git a/internal/component/loki/process/stages/static_labels.go b/internal/component/loki/process/stages/static_labels.go
index 8ffaaa56f1..d3f372844a 100644
--- a/internal/component/loki/process/stages/static_labels.go
+++ b/internal/component/loki/process/stages/static_labels.go
@@ -7,8 +7,9 @@ import (
 	"time"
 
 	"github.com/go-kit/log"
-	"github.com/grafana/alloy/internal/runtime/logging/level"
 	"github.com/prometheus/common/model"
+
+	"github.com/grafana/alloy/internal/runtime/logging/level"
 )
 
 // ErrEmptyStaticLabelStageConfig error returned if the config is empty.
@@ -26,7 +27,7 @@ func newStaticLabelsStage(logger log.Logger, config StaticLabelsConfig) (Stage,
 	}
 
 	return toStage(&staticLabelStage{
-		Config: config,
+		config: config,
 		logger: logger,
 	}), nil
 }
@@ -45,13 +46,13 @@ func validateLabelStaticConfig(c StaticLabelsConfig) error {
 
 // staticLabelStage implements Stage.
 type staticLabelStage struct {
-	Config StaticLabelsConfig
+	config StaticLabelsConfig
 	logger log.Logger
 }
 
 // Process implements Stage.
func (l *staticLabelStage) Process(labels model.LabelSet, extracted map[string]interface{}, t *time.Time, entry *string) { - for lName, lSrc := range l.Config.Values { + for lName, lSrc := range l.config.Values { if lSrc == nil || *lSrc == "" { continue } diff --git a/internal/component/loki/process/stages/structured_metadata.go b/internal/component/loki/process/stages/structured_metadata.go index a9a3efcb84..211f6c63a4 100644 --- a/internal/component/loki/process/stages/structured_metadata.go +++ b/internal/component/loki/process/stages/structured_metadata.go @@ -8,19 +8,19 @@ import ( ) func newStructuredMetadataStage(logger log.Logger, configs LabelsConfig) (Stage, error) { - err := validateLabelsConfig(configs) + labelsConfig, err := validateLabelsConfig(configs) if err != nil { return nil, err } return &structuredMetadataStage{ - cfgs: configs, - logger: logger, + labelsConfig: labelsConfig, + logger: logger, }, nil } type structuredMetadataStage struct { - cfgs LabelsConfig - logger log.Logger + labelsConfig map[string]string + logger log.Logger } func (s *structuredMetadataStage) Name() string { @@ -34,7 +34,7 @@ func (*structuredMetadataStage) Cleanup() { func (s *structuredMetadataStage) Run(in chan Entry) chan Entry { return RunWith(in, func(e Entry) Entry { - processLabelsConfigs(s.logger, e.Extracted, s.cfgs, func(labelName model.LabelName, labelValue model.LabelValue) { + processLabelsConfigs(s.logger, e.Extracted, s.labelsConfig, func(labelName model.LabelName, labelValue model.LabelValue) { e.StructuredMetadata = append(e.StructuredMetadata, logproto.LabelAdapter{Name: string(labelName), Value: string(labelValue)}) }) return s.extractFromLabels(e) @@ -45,8 +45,8 @@ func (s *structuredMetadataStage) extractFromLabels(e Entry) Entry { labels := e.Labels foundLabels := []model.LabelName{} - for lName, lSrc := range s.cfgs.Values { - labelKey := model.LabelName(*lSrc) + for lName, lSrc := range s.labelsConfig { + labelKey := model.LabelName(lSrc) if lValue, ok := labels[labelKey]; ok { e.StructuredMetadata = append(e.StructuredMetadata, logproto.LabelAdapter{Name: lName, Value: string(lValue)}) foundLabels = append(foundLabels, labelKey) diff --git a/internal/component/loki/process/stages/template.go b/internal/component/loki/process/stages/template.go index 00ace665d9..3a81a56023 100644 --- a/internal/component/loki/process/stages/template.go +++ b/internal/component/loki/process/stages/template.go @@ -13,16 +13,16 @@ import ( "github.com/Masterminds/sprig/v3" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/runtime/logging/level" "github.com/prometheus/common/model" + "github.com/grafana/alloy/internal/runtime/logging/level" + "golang.org/x/crypto/sha3" ) // Config Errors. 
var ( - ErrEmptyTemplateStageConfig = errors.New("template stage config cannot be empty") - ErrTemplateSourceRequired = errors.New("template source value is required") + ErrTemplateSourceRequired = errors.New("template source value is required") ) var extraFunctionMap = template.FuncMap{ diff --git a/internal/component/loki/process/stages/timestamp.go b/internal/component/loki/process/stages/timestamp.go index a2dc46c5d5..8f8b61e0c7 100644 --- a/internal/component/loki/process/stages/timestamp.go +++ b/internal/component/loki/process/stages/timestamp.go @@ -5,13 +5,13 @@ import ( "fmt" "reflect" "time" + _ "time/tzdata" // embed timezone data "github.com/go-kit/log" - "github.com/grafana/alloy/internal/runtime/logging/level" lru "github.com/hashicorp/golang-lru" "github.com/prometheus/common/model" - _ "time/tzdata" // embed timezone data + "github.com/grafana/alloy/internal/runtime/logging/level" ) // Config errors. @@ -53,7 +53,7 @@ type TimestampConfig struct { type parser func(string) (time.Time, error) -func validateTimestampConfig(cfg TimestampConfig) (parser, error) { +func validateTimestampConfig(cfg *TimestampConfig) (parser, error) { if cfg.Source == "" { return nil, ErrTimestampSourceRequired } @@ -99,7 +99,7 @@ func validateTimestampConfig(cfg TimestampConfig) (parser, error) { // newTimestampStage creates a new timestamp extraction pipeline stage. func newTimestampStage(logger log.Logger, config TimestampConfig) (Stage, error) { - parser, err := validateTimestampConfig(config) + parser, err := validateTimestampConfig(&config) if err != nil { return nil, err } diff --git a/internal/component/loki/process/stages/timestamp_test.go b/internal/component/loki/process/stages/timestamp_test.go index 6e6c2cb56a..f1dff22267 100644 --- a/internal/component/loki/process/stages/timestamp_test.go +++ b/internal/component/loki/process/stages/timestamp_test.go @@ -8,11 +8,12 @@ import ( "time" "github.com/go-kit/log" - "github.com/grafana/alloy/internal/util" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/grafana/alloy/internal/util" ) var testTimestampAlloy = ` @@ -75,9 +76,10 @@ func TestTimestampValidation(t *testing.T) { config *TimestampConfig // Note the error text validation is a little loose as it only validates with strings.HasPrefix // this is to work around different errors related to timezone loading on different systems - err error - testString string - expectedTime time.Time + err error + testString string + expectedTime time.Time + expectedConfig *TimestampConfig }{ "missing source": { config: &TimestampConfig{}, @@ -106,6 +108,18 @@ func TestTimestampValidation(t *testing.T) { testString: "2012-11-01T22:08:41-04:00", expectedTime: time.Date(2012, 11, 01, 22, 8, 41, 0, time.FixedZone("", -4*60*60)), }, + "sets default action on failure": { + config: &TimestampConfig{ + Source: "source1", + Format: time.RFC3339, + }, + err: nil, + expectedConfig: &TimestampConfig{ + Source: "source1", + Format: time.RFC3339, + ActionOnFailure: "fudge", + }, + }, "custom format with year": { config: &TimestampConfig{ Source: "source1", @@ -166,23 +180,26 @@ func TestTimestampValidation(t *testing.T) { test := test t.Run(name, func(t *testing.T) { t.Parallel() - parser, err := validateTimestampConfig(*test.config) + parser, err := validateTimestampConfig(test.config) if (err != nil) != (test.err != nil) { - t.Errorf("validateOutputConfig() expected error = %v, actual 
error = %v", test.err, err) + t.Errorf("validateTimestampConfig() expected error = %v, actual error = %v", test.err, err) return } if (err != nil) && !strings.HasPrefix(err.Error(), test.err.Error()) { - t.Errorf("validateOutputConfig() expected error = %v, actual error = %v", test.err, err) + t.Errorf("validateTimestampConfig() expected error = %v, actual error = %v", test.err, err) return } if test.testString != "" { ts, err := parser(test.testString) if err != nil { - t.Errorf("validateOutputConfig() unexpected error parsing test time: %v", err) + t.Errorf("validateTimestampConfig() unexpected error parsing test time: %v", err) return } assert.Equal(t, test.expectedTime.UnixNano(), ts.UnixNano()) } + if test.expectedConfig != nil { + assert.Equal(t, test.expectedConfig, test.config) + } }) } } @@ -328,9 +345,8 @@ func TestTimestampStage_ProcessActionOnFailure(t *testing.T) { }, "should fudge the timestamp based on the last known value on timestamp parsing failure": { config: TimestampConfig{ - Source: "time", - Format: time.RFC3339Nano, - ActionOnFailure: TimestampActionOnFailureFudge, + Source: "time", + Format: time.RFC3339Nano, }, inputEntries: []inputEntry{ {timestamp: time.Unix(1, 0), extracted: map[string]interface{}{"time": "2019-10-01T01:02:03.400000000Z"}}, @@ -345,9 +361,8 @@ func TestTimestampStage_ProcessActionOnFailure(t *testing.T) { }, "should fudge the timestamp based on the last known value for the right file target": { config: TimestampConfig{ - Source: "time", - Format: time.RFC3339Nano, - ActionOnFailure: TimestampActionOnFailureFudge, + Source: "time", + Format: time.RFC3339Nano, }, inputEntries: []inputEntry{ {timestamp: time.Unix(1, 0), labels: model.LabelSet{"filename": "/1.log"}, extracted: map[string]interface{}{"time": "2019-10-01T01:02:03.400000000Z"}}, @@ -366,9 +381,8 @@ func TestTimestampStage_ProcessActionOnFailure(t *testing.T) { }, "should keep the input timestamp if unable to fudge because there's no known valid timestamp yet": { config: TimestampConfig{ - Source: "time", - Format: time.RFC3339Nano, - ActionOnFailure: TimestampActionOnFailureFudge, + Source: "time", + Format: time.RFC3339Nano, }, inputEntries: []inputEntry{ {timestamp: time.Unix(1, 0), labels: model.LabelSet{"filename": "/1.log"}, extracted: map[string]interface{}{"time": "2019-10-01T01:02:03.400000000Z"}}, diff --git a/internal/component/loki/source/kubernetes/kubernetes.go b/internal/component/loki/source/kubernetes/kubernetes.go index e9cf40fc55..dcab367b4b 100644 --- a/internal/component/loki/source/kubernetes/kubernetes.go +++ b/internal/component/loki/source/kubernetes/kubernetes.go @@ -187,7 +187,7 @@ func (c *Component) Update(args component.Arguments) error { } func (c *Component) resyncTargets(targets []discovery.Target) { - distTargets := discovery.NewDistributedTargets(c.args.Clustering.Enabled, c.cluster, targets) + distTargets := discovery.NewDistributedTargetsWithCustomLabels(c.args.Clustering.Enabled, c.cluster, targets, kubetail.ClusteringLabels) targets = distTargets.LocalTargets() tailTargets := make([]*kubetail.Target, 0, len(targets)) diff --git a/internal/component/loki/source/kubernetes/kubernetes_test.go b/internal/component/loki/source/kubernetes/kubernetes_test.go index c834f75c3f..dd16a4fa3d 100644 --- a/internal/component/loki/source/kubernetes/kubernetes_test.go +++ b/internal/component/loki/source/kubernetes/kubernetes_test.go @@ -1,6 +1,9 @@ package kubernetes import ( + "github.com/grafana/alloy/internal/component/discovery" + 
"github.com/grafana/alloy/internal/component/loki/source/kubernetes/kubetail" + "github.com/grafana/alloy/internal/service/cluster" "testing" "github.com/grafana/alloy/syntax" @@ -43,3 +46,32 @@ func TestBadAlloyConfig(t *testing.T) { err := syntax.Unmarshal([]byte(exampleAlloyConfig), &args) require.ErrorContains(t, err, "at most one of basic_auth, authorization, oauth2, bearer_token & bearer_token_file must be configured") } + +func TestClusteringDuplicateAddress(t *testing.T) { + // Since loki.source.kubernetes looks up by pod name, if we dont use the special NewDistributedTargetsWithCustomLabels + // then we can pull logs multiple times if the address is reused for the port. This works fine for scraping since those are different + // endpoints, but from a log perspective they are the same logs. + distTargets := discovery.NewDistributedTargetsWithCustomLabels( + true, + cluster.Mock(), + []discovery.Target{ + { + "__address__": "localhost:9090", + "container": "alloy", + "pod": "grafana-k8s-monitoring-alloy-0", + "job": "integrations/alloy", + "namespace": "default", + }, + { + "__address__": "localhost:8080", + "container": "alloy", + "pod": "grafana-k8s-monitoring-alloy-0", + "job": "integrations/alloy", + "namespace": "default", + }, + }, + kubetail.ClusteringLabels, + ) + require.True(t, distTargets.TargetCount() == 1) + +} diff --git a/internal/component/loki/source/kubernetes/kubetail/target.go b/internal/component/loki/source/kubernetes/kubetail/target.go index a576c1f0ea..68f00bcc0e 100644 --- a/internal/component/loki/source/kubernetes/kubetail/target.go +++ b/internal/component/loki/source/kubernetes/kubetail/target.go @@ -25,6 +25,21 @@ const ( kubePodUID = "__meta_kubernetes_pod_uid" ) +var ClusteringLabels = []string{ + LabelPodNamespace, + LabelPodName, + LabelPodContainerName, + LabelPodUID, + kubePodNamespace, + kubePodName, + kubePodContainerName, + kubePodUID, + "container", + "pod", + "job", + "namespace", +} + // Target represents an individual container being tailed for logs. type Target struct { origLabels labels.Labels // Original discovery labels diff --git a/internal/component/otelcol/config/config_debug_metrics.go b/internal/component/otelcol/config/config_debug_metrics.go index bf7e1ffd02..2a147689e0 100644 --- a/internal/component/otelcol/config/config_debug_metrics.go +++ b/internal/component/otelcol/config/config_debug_metrics.go @@ -19,6 +19,13 @@ const ( LevelDetailed = "detailed" ) +var levels = map[Level]bool{ + LevelNone: true, + LevelBasic: true, + LevelNormal:true, + LevelDetailed: true, +} + func (l Level) Convert() (configtelemetry.Level, error) { switch l { case LevelNone: @@ -34,6 +41,16 @@ func (l Level) Convert() (configtelemetry.Level, error) { } } +// UnmarshalText implements encoding.TextUnmarshaler for Level. 
+func (l *Level) UnmarshalText(text []byte) error { + alloyLevelStr := Level(text) + if _, exists := levels[alloyLevelStr]; exists { + *l = alloyLevelStr + return nil + } + return fmt.Errorf("unrecognized debug level %q", string(text)) +} + // DebugMetricsArguments configures internal metrics of the components type DebugMetricsArguments struct { DisableHighCardinalityMetrics bool `alloy:"disable_high_cardinality_metrics,attr,optional"` diff --git a/internal/component/otelcol/exporter/awss3/awss3.go b/internal/component/otelcol/exporter/awss3/awss3.go index 6ab2b7cca5..effa87632e 100644 --- a/internal/component/otelcol/exporter/awss3/awss3.go +++ b/internal/component/otelcol/exporter/awss3/awss3.go @@ -44,6 +44,7 @@ var _ exporter.Arguments = Arguments{} func (args *Arguments) SetToDefault() { args.MarshalerName.SetToDefault() args.S3Uploader.SetToDefault() + args.DebugMetrics.SetToDefault() } func (args Arguments) Convert() (otelcomponent.Config, error) { diff --git a/internal/component/otelcol/exporter/awss3/awss3_test.go b/internal/component/otelcol/exporter/awss3/awss3_test.go index a9605e06f0..81c241bd5e 100644 --- a/internal/component/otelcol/exporter/awss3/awss3_test.go +++ b/internal/component/otelcol/exporter/awss3/awss3_test.go @@ -31,6 +31,19 @@ func TestDebugMetricsConfig(t *testing.T) { Level: otelcolCfg.LevelDetailed, }, }, + { + testName: "no_optional_debug", + agentCfg: ` + s3_uploader { + s3_bucket = "test" + s3_prefix = "logs" + } + `, + expected: otelcolCfg.DebugMetricsArguments{ + DisableHighCardinalityMetrics: true, + Level: otelcolCfg.LevelDetailed, + }, + }, { testName: "explicit_false", agentCfg: ` @@ -63,6 +76,22 @@ func TestDebugMetricsConfig(t *testing.T) { Level: otelcolCfg.LevelDetailed, }, }, + { + testName: "explicit_debug_level", + agentCfg: ` + s3_uploader { + s3_bucket = "test" + s3_prefix = "logs" + } + debug_metrics { + level = "none" + } + `, + expected: otelcolCfg.DebugMetricsArguments{ + DisableHighCardinalityMetrics: true, + Level: otelcolCfg.LevelNone, + }, + }, } for _, tc := range tests { diff --git a/internal/component/prometheus/exporter/cloudwatch/config.go b/internal/component/prometheus/exporter/cloudwatch/config.go index c1032e91d6..a4259218d1 100644 --- a/internal/component/prometheus/exporter/cloudwatch/config.go +++ b/internal/component/prometheus/exporter/cloudwatch/config.go @@ -318,15 +318,6 @@ func toYACEDiscoveryJob(rj DiscoveryJob) *yaceConf.Job { // By setting RoundingPeriod to nil, the exporter will align the start and end times for retrieving CloudWatch // metrics, with the smallest period in the retrieved batch. RoundingPeriod: nil, - JobLevelMetricFields: yaceConf.JobLevelMetricFields{ - // Set to zero job-wide scraping time settings. This should be configured at the metric level to make the data - // being fetched more explicit. - Period: 0, - Length: 0, - Delay: 0, - NilToZero: nilToZero, - AddCloudwatchTimestamp: &addCloudwatchTimestamp, - }, Metrics: toYACEMetrics(rj.Metrics, nilToZero), } return job @@ -348,15 +339,6 @@ func toYACECustomNamespaceJob(cn CustomNamespaceJob) *yaceConf.CustomNamespace { // metrics, with the smallest period in the retrieved batch. RoundingPeriod: nil, RecentlyActiveOnly: cn.RecentlyActiveOnly, - JobLevelMetricFields: yaceConf.JobLevelMetricFields{ - // Set to zero job-wide scraping time settings. This should be configured at the metric level to make the data - // being fetched more explicit. 
- Period: 0, - Length: 0, - Delay: 0, - NilToZero: nilToZero, - AddCloudwatchTimestamp: &addCloudwatchTimestamp, - }, Metrics: toYACEMetrics(cn.Metrics, nilToZero), } } diff --git a/internal/component/prometheus/exporter/cloudwatch/config_test.go b/internal/component/prometheus/exporter/cloudwatch/config_test.go index c807b40dce..ed381d8775 100644 --- a/internal/component/prometheus/exporter/cloudwatch/config_test.go +++ b/internal/component/prometheus/exporter/cloudwatch/config_test.go @@ -308,13 +308,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &defaultNilToZero, - }, ExportedTagsOnMetrics: []string{"name"}, DimensionsRegexps: []yaceModel.DimensionsRegexp{ { @@ -343,13 +336,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &defaultNilToZero, - }, ExportedTagsOnMetrics: []string{}, DimensionsRegexps: []yaceModel.DimensionsRegexp{ { @@ -383,13 +369,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &defaultNilToZero, - }, ExportedTagsOnMetrics: []string{}, DimensionsRegexps: []yaceModel.DimensionsRegexp{ { @@ -435,13 +414,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &defaultNilToZero, - }, }, }, }, @@ -546,13 +518,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &falsePtr, - }, ExportedTagsOnMetrics: []string{"name"}, DimensionsRegexps: []yaceModel.DimensionsRegexp{ { @@ -598,13 +563,6 @@ func TestCloudwatchComponentConfig(t *testing.T) { }, }, RoundingPeriod: nil, - JobLevelMetricFields: yaceModel.JobLevelMetricFields{ - Period: 0, - Length: 0, - Delay: 0, - AddCloudwatchTimestamp: &falsePtr, - NilToZero: &falsePtr, - }, }, }, }, diff --git a/internal/static/integrations/cloudwatch_exporter/config.go b/internal/static/integrations/cloudwatch_exporter/config.go index 7756bc72b2..9dcd2fd154 100644 --- a/internal/static/integrations/cloudwatch_exporter/config.go +++ b/internal/static/integrations/cloudwatch_exporter/config.go @@ -297,16 +297,6 @@ func toYACEDiscoveryJob(job *DiscoveryJob) *yaceConf.Job { // By setting RoundingPeriod to nil, the exporter will align the start and end times for retrieving CloudWatch // metrics, with the smallest period in the retrieved batch. RoundingPeriod: nil, - - JobLevelMetricFields: yaceConf.JobLevelMetricFields{ - // Set to zero job-wide scraping time settings. This should be configured at the metric level to make the data - // being fetched more explicit. 
- Period: 0, - Length: 0, - Delay: 0, - NilToZero: nilToZero, - AddCloudwatchTimestamp: &addCloudwatchTimestamp, - }, } return &yaceJob } diff --git a/internal/static/integrations/cloudwatch_exporter/config_test.go b/internal/static/integrations/cloudwatch_exporter/config_test.go index f91798c446..dce3369d3b 100644 --- a/internal/static/integrations/cloudwatch_exporter/config_test.go +++ b/internal/static/integrations/cloudwatch_exporter/config_test.go @@ -219,14 +219,6 @@ var expectedConfig = model.JobsConfig{ Regexp: regexp.MustCompile("instance/(?P[^/]+)"), DimensionsNames: []string{"InstanceId"}, }}, - JobLevelMetricFields: model.JobLevelMetricFields{ - Statistics: []string(nil), - Period: 0, - Length: 0, - Delay: 0, - NilToZero: &truePtr, - AddCloudwatchTimestamp: &falsePtr, - }, }, { Regions: []string{"us-east-2"}, Type: "AWS/S3", @@ -253,14 +245,6 @@ var expectedConfig = model.JobsConfig{ Regexp: regexp.MustCompile("(?P[^:]+)$"), DimensionsNames: []string{"BucketName"}, }}, - JobLevelMetricFields: model.JobLevelMetricFields{ - Statistics: []string(nil), - Period: 0, - Length: 0, - Delay: 0, - NilToZero: &truePtr, - AddCloudwatchTimestamp: &falsePtr, - }, }}, StaticJobs: []model.StaticJob{{ Name: "custom_tesis_metrics", @@ -314,14 +298,6 @@ var expectedConfig3 = model.JobsConfig{ Regexp: regexp.MustCompile("instance/(?P[^/]+)"), DimensionsNames: []string{"InstanceId"}, }}, - JobLevelMetricFields: model.JobLevelMetricFields{ - Statistics: []string(nil), - Period: 0, - Length: 0, - Delay: 0, - NilToZero: &falsePtr, - AddCloudwatchTimestamp: &falsePtr, - }, }, { Regions: []string{"us-east-2"}, @@ -350,14 +326,6 @@ var expectedConfig3 = model.JobsConfig{ Regexp: regexp.MustCompile("(?P[^:]+)$"), DimensionsNames: []string{"BucketName"}, }}, - JobLevelMetricFields: model.JobLevelMetricFields{ - Statistics: []string(nil), - Period: 0, - Length: 0, - Delay: 0, - NilToZero: &truePtr, - AddCloudwatchTimestamp: &falsePtr, - }, }, }, StaticJobs: []model.StaticJob{{ diff --git a/internal/web/ui/yarn.lock b/internal/web/ui/yarn.lock index eef8429c90..4458f03471 100644 --- a/internal/web/ui/yarn.lock +++ b/internal/web/ui/yarn.lock @@ -1971,16 +1971,35 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + "@jridgewell/resolve-uri@3.1.0": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== 
+"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + "@jridgewell/source-map@^0.3.2": version "0.3.3" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.3.tgz#8108265659d4c33e72ffe14e33d6cc5eb59f2fda" @@ -1989,6 +2008,14 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + "@jridgewell/sourcemap-codec@1.4.14": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" @@ -1999,6 +2026,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": version "0.3.18" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" @@ -2007,6 +2039,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@leeoniya/ufuzzy@1.0.14": version "1.0.14" resolved "https://registry.yarnpkg.com/@leeoniya/ufuzzy/-/ufuzzy-1.0.14.tgz#01572c0de9cfa1420cf6ecac76dd59db5ebd1337" @@ -2857,15 +2897,7 @@ "@types/d3-transition" "*" "@types/d3-zoom" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": +"@types/eslint@^7.29.0 || ^8.4.1": version "8.37.0" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.37.0.tgz#29cebc6c2a3ac7fea7113207bf5a828fdf4d7ef1" integrity sha512-Piet7dG2JBuDIfohBngQ3rCt7MgO9xCO4xIMKxBThCq5PNRB91IjlJ10eJVwfoNtvTErmxLzwBZ7rHZtbOMmFQ== @@ -2873,7 +2905,7 @@ "@types/estree" "*" "@types/json-schema" "*" -"@types/estree@*", "@types/estree@^1.0.0": +"@types/estree@*": version 
"1.0.1" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== @@ -2883,6 +2915,11 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== +"@types/estree@^1.0.5": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== + "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": version "4.17.35" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.35.tgz#c95dd4424f0d32e525d23812aa8ab8e4d3906c4f" @@ -3245,10 +3282,10 @@ "@typescript-eslint/types" "5.59.5" eslint-visitor-keys "^3.3.0" -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" - integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" @@ -3263,10 +3300,10 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" - integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== "@webassemblyjs/helper-numbers@1.11.6": version "1.11.6" @@ -3282,15 +3319,15 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" - integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" "@webassemblyjs/ieee754@1.11.6": version "1.11.6" @@ -3311,59 +3348,59 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== -"@webassemblyjs/wasm-edit@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" - integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/helper-wasm-section" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-opt" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - "@webassemblyjs/wast-printer" "1.11.6" - -"@webassemblyjs/wasm-gen@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" - integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== - dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" "@webassemblyjs/ieee754" "1.11.6" "@webassemblyjs/leb128" "1.11.6" "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wasm-opt@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" - integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - 
"@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" - integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/ast" "1.12.1" "@webassemblyjs/helper-api-error" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" "@webassemblyjs/ieee754" "1.11.6" "@webassemblyjs/leb128" "1.11.6" "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wast-printer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" - integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@wojtekmaj/date-utils@^1.1.3": @@ -3407,10 +3444,10 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-import-assertions@^1.7.6: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.2: version "5.3.2" @@ -3432,6 +3469,11 @@ acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== +acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + add-dom-event-listener@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/add-dom-event-listener/-/add-dom-event-listener-1.1.0.tgz#6a92db3a0dd0abc254e095c0f1dc14acbbaae310" @@ -3954,7 +3996,7 @@ browser-process-hrtime@^1.0.0: resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity 
sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: +browserslist@^4.0.0, browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: version "4.21.5" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.5.tgz#75c5dae60063ee641f977e00edd3cfb2fb7af6a7" integrity sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w== @@ -3964,6 +4006,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.8" update-browserslist-db "^1.0.10" +browserslist@^4.21.10: + version "4.23.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -4058,6 +4110,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001449, caniuse-lite@^1.0.30001464: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001487.tgz#d882d1a34d89c11aea53b8cdc791931bdab5fe1b" integrity sha512-83564Z3yWGqXsh2vaH/mhXfEM0wX+NlBCm1jYHOb97TrTWJEmPTccZgeLTPBUUb0PNVo+oomb7wkimZBIERClA== +caniuse-lite@^1.0.30001646: + version "1.0.30001662" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001662.tgz#3574b22dfec54a3f3b6787331da1040fe8e763ec" + integrity sha512-sgMUVwLmGseH8ZIrm1d51UbrhqMCH3jvS7gF/M6byuHOnKyLOBL7W8yz5V02OHwgLGA36o/AFhWzzh4uc5aqTA== + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -5286,6 +5343,11 @@ electron-to-chromium@^1.4.284: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.394.tgz#989abe104a40366755648876cde2cdeda9f31133" integrity sha512-0IbC2cfr8w5LxTz+nmn2cJTGafsK9iauV2r5A5scfzyovqLrxuLoxOHE5OBobP3oVIggJT+0JfKnw9sm87c8Hw== +electron-to-chromium@^1.5.4: + version "1.5.26" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.26.tgz#449b4fa90e83ab98abbe3b6a96c8ee395de94452" + integrity sha512-Z+OMe9M/V6Ep9n/52+b7lkvYEps26z4Yz3vjWL1V61W0q+VLF1pOHhMY17sa4roz4AWmULSI8E6SAojZA5L0YQ== + emittery@^0.10.2: version "0.10.2" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" @@ -5321,10 +5383,10 @@ encodeurl@~2.0.0: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== -enhanced-resolve@^5.14.0: - version "5.14.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.14.0.tgz#0b6c676c8a3266c99fa281e4433a706f5c0c61c4" - integrity sha512-+DCows0XNwLDcUhbFJPdlQEVnT2zXlCv7hPxemTz86/O+B/hCQ+mb7ydkPKiflpVraqLPCAfu7lDy+hBXueojw== +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved 
"https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -5455,6 +5517,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.2: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -6349,7 +6416,7 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -8266,6 +8333,11 @@ node-int64@^0.4.0: resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + node-releases@^2.0.8: version "2.0.10" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.10.tgz#c311ebae3b6a148c89b1813fd7c4d3c024ef537f" @@ -8671,6 +8743,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" + integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -10269,9 +10346,9 @@ rollup-plugin-terser@^7.0.0: terser "^5.0.0" rollup@^2.43.1: - version "2.79.1" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" - integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + version "2.79.2" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.2.tgz#f150e4a5db4b121a21a747d762f701e5e9f49090" + integrity sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ== optionalDependencies: 
fsevents "~2.3.2" @@ -10370,7 +10447,7 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.1.2: +schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.2" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.2.tgz#36c10abca6f7577aeae136c804b0c741edeadc99" integrity sha512-pvjEHOgWc9OWA/f/DE3ohBWTD6EleVLf7iFUkoSwAxttdBhB9QUebQgxER2kWueOvRJXPHNnyrvvh9eZINB8Eg== @@ -10379,6 +10456,15 @@ schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.1.2: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + schema-utils@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.1.tgz#eb2d042df8b01f4b5c276a2dfd41ba0faab72e8d" @@ -11159,7 +11245,7 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: +terser-webpack-plugin@^5.2.5: version "5.3.8" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.8.tgz#415e03d2508f7de63d59eca85c5d102838f06610" integrity sha512-WiHL3ElchZMsK27P8uIUh4604IgJyAW47LVXGbEoB21DbQcZ+OuMpGjVYnEUaqcWM6dO8uS2qUbA7LSCWqvsbg== @@ -11170,6 +11256,17 @@ terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: serialize-javascript "^6.0.1" terser "^5.16.8" +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: version "5.17.3" resolved "https://registry.yarnpkg.com/terser/-/terser-5.17.3.tgz#7f908f16b3cdf3f6c0f8338e6c1c674837f90d25" @@ -11180,6 +11277,16 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.33.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.33.0.tgz#8f9149538c7468ffcb1246cfec603c16720d2db1" + integrity sha512-JuPVaB7s1gdFKPKTelwUyRq5Sid2A3Gko2S0PncwdBq7kN9Ti9HPWDQ06MPsEDGsZeVESjKEnyGy68quBk1w6g== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -11506,6 +11613,14 @@ update-browserslist-db@^1.0.10: escalade "^3.1.1" picocolors "^1.0.0" +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + uplot@1.6.30: version "1.6.30" resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.30.tgz#1622a96b7cb2e50622c74330823c321847cbc147" @@ -11613,10 
+11728,10 @@ warning@^4.0.0: dependencies: loose-envify "^1.0.0" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -11730,33 +11845,32 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.82.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.82.1.tgz#8f38c78e53467556e8a89054ebd3ef6e9f67dbab" - integrity sha512-C6uiGQJ+Gt4RyHXXYt+v9f+SN1v83x68URwgxNQ98cvH8kxiuywWGP4XeNZ1paOzZ63aY3cTciCEQJNFUljlLw== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^1.0.0" - "@webassemblyjs/ast" "^1.11.5" - "@webassemblyjs/wasm-edit" "^1.11.5" - "@webassemblyjs/wasm-parser" "^1.11.5" + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== + dependencies: + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" + acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.14.0" + enhanced-resolve "^5.17.1" es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.1.2" + schema-utils "^3.2.0" tapable "^2.1.1" - terser-webpack-plugin "^5.3.7" - watchpack "^2.4.0" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" webpack-sources "^3.2.3" websocket-driver@>=0.5.1, websocket-driver@^0.7.4: