diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e98575f41..714f0e932 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -51,8 +51,6 @@ jobs: - go-version: 1.13.x dirs: _integrations/nrecho pin: github.com/labstack/echo@v3.3.10 - - go-version: 1.13.x - dirs: _integrations/nrgin/v1 - go-version: 1.13.x dirs: _integrations/nrgorilla/v1 - go-version: 1.13.x diff --git a/CHANGELOG.md b/CHANGELOG.md index 08009973d..0610662bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,45 @@ +## 3.20.0 + +**PLEASE READ** these changes, and verify your config settings to ensure your application behaves how you intend it to. This release changes some default behaviors in the Go agent. + +### Added +* The Module Dependency Metrics feature was added. This collects the list of modules imported into your application, to aid in management of your application dependencies, enabling easier vulnerability detection and response. + * This feature is enabled by default, but may be disabled by explicitly including `ConfigModuleDependencyMetricsEnabled(false)` in your application, or setting the equivalent environment variable or `Config` field directly. + * Modules may be explicitly excluded from the report via the `ConfigModuleDependencyMetricsIgnoredPrefixes` option. + * Excluded module names may be redacted via the `ConfigModuleDependencyMetricsRedactIgnoredPrefixes` option. This is enabled by default. +* Application Log Forwarding will now be **ENABLED** by default + * Automatic application log forwarding is now enabled by default. This means that logging frameworks wrapped with one of the [logcontext-v2 integrations](https://docs.newrelic.com/docs/apm/agents/go-agent/get-started/go-agent-compatibility-requirements/) will automatically send enriched application logs to New Relic with this version of the agent. To learn more about this feature, see the [APM logs in context documentation](https://docs.newrelic.com/docs/logs/logs-context/logs-in-context/). For additional configuration options, see the [Go logs in context documentation](https://docs.newrelic.com/docs/logs/logs-context/configure-logs-context-go). To learn how to toggle log ingestion on or off by account, see our documentation to [disable automatic](https://docs.newrelic.com/docs/logs/logs-context/disable-automatic-logging) logging via the UI or API. + * If you are using a logcontext-v2 extension, but don't want the agent to automatically forward logs, please configure `ConfigAppLogForwardingEnabled(false)` in your application. + * Environment variables have been added for all application logging config options: + * `NEW_RELIC_APPLICATION_LOGGING_ENABLED` + * `NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED` + * `NEW_RELIC_APPLICATION_LOGGING_FORWARDING_MAX_SAMPLES_STORED` + * `NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED` + * `NEW_RELIC_APPLICATION_LOGGING_LOCAL_DECORATING_ENABLED` +* Custom Event Limit Increase + * This version increases the **DEFAULT** limit of custom events from 10,000 events per minute to 30,000 events per minute. If your custom events were previously being limited, this change allows more custom events to be sent to New Relic. There is also a new configurable **MAXIMUM** limit of 100,000 events per minute. To change the limits, set `ConfigCustomInsightsEventsMaxSamplesStored(limit)` to the limit you want in your application.
To learn more about the change and how to determine if custom events are being dropped, see our Explorers Hub [post](https://discuss.newrelic.com/t/send-more-custom-events-with-the-latest-apm-agents/190497). + * New config option `ConfigCustomInsightsEventsEnabled(false)` can be used to disable the collection of custom events in your application. + +### Changed +* Changed the following names to be consistent with their usage and other related identifier names. The old names remain for backward compatibility, but new code should use the new names. + * `ConfigCodeLevelMetricsIgnoredPrefix` -> `ConfigCodeLevelMetricsIgnoredPrefixes` + * `ConfigCodeLevelMetricsPathPrefix` -> `ConfigCodeLevelMetricsPathPrefixes` + * `NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIX` -> `NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIXES` + * `NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX` -> `NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIXES` + +* When excluding information reported from CodeLevelMetrics via the `IgnoredPrefixes` or `PathPrefixes` configuration fields (e.g., by specifying `ConfigCodeLevelMetricsIgnoredPrefixes` or `ConfigCodeLevelMetricsPathPrefixes`), the names of the ignored prefixes and the configured path prefixes may now be redacted from the agent configuration information sent to New Relic. + * This redaction is enabled by default, but may be disabled by supplying a `false` value to `ConfigCodeLevelMetricsRedactPathPrefixes` or `ConfigCodeLevelMetricsRedactIgnoredPrefixes`, or by setting the corresponding `Config` fields or environment variables to `false`. + +### Fixed +* [#583](https://github.com/newrelic/go-agent/issues/583): fixed a bug in zerologWriter where comma-separated fields in log messages confused the JSON parser and could cause panics. + +### Support Statement +New Relic recommends that you upgrade the agent regularly to ensure that you’re getting the latest features and performance benefits. Additionally, older releases will no longer be supported when they reach end-of-life. + +We also recommend using the latest version of the Go language. At minimum, you should be using a version of Go that is still supported by the Go team. + +See the [Go Agent EOL Policy](https://docs.newrelic.com/docs/apm/agents/go-agent/get-started/go-agent-eol-policy/) for details about supported versions of the Go Agent and third-party components.
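As a quick orientation for the options called out above, the following is a minimal sketch of an application opting into (or out of) the new 3.20.0 defaults at startup. The option names come from this release; the app name, license key lookup, and the example prefix and limit values are placeholders.

```go
package main

import (
	"os"
	"time"

	"github.com/newrelic/go-agent/v3/newrelic"
)

func main() {
	app, err := newrelic.NewApplication(
		newrelic.ConfigAppName("Example App"),                      // placeholder
		newrelic.ConfigLicense(os.Getenv("NEW_RELIC_LICENSE_KEY")), // placeholder

		// Module Dependency Metrics is on by default; exclude internal module
		// paths and leave the excluded names redacted (also the default).
		newrelic.ConfigModuleDependencyMetricsEnabled(true),
		newrelic.ConfigModuleDependencyMetricsIgnoredPrefixes("mycompany.internal/"),
		newrelic.ConfigModuleDependencyMetricsRedactIgnoredPrefixes(true),

		// Application log forwarding is now on by default; pass false to opt out.
		newrelic.ConfigAppLogForwardingEnabled(true),

		// Raise the custom event reservoir above the new 30,000 default,
		// up to the configurable 100,000 per-minute maximum.
		newrelic.ConfigCustomInsightsEventsMaxSamplesStored(50000),

		// The renamed (plural) Code Level Metrics options.
		newrelic.ConfigCodeLevelMetricsEnabled(true),
		newrelic.ConfigCodeLevelMetricsIgnoredPrefixes("mycompany.internal/"),
		newrelic.ConfigCodeLevelMetricsPathPrefixes("myproject/src"),
	)
	if err != nil {
		panic(err)
	}
	defer app.Shutdown(10 * time.Second)

	// ... instrument transactions as usual ...
}
```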
+ ## 3.19.2 ### Changed diff --git a/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer.go b/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer.go index bcc9102fb..7d94ed141 100644 --- a/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer.go +++ b/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer.go @@ -4,6 +4,8 @@ import ( "context" "io" "strings" + "time" + "unicode" "github.com/newrelic/go-agent/v3/integrations/logcontext-v2/nrwriter" "github.com/newrelic/go-agent/v3/internal" @@ -51,22 +53,33 @@ func parseJSONLogData(log []byte) newrelic.LogData { // For this iteration of the tool, the entire log gets captured as the message data := newrelic.LogData{} data.Message = string(log) + data.Timestamp = time.Now().UnixMilli() for i := 0; i < len(log)-1; { // get key; always a string field - key, keyEnd := getStringField(log, i) - - // find index where value starts - valStart := getValueIndex(log, keyEnd) - valEnd := valStart + key, valStart := getKey(log, i) + var next int // NOTE: depending on the key, the type of field the value is can differ switch key { case zerolog.LevelFieldName: - data.Severity, valEnd = getStringField(log, valStart) + data.Severity, next = getStringValue(log, valStart+1) + case zerolog.ErrorStackFieldName: + _, next = getStackTrace(log, valStart) + default: + if i >= len(log)-1 { + return data + } + // TODO: once we update the logging spec to support custom attributes, capture these + if isStringValue(log, valStart) { + _, next = getStringValue(log, valStart+1) + } else if isNumberValue(log, valStart) { + _, next = getNumberValue(log, valStart) + } else { + return data + } } - next := nextKeyIndex(log, valEnd) if next == -1 { return data } @@ -76,34 +89,21 @@ func parseJSONLogData(log []byte) newrelic.LogData { return data } -func getValueIndex(p []byte, indx int) int { - // Find the index where the value begins - for i := indx; i < len(p)-1; i++ { - if p[i] == ':' { - return i + 1 - } - } - - return -1 +func isStringValue(p []byte, indx int) bool { + return p[indx] == '"' } -func nextKeyIndex(p []byte, indx int) int { - // Find the index where the key begins - for i := indx; i < len(p)-1; i++ { - if p[i] == ',' { - return i + 1 - } - } - - return -1 +func isNumberValue(p []byte, indx int) bool { + return unicode.IsDigit(rune(p[indx])) } -func getStringField(p []byte, indx int) (string, int) { +// zerolog keys are always JSON strings +func getKey(p []byte, indx int) (string, int) { value := strings.Builder{} i := indx // find start of string field - for ; i < len(p)-1; i++ { + for ; i < len(p); i++ { if p[i] == '"' { i += 1 break @@ -111,14 +111,76 @@ func getStringField(p []byte, indx int) (string, int) { } // parse value of string field - for ; i < len(p)-1; i++ { - if p[i] == '"' { - return value.String(), i + 1 + for ; i < len(p); i++ { + if p[i] == '"' && i+1 < len(p) && p[i+1] == ':' { + return value.String(), i + 2 } else { value.WriteByte(p[i]) } + } + + return "", -1 +} + +func isEOL(p []byte, i int) bool { + return p[i] == '}' && i+2 == len(p) +} + +func getStringValue(p []byte, indx int) (string, int) { + value := strings.Builder{} + // parse value of string field + for i := indx; i < len(p); i++ { + if p[i] == '"' && i+1 < len(p) { + if p[i+1] == ',' && i+2 < len(p) && p[i+2] == '"' { + return value.String(), i + 2 + } else if isEOL(p, i+1) { + return value.String(), -1 + } + } + value.WriteByte(p[i]) } return "", -1 } + +func getNumberValue(p []byte, indx int) (string, int) { + value := strings.Builder{} + + // parse value 
of string field + for i := indx; i < len(p); i++ { + if p[i] == ',' && i+1 < len(p) && p[i+1] == '"' { + return value.String(), i + 1 + } else if isEOL(p, i) { + return value.String(), -1 + } else { + value.WriteByte(p[i]) + } + } + + return "", -1 +} + +func getStackTrace(p []byte, indx int) (string, int) { + value := strings.Builder{} + + // parse value of string field + for i := indx; i < len(p); i++ { + if p[i] == ']' { + value.WriteByte(p[i]) + + if i+1 < len(p) { + if isEOL(p, i+1) { + return value.String(), -1 + } + if p[i+1] == ',' && i+2 < len(p) && p[i+2] == '"' { + return value.String(), i + 2 + } + } + } else { + value.WriteByte(p[i]) + } + } + + return value.String(), -1 +} diff --git a/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer_test.go b/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer_test.go index 9d99a50fe..ab0a2f974 100644 --- a/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer_test.go +++ b/v3/integrations/logcontext-v2/zerologWriter/zerolog-writer_test.go @@ -2,6 +2,8 @@ package zerologWriter import ( "bytes" + "context" + "io" "testing" "github.com/newrelic/go-agent/v3/internal" @@ -24,37 +26,103 @@ func TestParseLogData(t *testing.T) { } tests := []logTest{ { - `{"time":1516134303,"level":"debug","message":"hello world"}`, + `{"time":1516134303,"level":"debug","message":"hello world"}` + "\n", "level", newrelic.LogData{ - Message: `{"time":1516134303,"level":"debug","message":"hello world"}`, + Message: `{"time":1516134303,"level":"debug","message":"hello world"}` + "\n", Severity: "debug", }, }, { - `{"time":1516134303,"level":"info","message":"hello world"}`, + `{"time":1516134303,"level":"info","message":"hello world"}` + "\n", "level", newrelic.LogData{ - Message: `{"time":1516134303,"level":"info","message":"hello world"}`, + Message: `{"time":1516134303,"level":"info","message":"hello world"}` + "\n", Severity: "info", }, }, { - `{"time":1516133263,"level":"fatal","error":"A repo man spends his life getting into tense situations","service":"myservice","message":"Cannot start myservice"}`, + `{"time":1516133263,"level":"fatal","error":"A repo man spends his life getting into tense situations","service":"myservice","message":"Cannot start myservice"}` + "\n", "level", newrelic.LogData{ - Message: `{"time":1516133263,"level":"fatal","error":"A repo man spends his life getting into tense situations","service":"myservice","message":"Cannot start myservice"}`, + Message: `{"time":1516133263,"level":"fatal","error":"A repo man spends his life getting into tense situations","service":"myservice","message":"Cannot start myservice"}` + "\n", Severity: "fatal", }, }, { - `{"time":1516134303,"hi":"info","message":"hello world"}`, + `{"time":1516134303,"hi":"info","message":"hello world"}` + "\n", "hi", newrelic.LogData{ - Message: `{"time":1516134303,"hi":"info","message":"hello world"}`, + Message: `{"time":1516134303,"hi":"info","message":"hello world"}` + "\n", Severity: "info", }, }, + { + `{"time":1516134303,"level":"debug","message":"hello, world"}` + "\n", + "level", + newrelic.LogData{ + Message: `{"time":1516134303,"level":"debug","message":"hello, world"}` + "\n", + Severity: "debug", + }, + }, + { + `{"time":1516134303,"level":"debug","message":"hello, world { thing }"}` + "\n", + "level", + newrelic.LogData{ + Message: `{"time":1516134303,"level":"debug","message":"hello, world { thing }"}` + "\n", + Severity: "debug", + }, + }, + { + `{"time":1516134303,"level":"debug","message":"hello, world \"{ thing \"}"}` + "\n", + 
"level", + newrelic.LogData{ + Message: `{"time":1516134303,"level":"debug","message":"hello, world \"{ thing \"}"}` + "\n", + Severity: "debug", + }, + }, + { + `{"message":"hello, world \"{ thing \"}","time":1516134303,"level":"debug"}` + "\n", + "level", + newrelic.LogData{ + Message: `{"message":"hello, world \"{ thing \"}","time":1516134303,"level":"debug"}` + "\n", + Severity: "debug", + }, + }, + { + // basic stack trace test + `{"level":"error","stack":[{"func":"inner","line":"20","source":"errors.go"},{"func":"middle","line":"24","source":"errors.go"},{"func":"outer","line":"32","source":"errors.go"},{"func":"main","line":"15","source":"errors.go"},{"func":"main","line":"204","source":"proc.go"},{"func":"goexit","line":"1374","source":"asm_amd64.s"}],"error":"seems we have an error here","time":1609086683}` + "\n", + "level", + newrelic.LogData{ + Message: `{"level":"error","stack":[{"func":"inner","line":"20","source":"errors.go"},{"func":"middle","line":"24","source":"errors.go"},{"func":"outer","line":"32","source":"errors.go"},{"func":"main","line":"15","source":"errors.go"},{"func":"main","line":"204","source":"proc.go"},{"func":"goexit","line":"1374","source":"asm_amd64.s"}],"error":"seems we have an error here","time":1609086683}` + "\n", + Severity: "error", + }, + }, + { + // Tests that code can handle a stack trace, even if its at EOL + `{"level":"error","stack":[{"func":"inner","line":"20","source":"errors.go"},{"func":"middle","line":"24","source":"errors.go"},{"func":"outer","line":"32","source":"errors.go"},{"func":"main","line":"15","source":"errors.go"},{"func":"main","line":"204","source":"proc.go"},{"func":"goexit","line":"1374","source":"asm_amd64.s"}]}` + "\n", + "level", + newrelic.LogData{ + Message: `{"level":"error","stack":[{"func":"inner","line":"20","source":"errors.go"},{"func":"middle","line":"24","source":"errors.go"},{"func":"outer","line":"32","source":"errors.go"},{"func":"main","line":"15","source":"errors.go"},{"func":"main","line":"204","source":"proc.go"},{"func":"goexit","line":"1374","source":"asm_amd64.s"}]}` + "\n", + Severity: "error", + }, + }, + { + `{"level":"debug","Scale":"833 cents","Interval":833.09,"time":1562212768,"message":"Fibonacci is everywhere"}` + "\n", + "level", + newrelic.LogData{ + Message: `{"level":"debug","Scale":"833 cents","Interval":833.09,"time":1562212768,"message":"Fibonacci is everywhere"}` + "\n", + Severity: "debug", + }, + }, + { + `{"Scale":"833 cents","Interval":833.09,"time":1562212768,"message":"Fibonacci is everywhere","level":"debug"}` + "\n", + "level", + newrelic.LogData{ + Message: `{"Scale":"833 cents","Interval":833.09,"time":1562212768,"message":"Fibonacci is everywhere","level":"debug"}` + "\n", + Severity: "debug", + }, + }, } for _, test := range tests { if test.levelKey != "" { @@ -63,18 +131,70 @@ func TestParseLogData(t *testing.T) { val := parseJSONLogData([]byte(test.log)) if val.Message != test.expect.Message { - parserTestError(t, "Message", val.Message, test.expect.Message) + parserTestError(t, "Message", val.Message, test.expect.Message, test.log) } if val.Severity != test.expect.Severity { - parserTestError(t, "Severity", val.Severity, test.expect.Severity) + parserTestError(t, "Severity", val.Severity, test.expect.Severity, test.log) } zerolog.LevelFieldName = "level" } } -func parserTestError(t *testing.T, field, actual, expect string) { - t.Errorf("The parsed %s does not match the expected message: parsed \"%s\" expected \"%s\"", field, actual, expect) +func 
TestParseLogDataEscapes(t *testing.T) { + type logTest struct { + logMessage string + levelKey string + expectMessage string + } + tests := []logTest{ + { + "escape quote,\"", + "info", + `{"level":"info","message":"escape quote,\""}`, + }, + { + "escape quote,\", hi", + "info", + `{"level":"info","message":"escape quote,\", hi"}`, + }, + { + "escape quote,\",\" hi", + "info", + `{"level":"info","message":"escape quote,\",\" hi"}`, + }, + { + "escape bracket,\"}\n hi", + "info", + `{"level":"info","message":"escape bracket,\"}\n hi"}`, + }, + } + + app := integrationsupport.NewTestApp( + integrationsupport.SampleEverythingReplyFn, + newrelic.ConfigAppLogForwardingEnabled(true), + ) + + writer := New(io.Discard, app.Application) + writer.DebugLogging(true) + logger := zerolog.New(writer) + + wantLog := []internal.WantLog{} + for _, test := range tests { + logger.Info().Msg(test.logMessage) + wantLog = append(wantLog, internal.WantLog{ + Severity: zerolog.LevelInfoValue, + Message: test.expectMessage, + Timestamp: internal.MatchAnyUnixMilli, + }) + + } + app.ExpectLogEvents(t, wantLog) + +} + +func parserTestError(t *testing.T, field, actual, expect, input string) { + t.Errorf("The parsed %s does not match the expected message: parsed \"%s\" expected \"%s\"\nFailed on input: %s", field, actual, expect, input) } func TestE2E(t *testing.T) { @@ -105,6 +225,83 @@ func TestE2E(t *testing.T) { }) } +func TestE2EWithContext(t *testing.T) { + app := integrationsupport.NewTestApp( + integrationsupport.SampleEverythingReplyFn, + newrelic.ConfigAppLogDecoratingEnabled(true), + newrelic.ConfigAppLogForwardingEnabled(true), + ) + buf := bytes.NewBuffer([]byte{}) + a := New(buf, app.Application) + a.DebugLogging(true) + + txn := app.Application.StartTransaction("test") + + ctx := newrelic.NewContext(context.Background(), txn) + txnWriter := a.WithContext(ctx) + logger := zerolog.New(txnWriter) + + logger.Info().Msg("Hello World!") + traceID := txn.GetLinkingMetadata().TraceID + spanID := txn.GetLinkingMetadata().SpanID + txn.End() // must end txn to dump logs into harvest + + logcontext.ValidateDecoratedOutput(t, buf, &logcontext.DecorationExpect{ + EntityGUID: integrationsupport.TestEntityGUID, + Hostname: host, + EntityName: integrationsupport.SampleAppName, + }) + + app.ExpectLogEvents(t, []internal.WantLog{ + { + Severity: zerolog.LevelInfoValue, + Message: `{"level":"info","message":"Hello World!"}`, + Timestamp: internal.MatchAnyUnixMilli, + TraceID: traceID, + SpanID: spanID, + }, + }) +} + +func TestE2EWithTxn(t *testing.T) { + app := integrationsupport.NewTestApp( + integrationsupport.SampleEverythingReplyFn, + newrelic.ConfigAppLogDecoratingEnabled(true), + newrelic.ConfigAppLogForwardingEnabled(true), + ) + buf := bytes.NewBuffer([]byte{}) + a := New(buf, app.Application) + a.DebugLogging(true) + + txn := app.Application.StartTransaction("test") + + // create logger with txn context + txnWriter := a.WithTransaction(txn) + logger := zerolog.New(txnWriter) + + logger.Info().Msg("Hello World!") + traceID := txn.GetLinkingMetadata().TraceID + spanID := txn.GetLinkingMetadata().SpanID + txn.End() // must end txn to dump logs into harvest + + logcontext.ValidateDecoratedOutput(t, buf, &logcontext.DecorationExpect{ + EntityGUID: integrationsupport.TestEntityGUID, + Hostname: host, + EntityName: integrationsupport.SampleAppName, + }) + + app.ExpectLogEvents(t, []internal.WantLog{ + { + Severity: zerolog.LevelInfoValue, + Message: `{"level":"info","message":"Hello World!"}`, + Timestamp: 
internal.MatchAnyUnixMilli, + TraceID: traceID, + SpanID: spanID, + }, + }) + +} + func BenchmarkParseLogLevel(b *testing.B) { log := []byte(`{"time":1516134303,"level":"debug","message":"hello world"}`) diff --git a/v3/internal/connect_reply.go b/v3/internal/connect_reply.go index 41bde1564..44772a3e4 100644 --- a/v3/internal/connect_reply.go +++ b/v3/internal/connect_reply.go @@ -255,6 +255,33 @@ func CreateFullTxnName(input string, reply *ConnectReply, isWeb bool) string { return reply.SegmentTerms.apply(afterNameRules) } +// RequestEventLimits sets limits for reservoir testing +type RequestEventLimits struct { + CustomEvents int +} + +const ( + // CustomEventHarvestsPerMinute is the number of times per minute custom events are harvested + CustomEventHarvestsPerMinute = 5 +) + +// MockConnectReplyEventLimits sets up a mock connect reply to test event limits +// currently only verifies custom insights events +func (r *ConnectReply) MockConnectReplyEventLimits(limits *RequestEventLimits) { + r.SetSampleEverything() + + r.EventData.Limits.CustomEvents = uintPtr(uint(limits.CustomEvents) / (60 / CustomEventHarvestsPerMinute)) + + // The mock server will be limited to a maximum of 100,000 events per minute + if limits.CustomEvents > 100000 { + r.EventData.Limits.CustomEvents = uintPtr(uint(100000) / (60 / CustomEventHarvestsPerMinute)) + } + + if limits.CustomEvents <= 0 { + r.EventData.Limits.CustomEvents = uintPtr(uint(0) / (60 / CustomEventHarvestsPerMinute)) + } +} + // SetSampleEverything is used for testing to ensure span events get saved. func (r *ConnectReply) SetSampleEverything() { // These constants are not large enough to sample everything forever, diff --git a/v3/internal/limits.go b/v3/internal/limits.go index a7becf544..b0f8228ec 100644 --- a/v3/internal/limits.go +++ b/v3/internal/limits.go @@ -15,7 +15,7 @@ const ( MaxPayloadSizeInBytes = 1000 * 1000 // MaxCustomEvents is the maximum number of Transaction Events that can be captured // per 60-second harvest cycle - MaxCustomEvents = 10 * 1000 + MaxCustomEvents = 30 * 1000 // MaxLogEvents is the maximum number of Log Events that can be captured per // 60-second harvest cycle MaxLogEvents = 10 * 1000 diff --git a/v3/newrelic/config.go b/v3/newrelic/config.go index d9932fce8..81cffdce7 100644 --- a/v3/newrelic/config.go +++ b/v3/newrelic/config.go @@ -360,6 +360,12 @@ type Config struct { // as attributes. If this is disabled, no such metrics will be collected // or reported. Enabled bool + // RedactPathPrefixes, if true, will redact a non-nil list of PathPrefixes + // from the configuration data transmitted by the agent. + RedactPathPrefixes bool + // RedactIgnoredPrefixes, if true, will redact a non-nil list of IgnoredPrefixes + // from the configuration data transmitted by the agent. + RedactIgnoredPrefixes bool // Scope is a combination of CodeLevelMetricsScope values OR-ed together // to indicate which specific kinds of events will carry CodeLevelMetrics // data. This allows the agent to spend resources on discovering the source @@ -401,21 +407,34 @@ type Config struct { // names look like they are internal to the agent itself. IgnoredPrefixes []string } + + // ModuleDependencyMetrics controls reporting of the packages used to build the instrumented + // application, to help manage project dependencies. + ModuleDependencyMetrics struct { + // Enabled controls whether the module dependencies are collected and reported. 
+ Enabled bool + // RedactIgnoredPrefixes, if true, redacts a non-nil list of IgnoredPrefixes from + // the configuration data transmitted by the agent. + RedactIgnoredPrefixes bool + // IgnoredPrefixes is a list of module path prefixes. Any module whose import pathname + // begins with one of these prefixes is excluded from the dependency reporting. + // This list of ignored prefixes itself is not reported outside the agent. + IgnoredPrefixes []string + } } -// // CodeLevelMetricsScope is a bit-encoded value. Each such value describes // a trace type for which code-level metrics are to be collected and // reported. -// type CodeLevelMetricsScope uint32 // These constants specify the types of telemetry data to which we will // attach code level metric data. // // Currently, this includes -// TransactionCLM any kind of transaction -// AllCLM all kinds of telemetry data for which CLM is implemented (the default) +// +// TransactionCLM any kind of transaction +// AllCLM all kinds of telemetry data for which CLM is implemented (the default) // // The zero value of CodeLevelMetricsScope means "all types" as a convenience so that // new variables of this type provide the default expected behavior @@ -429,7 +448,6 @@ const ( AllCLM CodeLevelMetricsScope = 0 ) -// // CodeLevelMetricsScopeLabelToValue accepts a number of string values representing // the possible scope restrictions available for the agent, returning the // CodeLevelMetricsScope value which represents the combination of all of the given @@ -441,9 +459,9 @@ const ( // will represent any valid label strings passed, if any). // // Currently, this function recognizes the following labels: -// for AllCLM: "all" (if this value appears anywhere in the list of strings, AllCLM will be returned) -// for TransactionCLM: "transaction", "transactions", "txn" // +// for AllCLM: "all" (if this value appears anywhere in the list of strings, AllCLM will be returned) +// for TransactionCLM: "transaction", "transactions", "txn" func CodeLevelMetricsScopeLabelToValue(labels ...string) (CodeLevelMetricsScope, bool) { var scope CodeLevelMetricsScope ok := true @@ -465,10 +483,8 @@ func CodeLevelMetricsScopeLabelToValue(labels ...string) (CodeLevelMetricsScope, return scope, ok } -// // UnmarshalText allows for a CodeLevelMetricsScope value to be read from a JSON // string (or other text encodings) whose value is a comma-separated list of scope labels. -// func (s *CodeLevelMetricsScope) UnmarshalText(b []byte) error { var ok bool @@ -479,10 +495,8 @@ func (s *CodeLevelMetricsScope) UnmarshalText(b []byte) error { return nil } -// // MarshalText allows for a CodeLevelMetrics value to be encoded into JSON strings and other // text encodings. -// func (s CodeLevelMetricsScope) MarshalText() ([]byte, error) { if s == 0 || s == AllCLM { return []byte("all"), nil @@ -495,12 +509,10 @@ func (s CodeLevelMetricsScope) MarshalText() ([]byte, error) { return nil, fmt.Errorf("unrecognized bit pattern in CodeLevelMetricsScope value") } -// // CodeLevelMetricsScopeLabelListToValue is a convenience function which // is like CodeLevelMetricsScopeLabeltoValue except that it takes a single // string which contains comma-separated values instead of an already-broken-out // set of individual label strings. -// func CodeLevelMetricsScopeLabelListToValue(labels string) (CodeLevelMetricsScope, bool) { return CodeLevelMetricsScopeLabelToValue(strings.Split(labels, ",")...) 
} @@ -598,7 +610,7 @@ func defaultConfig() Config { // Application Logging Settings c.ApplicationLogging.Enabled = true - c.ApplicationLogging.Forwarding.Enabled = false + c.ApplicationLogging.Forwarding.Enabled = true c.ApplicationLogging.Forwarding.MaxSamplesStored = internal.MaxLogEvents c.ApplicationLogging.Metrics.Enabled = true c.ApplicationLogging.LocalDecorating.Enabled = false @@ -630,7 +642,13 @@ func defaultConfig() Config { // Code Level Metrics c.CodeLevelMetrics.Enabled = false + c.CodeLevelMetrics.RedactPathPrefixes = true + c.CodeLevelMetrics.RedactIgnoredPrefixes = true c.CodeLevelMetrics.Scope = AllCLM + + // Module Dependency Metrics + c.ModuleDependencyMetrics.Enabled = true + c.ModuleDependencyMetrics.RedactIgnoredPrefixes = true return c } @@ -797,12 +815,12 @@ func (s settings) MarshalJSON() ([]byte, error) { c.Logger = nil js, err := json.Marshal(c) - if nil != err { + if err != nil { return nil, err } fields := make(map[string]interface{}) err = json.Unmarshal(js, &fields) - if nil != err { + if err != nil { return nil, err } // The License field is not simply ignored by adding the `json:"-"` tag @@ -816,6 +834,28 @@ func (s settings) MarshalJSON() ([]byte, error) { fields[`browser_monitoring.loader`] = "rum" } + // Protect privacy for restricted fields + if clmConfig, ok := fields["CodeLevelMetrics"]; ok { + if clmMap, ok := clmConfig.(map[string]interface{}); ok { + if c.CodeLevelMetrics.RedactIgnoredPrefixes && c.CodeLevelMetrics.IgnoredPrefixes != nil { + delete(clmMap, "IgnoredPrefixes") + delete(clmMap, "IgnoredPrefix") + } + if c.CodeLevelMetrics.RedactPathPrefixes && c.CodeLevelMetrics.PathPrefixes != nil { + delete(clmMap, "PathPrefixes") + delete(clmMap, "PathPrefix") + } + } + } + + if mdmConfig, ok := fields["ModuleDependencyMetrics"]; ok { + if mdmMap, ok := mdmConfig.(map[string]interface{}); ok { + if c.ModuleDependencyMetrics.RedactIgnoredPrefixes && c.ModuleDependencyMetrics.IgnoredPrefixes != nil { + delete(mdmMap, "IgnoredPrefixes") + } + } + } + return json.Marshal(fields) } @@ -968,7 +1008,7 @@ func newInternalConfig(cfg Config, getenv func(string) string, environ []string) } func (c config) createConnectJSON(securityPolicies *internal.SecurityPolicies) ([]byte, error) { - env := newEnvironment() + env := newEnvironment(&c) util := utilization.Gather(utilization.Config{ DetectAWS: c.Utilization.DetectAWS, DetectAzure: c.Utilization.DetectAzure, diff --git a/v3/newrelic/config_options.go b/v3/newrelic/config_options.go index 2c02c47c6..aea7d4026 100644 --- a/v3/newrelic/config_options.go +++ b/v3/newrelic/config_options.go @@ -47,6 +47,11 @@ func ConfigCustomInsightsEventsMaxSamplesStored(limit int) ConfigOption { return func(cfg *Config) { cfg.CustomInsightsEvents.MaxSamplesStored = limit } } +// ConfigCustomInsightsEventsEnabled enables or disables the collection of custom insight events. +func ConfigCustomInsightsEventsEnabled(enabled bool) ConfigOption { + return func(cfg *Config) { cfg.CustomInsightsEvents.Enabled = enabled } +} + // ConfigDistributedTracerReservoirLimit alters the sample reservoir size (maximum // number of span events to be collected) for distributed tracing instead of // using the built-in default. @@ -73,7 +78,21 @@ func ConfigCodeLevelMetricsEnabled(enabled bool) ConfigOption { // In agent version 3.18.0 (only), this took a single string parameter. // It now takes a variable number of parameters, preserving the old call semantics // for backward compatibility while allowing for multiple IgnoredPrefix values now. 
+// +// Deprecated: New code should use ConfigCodeLevelMetricsIgnoredPrefixes instead, +// so the naming of this function is consistent with other related identifiers and +// the fact that multiple such prefixes are now used. func ConfigCodeLevelMetricsIgnoredPrefix(prefix ...string) ConfigOption { + return ConfigCodeLevelMetricsIgnoredPrefixes(prefix...) +} + +// ConfigCodeLevelMetricsIgnoredPrefixes alters the way the Code Level Metrics +// collection code searches for the right function to report for a given +// telemetry trace. It will find the innermost function whose name does NOT +// begin with any of the strings given here. By default (or if no parameters are given), +// it will ignore functions whose names imply that the function is part of +// the agent itself. +func ConfigCodeLevelMetricsIgnoredPrefixes(prefix ...string) ConfigOption { return func(cfg *Config) { cfg.CodeLevelMetrics.IgnoredPrefixes = prefix @@ -85,6 +104,38 @@ func ConfigCodeLevelMetricsIgnoredPrefix(prefix ...string) ConfigOption { } } +// ConfigCodeLevelMetricsRedactIgnoredPrefixes controls whether the names +// of ignored modules should be redacted from the agent configuration data +// reported and visible in the New Relic UI. Since one of the reasons these +// modules may be excluded is to preserve confidentiality of module or +// directory names, the default behavior (if this option is set to true) +// is to redact those names from the configuration data so that the only thing +// reported is that some list of unnamed modules were excluded from reporting. +// If this is set to false, then the names of the ignored modules will be +// listed in the configuration data, although those modules will still be ignored +// by Code Level Metrics. +func ConfigCodeLevelMetricsRedactIgnoredPrefixes(enabled bool) ConfigOption { + return func(cfg *Config) { + cfg.CodeLevelMetrics.RedactIgnoredPrefixes = enabled + } +} + +// ConfigCodeLevelMetricsRedactPathPrefixes controls whether the names +// of source code parent directories should be redacted from the agent configuration data +// reported and visible in the New Relic UI. Since one of the reasons these +// path prefixes may be excluded is to preserve confidentiality of +// directory names, the default behavior (if this option is set to true) +// is to redact those names from the configuration data so that the only thing +// reported is that some list of unnamed path prefixes were removed from reported pathnames. +// If this is set to false, then the names of the removed path prefixes will be +// listed in the configuration data, although those strings will still be removed from pathnames +// reported by Code Level Metrics. +func ConfigCodeLevelMetricsRedactPathPrefixes(enabled bool) ConfigOption { + return func(cfg *Config) { + cfg.CodeLevelMetrics.RedactPathPrefixes = enabled + } +} + // ConfigCodeLevelMetricsScope narrows the scope of where code level // metrics are to be used. By default, if CodeLevelMetrics are enabled, // they apply everywhere the agent currently supports them. To narrow // the scope, provide one or more CodeLevelMetricsScope values OR-ed together as the // scope parameter. @@ -116,7 +167,27 @@ func ConfigCodeLevelMetricsScope(scope CodeLevelMetricsScope) ConfigOption { // In agent versions 3.18.0 and 3.18.1, this took a single string parameter. // It now takes a variable number of parameters, preserving the old call semantics // for backward compatibility while allowing for multiple PathPrefix values now. 
+// +// Deprecated: New code should use ConfigCodeLevelMetricsPathPrefixes instead, +// so the naming of this function is consistent with other related identifiers +// and the fact that multiple such prefixes are now used. func ConfigCodeLevelMetricsPathPrefix(prefix ...string) ConfigOption { + return ConfigCodeLevelMetricsPathPrefixes(prefix...) +} + +// ConfigCodeLevelMetricsPathPrefixes specifies the filename pattern(s) that describe(s) the start of +// the project area(s). When reporting a source filename for Code Level Metrics, and any of the +// values in the path prefix list are found in the source filename, anything before that prefix +// is discarded from the file pathname. This will be based on the first value in the prefix list +// that is found in the pathname. +// +// For example, if +// the path prefix list is set to ["myproject/src", "myproject/extra"], then a function located in a file +// called "/usr/local/src/myproject/src/foo.go" will be reported with the +// pathname "myproject/src/foo.go". If this value is empty or none of the prefix strings +// are found in a file's pathname, the full path +// will be reported (e.g., "/usr/local/src/myproject/src/foo.go"). +func ConfigCodeLevelMetricsPathPrefixes(prefix ...string) ConfigOption { return func(cfg *Config) { cfg.CodeLevelMetrics.PathPrefixes = prefix @@ -201,6 +272,38 @@ func ConfigInfoLogger(w io.Writer) ConfigOption { return ConfigLogger(NewLogger(w)) } +// ConfigModuleDependencyMetricsEnabled controls whether the agent collects and reports +// the list of modules compiled into the instrumented application. +func ConfigModuleDependencyMetricsEnabled(enabled bool) ConfigOption { + return func(cfg *Config) { + cfg.ModuleDependencyMetrics.Enabled = enabled + } +} + +// ConfigModuleDependencyMetricsIgnoredPrefixes sets the list of module path prefix strings +// indicating which modules should be excluded from the dependency report. +func ConfigModuleDependencyMetricsIgnoredPrefixes(prefix ...string) ConfigOption { + return func(cfg *Config) { + cfg.ModuleDependencyMetrics.IgnoredPrefixes = prefix + } +} + +// ConfigModuleDependencyMetricsRedactIgnoredPrefixes controls whether the names +// of ignored module path prefixes should be redacted from the agent configuration data +// reported and visible in the New Relic UI. Since one of the reasons these +// modules may be excluded is to preserve confidentiality of module or +// directory names, the default behavior (if this option is set to true) +// is to redact those names from the configuration data so that the only thing +// reported is that some list of unnamed modules were excluded from reporting. +// If this is set to false, then the names of the ignored modules will be +// listed in the configuration data, although those modules will still be ignored +// by Module Dependency Metrics. +func ConfigModuleDependencyMetricsRedactIgnoredPrefixes(enabled bool) ConfigOption { + return func(cfg *Config) { + cfg.ModuleDependencyMetrics.RedactIgnoredPrefixes = enabled + } +} + // ConfigDebugLogger populates the config with a Logger at debug level. func ConfigDebugLogger(w io.Writer) ConfigOption { return ConfigLogger(NewDebugLogger(w)) @@ -208,29 +311,39 @@ func ConfigDebugLogger(w io.Writer) ConfigOption { // ConfigFromEnvironment populates the config based on environment variables: // -// NEW_RELIC_APP_NAME sets AppName -// NEW_RELIC_ATTRIBUTES_EXCLUDE sets Attributes.Exclude using a comma-separated list, eg. 
"request.headers.host,request.method" -// NEW_RELIC_ATTRIBUTES_INCLUDE sets Attributes.Include using a comma-separated list -// NEW_RELIC_CODE_LEVEL_METRICS_ENABLED sets CodeLevelMetrics.Enabled -// NEW_RELIC_CODE_LEVEL_METRICS_SCOPE sets CodeLevelMetrics.Scope using a comma-separated list, e.g. "transaction" -// NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIX sets CodeLevelMetrics.PathPrefixes using a comma-separated list -// NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX sets CodeLevelMetrics.IgnoredPrefixes using a comma-separated list -// NEW_RELIC_DISTRIBUTED_TRACING_ENABLED sets DistributedTracer.Enabled using strconv.ParseBool -// NEW_RELIC_ENABLED sets Enabled using strconv.ParseBool -// NEW_RELIC_HIGH_SECURITY sets HighSecurity using strconv.ParseBool -// NEW_RELIC_HOST sets Host -// NEW_RELIC_INFINITE_TRACING_SPAN_EVENTS_QUEUE_SIZE sets InfiniteTracing.SpanEvents.QueueSize using strconv.Atoi -// NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_PORT sets InfiniteTracing.TraceObserver.Port using strconv.Atoi -// NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_HOST sets InfiniteTracing.TraceObserver.Host -// NEW_RELIC_LABELS sets Labels using a semi-colon delimited string of colon-separated pairs, eg. "Server:One;DataCenter:Primary" -// NEW_RELIC_LICENSE_KEY sets License -// NEW_RELIC_LOG sets Logger to log to either "stdout" or "stderr" (filenames are not supported) -// NEW_RELIC_LOG_LEVEL controls the NEW_RELIC_LOG level, must be "debug" for debug, or empty for info -// NEW_RELIC_PROCESS_HOST_DISPLAY_NAME sets HostDisplayName -// NEW_RELIC_SECURITY_POLICIES_TOKEN sets SecurityPoliciesToken -// NEW_RELIC_UTILIZATION_BILLING_HOSTNAME sets Utilization.BillingHostname -// NEW_RELIC_UTILIZATION_LOGICAL_PROCESSORS sets Utilization.LogicalProcessors using strconv.Atoi -// NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB sets Utilization.TotalRAMMIB using strconv.Atoi +// NEW_RELIC_APP_NAME sets AppName +// NEW_RELIC_ATTRIBUTES_EXCLUDE sets Attributes.Exclude using a comma-separated list, eg. "request.headers.host,request.method" +// NEW_RELIC_ATTRIBUTES_INCLUDE sets Attributes.Include using a comma-separated list +// NEW_RELIC_MODULE_DEPENDENCY_METRICS_ENABLED sets ModuleDependencyMetrics.Enabled +// NEW_RELIC_MODULE_DEPENDENCY_METRICS_IGNORED_PREFIXES sets ModuleDependencyMetrics.IgnoredPrefixes +// NEW_RELIC_MODULE_DEPENDENCY_METRICS_REDACT_IGNORED_PREFIXES sets ModuleDependencyMetrics.RedactIgnoredPrefixes to a boolean value +// NEW_RELIC_CODE_LEVEL_METRICS_ENABLED sets CodeLevelMetrics.Enabled +// NEW_RELIC_CODE_LEVEL_METRICS_SCOPE sets CodeLevelMetrics.Scope using a comma-separated list, e.g. 
"transaction" +// NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIX sets CodeLevelMetrics.PathPrefixes using a comma-separated list +// NEW_RELIC_CODE_LEVEL_METRICS_REDACT_PATH_PREFIXES sets CodeLevelMetrics.RedactPathPrefixes to a boolean value +// NEW_RELIC_CODE_LEVEL_METRICS_REDACT_IGNORED_PREFIXES sets CodeLevelMetrics.RedactIgnoredPrefixes to a boolean value +// NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX sets CodeLevelMetrics.IgnoredPrefixes using a comma-separated list +// NEW_RELIC_DISTRIBUTED_TRACING_ENABLED sets DistributedTracer.Enabled using strconv.ParseBool +// NEW_RELIC_ENABLED sets Enabled using strconv.ParseBool +// NEW_RELIC_HIGH_SECURITY sets HighSecurity using strconv.ParseBool +// NEW_RELIC_HOST sets Host +// NEW_RELIC_INFINITE_TRACING_SPAN_EVENTS_QUEUE_SIZE sets InfiniteTracing.SpanEvents.QueueSize using strconv.Atoi +// NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_PORT sets InfiniteTracing.TraceObserver.Port using strconv.Atoi +// NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_HOST sets InfiniteTracing.TraceObserver.Host +// NEW_RELIC_LABELS sets Labels using a semi-colon delimited string of colon-separated pairs, eg. "Server:One;DataCenter:Primary" +// NEW_RELIC_LICENSE_KEY sets License +// NEW_RELIC_LOG sets Logger to log to either "stdout" or "stderr" (filenames are not supported) +// NEW_RELIC_LOG_LEVEL controls the NEW_RELIC_LOG level, must be "debug" for debug, or empty for info +// NEW_RELIC_PROCESS_HOST_DISPLAY_NAME sets HostDisplayName +// NEW_RELIC_SECURITY_POLICIES_TOKEN sets SecurityPoliciesToken +// NEW_RELIC_UTILIZATION_BILLING_HOSTNAME sets Utilization.BillingHostname +// NEW_RELIC_UTILIZATION_LOGICAL_PROCESSORS sets Utilization.LogicalProcessors using strconv.Atoi +// NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB sets Utilization.TotalRAMMIB using strconv.Atoi +// NEW_RELIC_APPLICATION_LOGGING_ENABLED sets ApplicationLogging.Enabled. Set to false to disable all application logging features. +// NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED sets ApplicationLogging.LogForwarding.Enabled. Set to false to disable in agent log forwarding. +// NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED sets ApplicationLogging.Metrics.Enabled. Set to false to disable the collection of application log metrics. +// NEW_RELIC_APPLICATION_LOGGING_LOCAL_DECORATING_ENABLED sets ApplicationLogging.LocalDecoration.Enabled. Set to true to enable local log decoration. +// NEW_RELIC_APPLICATION_LOGGING_FORWARDING_MAX_SAMPLES_STORED sets ApplicationLogging.LogForwarding.Limit. Set to 0 to prevent captured logs from being forwarded. // // This function is strict and will assign Config.Error if any of the // environment variables cannot be parsed. 
@@ -270,7 +383,11 @@ func configFromEnvironment(getenv func(string) string) ConfigOption { assignString(&cfg.AppName, "NEW_RELIC_APP_NAME") assignString(&cfg.License, "NEW_RELIC_LICENSE_KEY") + assignBool(&cfg.ModuleDependencyMetrics.Enabled, "NEW_RELIC_MODULE_DEPENDENCY_METRICS_ENABLED") + assignBool(&cfg.ModuleDependencyMetrics.RedactIgnoredPrefixes, "NEW_RELIC_MODULE_DEPENDENCY_METRICS_REDACT_IGNORED_PREFIXES") assignBool(&cfg.CodeLevelMetrics.Enabled, "NEW_RELIC_CODE_LEVEL_METRICS_ENABLED") + assignBool(&cfg.CodeLevelMetrics.RedactPathPrefixes, "NEW_RELIC_CODE_LEVEL_METRICS_REDACT_PATH_PREFIXES") + assignBool(&cfg.CodeLevelMetrics.RedactIgnoredPrefixes, "NEW_RELIC_CODE_LEVEL_METRICS_REDACT_IGNORED_PREFIXES") assignBool(&cfg.DistributedTracer.Enabled, "NEW_RELIC_DISTRIBUTED_TRACING_ENABLED") assignBool(&cfg.Enabled, "NEW_RELIC_ENABLED") assignBool(&cfg.HighSecurity, "NEW_RELIC_HIGH_SECURITY") @@ -284,6 +401,13 @@ func configFromEnvironment(getenv func(string) string) ConfigOption { assignInt(&cfg.Utilization.TotalRAMMIB, "NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB") assignInt(&cfg.InfiniteTracing.SpanEvents.QueueSize, "NEW_RELIC_INFINITE_TRACING_SPAN_EVENTS_QUEUE_SIZE") + // Application Logging Env Variables + assignBool(&cfg.ApplicationLogging.Enabled, "NEW_RELIC_APPLICATION_LOGGING_ENABLED") + assignBool(&cfg.ApplicationLogging.Forwarding.Enabled, "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED") + assignInt(&cfg.ApplicationLogging.Forwarding.MaxSamplesStored, "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_MAX_SAMPLES_STORED") + assignBool(&cfg.ApplicationLogging.Metrics.Enabled, "NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED") + assignBool(&cfg.ApplicationLogging.LocalDecorating.Enabled, "NEW_RELIC_APPLICATION_LOGGING_LOCAL_DECORATING_ENABLED") + if env := getenv("NEW_RELIC_LABELS"); env != "" { if labels := getLabels(getenv("NEW_RELIC_LABELS")); len(labels) > 0 { cfg.Labels = labels @@ -307,12 +431,20 @@ func configFromEnvironment(getenv func(string) string) ConfigOption { } } - if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX"); env != "" { + if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIXES"); env != "" { + cfg.CodeLevelMetrics.IgnoredPrefixes = strings.Split(env, ",") + } else if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX"); env != "" { cfg.CodeLevelMetrics.IgnoredPrefixes = strings.Split(env, ",") } - if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIX"); env != "" { + if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIXES"); env != "" { cfg.CodeLevelMetrics.PathPrefixes = strings.Split(env, ",") + } else if env := getenv("NEW_RELIC_CODE_LEVEL_METRICS_PATH_PREFIX"); env != "" { + cfg.CodeLevelMetrics.PathPrefixes = strings.Split(env, ",") + } + + if env := getenv("NEW_RELIC_MODULE_DEPENDENCY_METRICS_IGNORED_PREFIXES"); env != "" { + cfg.ModuleDependencyMetrics.IgnoredPrefixes = strings.Split(env, ",") } if env := getenv("NEW_RELIC_LOG"); env != "" { diff --git a/v3/newrelic/config_options_test.go b/v3/newrelic/config_options_test.go index 232cb5d30..f1f90ebb0 100644 --- a/v3/newrelic/config_options_test.go +++ b/v3/newrelic/config_options_test.go @@ -51,6 +51,8 @@ func TestConfigFromEnvironment(t *testing.T) { return "/foo/bar,/spam/spam/spam/frotz" case "NEW_RELIC_CODE_LEVEL_METRICS_IGNORED_PREFIX": return "/a/b,/c/d" + case "NEW_RELIC_APPLICATION_LOGGING_ENABLED": + return "false" } return "" }) @@ -76,11 +78,16 @@ func TestConfigFromEnvironment(t *testing.T) { expect.CodeLevelMetrics.PathPrefixes = []string{"/foo/bar", 
"/spam/spam/spam/frotz"} expect.CodeLevelMetrics.IgnoredPrefixes = []string{"/a/b", "/c/d"} + expect.ApplicationLogging.Enabled = false + expect.ApplicationLogging.Forwarding.Enabled = true + expect.ApplicationLogging.Metrics.Enabled = true + expect.ApplicationLogging.LocalDecorating.Enabled = false + cfg := defaultConfig() cfgOpt(&cfg) if !reflect.DeepEqual(expect, cfg) { - t.Error(cfg) + t.Errorf("%+v", cfg) } } diff --git a/v3/newrelic/config_test.go b/v3/newrelic/config_test.go index da95056b5..056889f07 100644 --- a/v3/newrelic/config_test.go +++ b/v3/newrelic/config_test.go @@ -131,9 +131,9 @@ func TestCopyConfigReferenceFieldsPresent(t *testing.T) { "settings":{ "AppName":"my appname", "ApplicationLogging": { - "Enabled":true, + "Enabled": true, "Forwarding": { - "Enabled": false, + "Enabled": true, "MaxSamplesStored": %d }, "LocalDecorating":{ @@ -148,7 +148,7 @@ func TestCopyConfigReferenceFieldsPresent(t *testing.T) { "Attributes":{"Enabled":false,"Exclude":["10"],"Include":["9"]}, "Enabled":true }, - "CodeLevelMetrics":{"Enabled":false,"IgnoredPrefix":"","IgnoredPrefixes":null,"PathPrefix":"","PathPrefixes":null,"Scope":"all"}, + "CodeLevelMetrics":{"Enabled":false,"IgnoredPrefix":"","IgnoredPrefixes":null,"PathPrefix":"","PathPrefixes":null,"RedactIgnoredPrefixes":true,"RedactPathPrefixes":true,"Scope":"all"}, "CrossApplicationTracer":{"Enabled":false}, "CustomInsightsEvents":{ "Enabled":true, @@ -189,6 +189,7 @@ func TestCopyConfigReferenceFieldsPresent(t *testing.T) { }, "Labels":{"zip":"zap"}, "Logger":"*logger.logFile", + "ModuleDependencyMetrics":{"Enabled":true,"IgnoredPrefixes":null,"RedactIgnoredPrefixes":true}, "RuntimeSampler":{"Enabled":true}, "SecurityPoliciesToken":"", "ServerlessMode":{ @@ -240,11 +241,12 @@ func TestCopyConfigReferenceFieldsPresent(t *testing.T) { "high_security":false, "labels":[{"label_type":"zip","label_value":"zap"}], "environment":[ + ["runtime.NumCPU",8], ["runtime.Compiler","comp"], ["runtime.GOARCH","arch"], ["runtime.GOOS","goos"], ["runtime.Version","vers"], - ["runtime.NumCPU",8] + ["Modules", null] ], "identifier":"my appname", "utilization":{ @@ -300,6 +302,7 @@ func TestCopyConfigReferenceFieldsPresent(t *testing.T) { } out := standardizeNumbers(string(js)) if out != expect { + t.Error(expect) t.Error(out) } } @@ -324,7 +327,7 @@ func TestCopyConfigReferenceFieldsAbsent(t *testing.T) { "ApplicationLogging": { "Enabled": true, "Forwarding": { - "Enabled": false, + "Enabled": true, "MaxSamplesStored": %d }, "LocalDecorating":{ @@ -343,7 +346,7 @@ func TestCopyConfigReferenceFieldsAbsent(t *testing.T) { }, "Enabled":true }, - "CodeLevelMetrics":{"Enabled":false,"IgnoredPrefix":"","IgnoredPrefixes":null,"PathPrefix":"","PathPrefixes":null,"Scope":"all"}, + "CodeLevelMetrics":{"Enabled":false,"IgnoredPrefix":"","IgnoredPrefixes":null,"PathPrefix":"","PathPrefixes":null,"RedactIgnoredPrefixes":true,"RedactPathPrefixes":true,"Scope":"all"}, "CrossApplicationTracer":{"Enabled":false}, "CustomInsightsEvents":{ "Enabled":true, @@ -384,6 +387,7 @@ func TestCopyConfigReferenceFieldsAbsent(t *testing.T) { }, "Labels":null, "Logger":null, + "ModuleDependencyMetrics":{"Enabled":true,"IgnoredPrefixes":null,"RedactIgnoredPrefixes":true}, "RuntimeSampler":{"Enabled":true}, "SecurityPoliciesToken":"", "ServerlessMode":{ @@ -432,11 +436,12 @@ func TestCopyConfigReferenceFieldsAbsent(t *testing.T) { "app_name":["my appname"], "high_security":false, "environment":[ + ["runtime.NumCPU",8], ["runtime.Compiler","comp"], ["runtime.GOARCH","arch"], 
["runtime.GOOS","goos"], ["runtime.Version","vers"], - ["runtime.NumCPU",8] + ["Modules", null] ], "identifier":"my appname", "utilization":{ diff --git a/v3/newrelic/environment.go b/v3/newrelic/environment.go index 2e5c9182b..9ab5a42e8 100644 --- a/v3/newrelic/environment.go +++ b/v3/newrelic/environment.go @@ -5,17 +5,21 @@ package newrelic import ( "encoding/json" + "fmt" "reflect" "runtime" + "runtime/debug" + "strings" ) // environment describes the application's environment. type environment struct { - Compiler string `env:"runtime.Compiler"` - GOARCH string `env:"runtime.GOARCH"` - GOOS string `env:"runtime.GOOS"` - Version string `env:"runtime.Version"` - NumCPU int `env:"runtime.NumCPU"` + NumCPU int `env:"runtime.NumCPU"` + Compiler string `env:"runtime.Compiler"` + GOARCH string `env:"runtime.GOARCH"` + GOOS string `env:"runtime.GOOS"` + Version string `env:"runtime.Version"` + Modules []string `env:"Modules"` } var ( @@ -30,16 +34,58 @@ var ( ) // newEnvironment returns a new Environment. -func newEnvironment() environment { +func newEnvironment(c *config) environment { return environment{ Compiler: runtime.Compiler, GOARCH: runtime.GOARCH, GOOS: runtime.GOOS, Version: runtime.Version(), NumCPU: runtime.NumCPU(), + Modules: getDependencyModuleList(c), } } +// indended for testing purposes. This just returns the formatted +// modules subject to the user's filtering rules. +func injectDependencyModuleList(c *config, modules []*debug.Module) []string { + var modList []string + + if c != nil && c.ModuleDependencyMetrics.Enabled { + for _, module := range modules { + if module != nil && includeModule(module.Path, c.ModuleDependencyMetrics.IgnoredPrefixes) { + modList = append(modList, fmt.Sprintf("%s(%s)", module.Path, module.Version)) + } + } + } + return modList +} + +func getDependencyModuleList(c *config) []string { + var modList []string + + if c != nil && c.ModuleDependencyMetrics.Enabled { + info, ok := debug.ReadBuildInfo() + if info != nil && ok { + for _, module := range info.Deps { + if module != nil && includeModule(module.Path, c.ModuleDependencyMetrics.IgnoredPrefixes) { + modList = append(modList, fmt.Sprintf("%s(%s)", module.Path, module.Version)) + } + } + } + } + return modList +} + +func includeModule(name string, ignoredModulePrefixes []string) bool { + for _, excluded := range ignoredModulePrefixes { + if strings.HasPrefix(name, excluded) { + return false + } + } + + return true +} + // MarshalJSON prepares Environment JSON in the format expected by the collector // during the connect command. 
func (e environment) MarshalJSON() ([]byte, error) { diff --git a/v3/newrelic/environment_test.go b/v3/newrelic/environment_test.go index dc07caed4..bc3c88d7e 100644 --- a/v3/newrelic/environment_test.go +++ b/v3/newrelic/environment_test.go @@ -5,7 +5,9 @@ package newrelic import ( "encoding/json" + "regexp" "runtime" + "runtime/debug" "testing" "github.com/newrelic/go-agent/v3/internal" @@ -17,18 +19,19 @@ func TestMarshalEnvironment(t *testing.T) { t.Fatal(err) } expect := internal.CompactJSONString(`[ + ["runtime.NumCPU",8], ["runtime.Compiler","comp"], ["runtime.GOARCH","arch"], ["runtime.GOOS","goos"], ["runtime.Version","vers"], - ["runtime.NumCPU",8]]`) + ["Modules",null]]`) if string(js) != expect { t.Fatal(string(js)) } } func TestEnvironmentFields(t *testing.T) { - env := newEnvironment() + env := newEnvironment(nil) if env.Compiler != runtime.Compiler { t.Error(env.Compiler, runtime.Compiler) } @@ -44,4 +47,109 @@ func TestEnvironmentFields(t *testing.T) { if env.NumCPU != runtime.NumCPU() { t.Error(env.NumCPU, runtime.NumCPU()) } + if env.Modules != nil { + t.Error(env.Modules, nil) + } +} + +func TestModuleDependency(t *testing.T) { + cfg := config{Config: defaultConfig()} + + // check that the default is to be enabled + if !cfg.ModuleDependencyMetrics.Enabled { + t.Error("MDM should be enabled, was", cfg.ModuleDependencyMetrics.Enabled) + } + + // if disabled, we shouldn't get any data + cfg.ModuleDependencyMetrics.Enabled = false + env := newEnvironment(&cfg) + if env.Modules != nil && len(env.Modules) != 0 { + t.Error("MDM module list not empty:", env.Modules) + } + + // enabled, and we should see our list of modules reported. + // first, get the list of modules we should expect to see. + // of course, we can't do that from a unit test, so we'll mock up a set + // of modules to at least check that the various options work. + expectedModules := make(map[string]*debug.Module) + mockedModuleList := []*debug.Module{ + &debug.Module{Path: "example/path/to/module", Version: "v1.2.3"}, + &debug.Module{Path: "github.com/another/module", Version: "v0.1.2"}, + &debug.Module{Path: "some/development/module", Version: "(develop)"}, + } + for _, module := range mockedModuleList { + expectedModules[module.Path] = module + } + + cfg.ModuleDependencyMetrics.Enabled = true + env = newEnvironment(&cfg) + env.Modules = injectDependencyModuleList(&cfg, mockedModuleList) + checkModuleListsMatch(t, expectedModules, env.Modules, "full module list") + + // try to elide some modules now + cfg.ModuleDependencyMetrics.IgnoredPrefixes = []string{"github.com"} + env = newEnvironment(&cfg) + env.Modules = injectDependencyModuleList(&cfg, mockedModuleList) + delete(expectedModules, "github.com/another/module") + checkModuleListsMatch(t, expectedModules, env.Modules, "reduced module list") + + // more... 
+ cfg.ModuleDependencyMetrics.IgnoredPrefixes = []string{"github.com", "exam"} + env = newEnvironment(&cfg) + env.Modules = injectDependencyModuleList(&cfg, mockedModuleList) + delete(expectedModules, "example/path/to/module") + checkModuleListsMatch(t, expectedModules, env.Modules, "reduced module list") +} + +func checkModuleListsMatch(t *testing.T, expected map[string]*debug.Module, actual []string, message string) { + if expected == nil { + t.Error(message, "expected list is nil") + } + if len(expected) > 0 && actual == nil { + t.Error(message, "actual list is nil") + } + if len(expected) != len(actual) { + t.Error(message, "actual list has", len(actual), "module(s) but expected", len(expected)) + } + + modulePattern := regexp.MustCompile(`^(.+?)\((.+)\)$`) + checked := make(map[string]bool) + for path, _ := range expected { + checked[path] = false + } + + for i, actualName := range actual { + matches := modulePattern.FindStringSubmatch(actualName) + if matches == nil || len(matches) != 3 { + t.Errorf("%s: actual module element #%d could not be parsed: \"%v\"", message, i, actualName) + continue + } + + if module, present := expected[matches[1]]; present { + if matches[1] != module.Path { + t.Errorf("%s: actual module element #%d \"%v\" mismatch to path \"%v\" which really shouldn't be possible", + message, i, matches[1], module.Path) + continue + } + if matches[2] != module.Version { + t.Errorf("%s: actual module element #%d \"%v\" version \"%v\" mismatch to expected version \"%v\"", + message, i, matches[1], matches[2], module.Version) + continue + } + if checked[matches[1]] { + t.Errorf("%s: actual module element #%d \"%v\" was already seen earlier in the module list", + message, i, matches[1]) + continue + } + checked[matches[1]] = true + } else { + t.Errorf("%s: actual module element #%d \"%v\" unexpected", message, i, matches[1]) + } + } + + for expectedName, wasChecked := range checked { + if !wasChecked { + t.Errorf("%s: did not see expected module \"%v\"", message, expectedName) + } + } } diff --git a/v3/newrelic/log_event.go b/v3/newrelic/log_event.go index 7aa051d3e..ee81effff 100644 --- a/v3/newrelic/log_event.go +++ b/v3/newrelic/log_event.go @@ -116,7 +116,7 @@ type logEnricherConfig struct { txn *Transaction } -// EnricherOption is a function that configures the enricher based on the source of data it recieves. +// EnricherOption is a function that configures the enricher based on the source of data it receives. type EnricherOption func(*logEnricherConfig) // FromApp configures the log enricher to build a linking payload from an application. @@ -124,7 +124,7 @@ func FromApp(app *Application) EnricherOption { return func(cfg *logEnricherConfig) { cfg.app = app } } -// FromTxn configres the log enricher to build a linking payload from a transaction. +// FromTxn configures the log enricher to build a linking payload from a transaction. func FromTxn(txn *Transaction) EnricherOption { return func(cfg *logEnricherConfig) { cfg.txn = txn } } @@ -137,7 +137,7 @@ type linkingMetadata struct { entityName string } -// EnrichLog appends newrelic linnking metadata to a log stored in a byte buffer. +// EnrichLog appends newrelic linking metadata to a log stored in a byte buffer. // This should only be used by plugins built for frameworks. 
func EnrichLog(buf *bytes.Buffer, opts EnricherOption) error { config := logEnricherConfig{} diff --git a/v3/newrelic/reservoir_limits_test.go b/v3/newrelic/reservoir_limits_test.go new file mode 100644 index 000000000..3c5f8f8e6 --- /dev/null +++ b/v3/newrelic/reservoir_limits_test.go @@ -0,0 +1,118 @@ +// Copyright 2020 New Relic Corporation. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +package newrelic + +import ( + "testing" + + "github.com/newrelic/go-agent/v3/internal" +) + +// Check Default Value +func TestCustomLimitsBasic(t *testing.T) { + limit := internal.MaxCustomEvents + limits := &internal.RequestEventLimits{ + CustomEvents: limit, + } + // This function will mock a connect reply from the server + mockReplyFunction := func(reply *internal.ConnectReply) { + reply.MockConnectReplyEventLimits(limits) + } + testApp := newTestApp( + mockReplyFunction, + ConfigCustomInsightsEventsMaxSamplesStored(limit), + ) + + customEventRate := limit / (60 / internal.CustomEventHarvestsPerMinute) + + // Check if custom event queue capacity == rate + if customEventRate != testApp.app.testHarvest.CustomEvents.capacity() { + t.Errorf("Custom Events Rate is not equal to harvest: expected %d, actual %d", customEventRate, testApp.app.testHarvest.CustomEvents.capacity()) + } +} +func TestCustomEventLimitUserSet(t *testing.T) { + limit := 7000 + limits := &internal.RequestEventLimits{ + CustomEvents: limit, + } + mockReplyFunction := func(reply *internal.ConnectReply) { + reply.MockConnectReplyEventLimits(limits) + } + testApp := newTestApp( + mockReplyFunction, + ConfigCustomInsightsEventsMaxSamplesStored(limit), + ) + + customEventRate := limit / (60 / internal.CustomEventHarvestsPerMinute) + + if customEventRate != testApp.app.testHarvest.CustomEvents.capacity() { + t.Errorf("Custom Events Rate is not equal to harvest: expected %d, actual %d", customEventRate, testApp.app.testHarvest.CustomEvents.capacity()) + } +} + +func TestCustomLimitEnthusiast(t *testing.T) { + limit := 100000 + limits := &internal.RequestEventLimits{ + CustomEvents: limit, + } + // This function will mock a connect reply from the server + mockReplyFunction := func(reply *internal.ConnectReply) { + reply.MockConnectReplyEventLimits(limits) + } + testApp := newTestApp( + mockReplyFunction, + ConfigCustomInsightsEventsMaxSamplesStored(limit), + ) + + customEventRate := limit / (60 / internal.CustomEventHarvestsPerMinute) + + // Check if custom event queue capacity == rate + if customEventRate != testApp.app.testHarvest.CustomEvents.capacity() { + t.Errorf("Custom Events Rate is not equal to harvest: expected %d, actual %d", customEventRate, testApp.app.testHarvest.CustomEvents.capacity()) + } +} + +func TestCustomLimitsTypo(t *testing.T) { + limit := 1000000 + limits := &internal.RequestEventLimits{ + CustomEvents: limit, + } + // This function will mock a connect reply from the server + mockReplyFunction := func(reply *internal.ConnectReply) { + reply.MockConnectReplyEventLimits(limits) + } + testApp := newTestApp( + mockReplyFunction, + ConfigCustomInsightsEventsMaxSamplesStored(limit), + ) + + customEventRate := 100000 / (60 / internal.CustomEventHarvestsPerMinute) + + // Check if custom event queue capacity == rate + if customEventRate != testApp.app.testHarvest.CustomEvents.capacity() { + t.Errorf("Custom Events Rate is not equal to harvest: expected %d, actual %d", 8333, testApp.app.testHarvest.CustomEvents.capacity()) + } +} + +func TestCustomLimitZero(t *testing.T) { + limit := 0 + limits := 
&internal.RequestEventLimits{ + CustomEvents: limit, + } + // This function will mock a connect reply from the server + mockReplyFunction := func(reply *internal.ConnectReply) { + reply.MockConnectReplyEventLimits(limits) + } + testApp := newTestApp( + mockReplyFunction, + ConfigCustomInsightsEventsMaxSamplesStored(limit), + ) + + customEventRate := limit / (60 / internal.CustomEventHarvestsPerMinute) + + // Check if custom event queue capacity == rate + if customEventRate != testApp.app.testHarvest.CustomEvents.capacity() { + t.Errorf("Custom Events Rate is not equal to harvest: expected %d, actual %d", customEventRate, testApp.app.testHarvest.CustomEvents.capacity()) + } +} diff --git a/v3/newrelic/version.go b/v3/newrelic/version.go index 255032812..135afe0a8 100644 --- a/v3/newrelic/version.go +++ b/v3/newrelic/version.go @@ -11,7 +11,7 @@ import ( const ( // Version is the full string version of this Go Agent. - Version = "3.19.2" + Version = "3.20.0" ) var (