diff --git a/constraint/pkg/client/client.go b/constraint/pkg/client/client.go
index 8d9cd57b4..f22aee674 100644
--- a/constraint/pkg/client/client.go
+++ b/constraint/pkg/client/client.go
@@ -16,6 +16,7 @@ import (
 	clienterrors "github.com/open-policy-agent/frameworks/constraint/pkg/client/errors"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/core/templates"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/handler"
+	"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
 	"k8s.io/apiextensions-apiserver/pkg/apis/apiextensions"
 	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@@ -670,7 +671,7 @@ func (c *Client) Review(ctx context.Context, obj interface{}, opts ...drivers.Qu
 	for target, review := range reviews {
 		constraints := constraintsByTarget[target]
 
-		resp, err := c.review(ctx, target, constraints, review, opts...)
+		resp, stats, err := c.review(ctx, target, constraints, review, opts...)
 		if err != nil {
 			errMap.Add(target, err)
 			continue
@@ -684,6 +685,18 @@ func (c *Client) Review(ctx context.Context, obj interface{}, opts ...drivers.Qu
 		resp.Sort()
 
 		responses.ByTarget[target] = resp
+		if stats != nil {
+			// add the target label to these stats for future collation.
+			targetLabel := &instrumentation.Label{Name: "target", Value: target}
+			for _, stat := range stats {
+				if stat.Labels == nil || len(stat.Labels) == 0 {
+					stat.Labels = []*instrumentation.Label{targetLabel}
+				} else {
+					stat.Labels = append(stat.Labels, targetLabel)
+				}
+			}
+			responses.StatsEntries = append(responses.StatsEntries, stats...)
+		}
 	}
 
 	if len(errMap) == 0 {
@@ -693,8 +706,9 @@
 	return responses, &errMap
 }
 
-func (c *Client) review(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) (*types.Response, error) {
+func (c *Client) review(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) (*types.Response, []*instrumentation.StatsEntry, error) {
 	var results []*types.Result
+	var stats []*instrumentation.StatsEntry
 	var tracesBuilder strings.Builder
 
 	errs := &errors.ErrorMap{}
@@ -703,11 +717,11 @@ func (c *Client) review(ctx context.Context, target string, constraints []*unstr
 	for _, constraint := range constraints {
 		template, ok := c.templates[strings.ToLower(constraint.GetObjectKind().GroupVersionKind().Kind)]
 		if !ok {
-			return nil, fmt.Errorf("%w: while loading driver for constraint %s", ErrMissingConstraintTemplate, constraint.GetName())
+			return nil, nil, fmt.Errorf("%w: while loading driver for constraint %s", ErrMissingConstraintTemplate, constraint.GetName())
 		}
 		driver := c.driverForTemplate(template.template)
 		if driver == "" {
-			return nil, fmt.Errorf("%w: while loading driver for constraint %s", clienterrors.ErrNoDriver, constraint.GetName())
+			return nil, nil, fmt.Errorf("%w: while loading driver for constraint %s", clienterrors.ErrNoDriver, constraint.GetName())
 		}
 		driverToConstraints[driver] = append(driverToConstraints[driver], constraint)
 	}
@@ -716,17 +730,21 @@
 		if len(driverToConstraints[driverName]) == 0 {
 			continue
 		}
-		driverResults, trace, err := driver.Query(ctx, target, driverToConstraints[driverName], review, opts...)
+		qr, err := driver.Query(ctx, target, driverToConstraints[driverName], review, opts...)
 		if err != nil {
 			errs.Add(driverName, err)
 			continue
 		}
-		results = append(results, driverResults...)
+		if qr != nil {
+			results = append(results, qr.Results...)
 
-		if trace != nil {
-			tracesBuilder.WriteString(fmt.Sprintf("DRIVER %s:\n\n", driverName))
-			tracesBuilder.WriteString(*trace)
-			tracesBuilder.WriteString("\n\n")
+			stats = append(stats, qr.StatsEntries...)
+
+			if qr.Trace != nil {
+				tracesBuilder.WriteString(fmt.Sprintf("DRIVER %s:\n\n", driverName))
+				tracesBuilder.WriteString(*qr.Trace)
+				tracesBuilder.WriteString("\n\n")
+			}
 		}
 	}
 
@@ -749,7 +767,7 @@
 		Trace:   trace,
 		Target:  target,
 		Results: results,
-	}, errRet
+	}, stats, errRet
 }
 
 // Dump dumps the state of OPA to aid in debugging.
@@ -767,6 +785,25 @@ func (c *Client) Dump(ctx context.Context) (string, error) {
 	return dumpBuilder.String(), nil
 }
 
+func (c *Client) GetDescriptionForStat(source instrumentation.Source, statName string) string {
+	if source.Type != instrumentation.EngineSourceType {
+		// only handle engine source for now
+		return instrumentation.UnknownDescription
+	}
+
+	driver, ok := c.drivers[source.Value]
+	if !ok {
+		return instrumentation.UnknownDescription
+	}
+
+	desc, err := driver.GetDescriptionForStat(statName)
+	if err != nil {
+		return instrumentation.UnknownDescription
+	}
+
+	return desc
+}
+
 // knownTargets returns a sorted list of known target names.
 func (c *Client) knownTargets() []string {
 	var knownTargets []string
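Taken together, the client.go changes above route per-query stats into Responses.StatsEntries (with a "target" label appended for collation) and add a lookup for human-readable stat descriptions. A minimal caller-side sketch, not part of this patch: it assumes an already constructed *client.Client named c, a review object obj valid for the registered target handler, and the helper name reviewWithStats is invented here.

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/frameworks/constraint/pkg/client"
	"github.com/open-policy-agent/frameworks/constraint/pkg/client/drivers"
)

// reviewWithStats enables stats for a single Review call and prints each
// returned StatsEntry together with the engine-provided description.
func reviewWithStats(ctx context.Context, c *client.Client, obj interface{}) error {
	responses, err := c.Review(ctx, obj, drivers.Stats(true))
	if err != nil {
		return err
	}
	for _, entry := range responses.StatsEntries {
		for _, stat := range entry.Stats {
			// GetDescriptionForStat falls back to instrumentation.UnknownDescription
			// for sources or stat names the loaded drivers do not recognize.
			desc := c.GetDescriptionForStat(stat.Source, stat.Name)
			fmt.Printf("scope=%s statsFor=%s labels=%v %s=%v (%s)\n",
				entry.Scope, entry.StatsFor, entry.Labels, stat.Name, stat.Value, desc)
		}
	}
	return nil
}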
diff --git a/constraint/pkg/client/drivers/fake/fake.go b/constraint/pkg/client/drivers/fake/fake.go
index 8cdfb479d..d9407401d 100644
--- a/constraint/pkg/client/drivers/fake/fake.go
+++ b/constraint/pkg/client/drivers/fake/fake.go
@@ -197,7 +197,7 @@ func (d *Driver) RemoveData(ctx context.Context, target string, path storage.Pat
 	return nil
 }
 
-func (d *Driver) Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) ([]*types.Result, *string, error) {
+func (d *Driver) Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) (*drivers.QueryResponse, error) {
 	results := []*types.Result{}
 	for i := range constraints {
 		constraint := constraints[i]
@@ -209,9 +209,14 @@ func (d *Driver) Query(ctx context.Context, target string, constraints []*unstru
 		}
 		results = append(results, result)
 	}
-	return results, nil, nil
+
+	return &drivers.QueryResponse{Results: results}, nil
 }
 
 func (d *Driver) Dump(ctx context.Context) (string, error) {
 	return "", nil
 }
+
+func (d *Driver) GetDescriptionForStat(_ string) (string, error) {
+	return "", fmt.Errorf("unknown stat name")
+}
diff --git a/constraint/pkg/client/drivers/interface.go b/constraint/pkg/client/drivers/interface.go
index 4f8da7caf..090c732a5 100644
--- a/constraint/pkg/client/drivers/interface.go
+++ b/constraint/pkg/client/drivers/interface.go
@@ -4,7 +4,6 @@ import (
 	"context"
 
 	"github.com/open-policy-agent/frameworks/constraint/pkg/core/templates"
-	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
 	"github.com/open-policy-agent/opa/storage"
 	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
 )
@@ -42,15 +41,17 @@ type Driver interface {
 	RemoveData(ctx context.Context, target string, path storage.Path) error
 
 	// Query runs the passed target's Constraints against review.
-	//
-	// Returns results for each violated Constraint.
-	// Returns a trace if specified in query options or enabled at Driver creation.
+	// Returns a QueryResponse type.
 	// Returns an error if there was a problem executing the Query.
-	Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...QueryOpt) ([]*types.Result, *string, error)
+	Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...QueryOpt) (*QueryResponse, error)
 
 	// Dump outputs the entire state of compiled Templates, added Constraints, and
 	// cached data used for referential Constraints.
 	Dump(ctx context.Context) (string, error)
+
+	// GetDescriptionForStat returns the description for a given stat name
+	// or errors out for an unknown stat.
+	GetDescriptionForStat(statName string) (string, error)
 }
 
 // ConstraintKey uniquely identifies a Constraint.
diff --git a/constraint/pkg/client/drivers/query_opts.go b/constraint/pkg/client/drivers/query_opts.go
index 404c32ebb..4b244cc2f 100644
--- a/constraint/pkg/client/drivers/query_opts.go
+++ b/constraint/pkg/client/drivers/query_opts.go
@@ -2,9 +2,10 @@ package drivers
 
 type QueryCfg struct {
 	TracingEnabled bool
+	StatsEnabled   bool
 }
 
-// QueryOpt specifies optional arguments for Rego queries.
+// QueryOpt specifies optional arguments for Query driver calls.
type QueryOpt func(*QueryCfg)
 
 // Tracing enables Rego tracing for a single query.
@@ -14,3 +15,12 @@ func Tracing(enabled bool) QueryOpt {
 		cfg.TracingEnabled = enabled
 	}
 }
+
+// Stats(true) enables the driver to return evaluation stats for a single
+// query. If stats is enabled for the Driver at construction time, then
+// Stats(false) does not disable Stats for this single query.
+func Stats(enabled bool) QueryOpt {
+	return func(cfg *QueryCfg) {
+		cfg.StatsEnabled = enabled
+	}
+}
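For driver authors, the interface change above means Query now returns a single *QueryResponse and every Driver must also implement GetDescriptionForStat. The sketch below shows how a hypothetical out-of-tree driver might honor the new option; myDriver and myStatName are invented names, imports and the remaining Driver methods are omitted, and the option handling mirrors what the Rego driver does further down.

// Only the stats-related parts of the Driver interface are sketched here.
func (d *myDriver) Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) (*drivers.QueryResponse, error) {
	// Fold the per-query options into a QueryCfg, the same way the Rego driver does.
	cfg := &drivers.QueryCfg{}
	for _, opt := range opts {
		opt(cfg)
	}

	resp := &drivers.QueryResponse{}
	// ... evaluate constraints and append violations to resp.Results ...
	if cfg.StatsEnabled {
		// ... append engine-specific stats to resp.StatsEntries ...
	}
	return resp, nil
}

func (d *myDriver) GetDescriptionForStat(statName string) (string, error) {
	switch statName {
	case "myStatName": // hypothetical stat emitted by this driver
		return "description of myStatName", nil
	default:
		return "", fmt.Errorf("unknown stat name")
	}
}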
diff --git a/constraint/pkg/client/drivers/rego/args.go b/constraint/pkg/client/drivers/rego/args.go
index 9868cc308..59215355b 100644
--- a/constraint/pkg/client/drivers/rego/args.go
+++ b/constraint/pkg/client/drivers/rego/args.go
@@ -154,6 +154,16 @@ func Externs(externs ...string) Arg {
 	}
 }
 
+// GatherStats starts collecting various stats around the
+// underlying engine's calls.
+func GatherStats() Arg {
+	return func(driver *Driver) error {
+		driver.gatherStats = true
+
+		return nil
+	}
+}
+
 // Currently rules should only access data.inventory.
 var validDataFields = map[string]bool{
 	"inventory": true,
diff --git a/constraint/pkg/client/drivers/rego/driver.go b/constraint/pkg/client/drivers/rego/driver.go
index c1b546e12..751b3a8e2 100644
--- a/constraint/pkg/client/drivers/rego/driver.go
+++ b/constraint/pkg/client/drivers/rego/driver.go
@@ -17,6 +17,7 @@ import (
 	clienterrors "github.com/open-policy-agent/frameworks/constraint/pkg/client/errors"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/core/templates"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/externaldata"
+	"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
 	"github.com/open-policy-agent/opa/ast"
 	"github.com/open-policy-agent/opa/rego"
@@ -31,6 +32,15 @@ import (
 const (
 	libRoot   = "data.lib"
 	violation = "violation"
+
+	templateRunTimeNS     = "templateRunTimeNS"
+	templateRunTimeNsDesc = "the number of nanoseconds it took to evaluate all constraints for a template"
+
+	constraintCountName        = "constraintCount"
+	constraintCountDescription = "the number of constraints that were evaluated for the given constraint kind"
+
+	tracingEnabledLabelName = "TracingEnabled"
+	printEnabledLabelName   = "PrintEnabled"
 )
 
 var _ drivers.Driver = &Driver{}
@@ -72,14 +82,9 @@ type Driver struct {
 
 	// clientCertWatcher is a watcher for the TLS certificate used to communicate with providers.
 	clientCertWatcher *certwatcher.CertWatcher
-}
 
-// EvaluationMeta has rego specific metadata from evaluation.
-type EvaluationMeta struct {
-	// TemplateRunTime is the number of milliseconds it took to evaluate all constraints for a template.
-	TemplateRunTime float64 `json:"templateRunTime"`
-	// ConstraintCount indicates how many constraints were evaluated for an underlying rego engine eval call.
-	ConstraintCount uint `json:"constraintCount"`
+	// gatherStats controls whether the driver gathers any stats around its API calls.
+	gatherStats bool
 }
 
 // Name returns the name of the driver.
@@ -233,9 +238,9 @@ func (d *Driver) eval(ctx context.Context, compiler *ast.Compiler, target string
 	return res, t, err
 }
 
-func (d *Driver) Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) ([]*types.Result, *string, error) {
+func (d *Driver) Query(ctx context.Context, target string, constraints []*unstructured.Unstructured, review interface{}, opts ...drivers.QueryOpt) (*drivers.QueryResponse, error) {
 	if len(constraints) == 0 {
-		return nil, nil, nil
+		return nil, nil
 	}
 
 	constraintsByKind := toConstraintsByKind(constraints)
@@ -250,12 +255,19 @@ func (d *Driver) Query(ctx context.Context, target string, constraints []*unstru
 	// once per call to Query instead of once per compiler.
 	reviewMap, err := toInterfaceMap(review)
 	if err != nil {
-		return nil, nil, err
+		return nil, err
 	}
 
 	d.mtx.RLock()
 	defer d.mtx.RUnlock()
 
+	cfg := &drivers.QueryCfg{}
+	for _, opt := range opts {
+		opt(cfg)
+	}
+
+	var statsEntries []*instrumentation.StatsEntry
+
 	for kind, kindConstraints := range constraintsByKind {
 		evalStartTime := time.Now()
 		compiler := d.compilers.getCompiler(target, kind)
 		if compiler == nil {
 			// The Template was just removed, so the Driver is in an inconsistent
 			// state with Client. Raise this as an error rather than attempting to
 			// continue.
- return nil, nil, fmt.Errorf("missing Template %q for target %q", kind, target) + return nil, fmt.Errorf("missing Template %q for target %q", kind, target) } // Parse input into an ast.Value to avoid round-tripping through JSON when // possible. parsedInput, err := toParsedInput(target, kindConstraints, reviewMap) if err != nil { - return nil, nil, err + return nil, err } resultSet, trace, err := d.eval(ctx, compiler, target, path, parsedInput, opts...) @@ -297,25 +309,54 @@ func (d *Driver) Query(ctx context.Context, target string, constraints []*unstru kindResults, err := drivers.ToResults(constraintsMap, resultSet) if err != nil { - return nil, nil, err - } - - for _, result := range kindResults { - result.EvaluationMeta = EvaluationMeta{ - TemplateRunTime: float64(evalEndTime.Nanoseconds()) / 1000000, - ConstraintCount: uint(len(kindResults)), - } + return nil, err } results = append(results, kindResults...) + + if d.gatherStats || (cfg != nil && cfg.StatsEnabled) { + statsEntries = append(statsEntries, + &instrumentation.StatsEntry{ + Scope: instrumentation.TemplateScope, + StatsFor: kind, + Stats: []*instrumentation.Stat{ + { + Name: templateRunTimeNS, + Value: uint64(evalEndTime.Nanoseconds()), + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + { + Name: constraintCountName, + Value: len(kindConstraints), + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + }, + Labels: []*instrumentation.Label{ + { + Name: tracingEnabledLabelName, + Value: d.traceEnabled || cfg.TracingEnabled, + }, + { + Name: printEnabledLabelName, + Value: d.printEnabled, + }, + }, + }) + } } traceString := traceBuilder.String() if len(traceString) != 0 { - return results, &traceString, nil + return &drivers.QueryResponse{Results: results, Trace: &traceString, StatsEntries: statsEntries}, nil } - return results, nil, nil + return &drivers.QueryResponse{Results: results, StatsEntries: statsEntries}, nil } func (d *Driver) Dump(ctx context.Context) (string, error) { @@ -358,6 +399,17 @@ func (d *Driver) Dump(ctx context.Context) (string, error) { return string(b), nil } +func (d *Driver) GetDescriptionForStat(statName string) (string, error) { + switch statName { + case templateRunTimeNS: + return templateRunTimeNsDesc, nil + case constraintCountName: + return constraintCountDescription, nil + default: + return "", fmt.Errorf("unknown stat name") + } +} + func (d *Driver) getTLSCertificate() (*tls.Certificate, error) { if !d.enableExternalDataClientAuth { return nil, nil diff --git a/constraint/pkg/client/drivers/rego/driver_unit_test.go b/constraint/pkg/client/drivers/rego/driver_unit_test.go index 2b54f5daa..c495962a5 100644 --- a/constraint/pkg/client/drivers/rego/driver_unit_test.go +++ b/constraint/pkg/client/drivers/rego/driver_unit_test.go @@ -15,9 +15,12 @@ import ( "github.com/open-policy-agent/frameworks/constraint/pkg/apis/constraints" "github.com/open-policy-agent/frameworks/constraint/pkg/apis/externaldata/unversioned" "github.com/open-policy-agent/frameworks/constraint/pkg/client/clienttest/cts" + "github.com/open-policy-agent/frameworks/constraint/pkg/client/drivers" + "github.com/open-policy-agent/frameworks/constraint/pkg/client/drivers/rego/schema" clienterrors "github.com/open-policy-agent/frameworks/constraint/pkg/client/errors" "github.com/open-policy-agent/frameworks/constraint/pkg/externaldata" "github.com/open-policy-agent/frameworks/constraint/pkg/handler/handlertest" + 
"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation" "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/storage" "github.com/open-policy-agent/opa/storage/inmem" @@ -44,6 +47,14 @@ fooisbar[msg] { } ` + NeverViolate string = ` + package foobar + + violation[{"msg": "always violate"}] { + false + } +` + ExternalData string = ` package foobar @@ -140,7 +151,7 @@ func TestDriver_Query(t *testing.T) { t.Fatalf("got AddConstraint() error = %v, want %v", err, nil) } - res, _, err := d.Query( + qr, err := d.Query( ctx, cts.MockTargetHandler, []*unstructured.Unstructured{cts.MakeConstraint(t, "Fakes", "foo-1")}, @@ -149,7 +160,7 @@ func TestDriver_Query(t *testing.T) { if err != nil { t.Fatalf("got Query() error = %v, want %v", err, nil) } - if len(res) == 0 { + if len(qr.Results) == 0 { t.Fatalf("got 0 errors on normal query; want 1") } @@ -159,7 +170,7 @@ func TestDriver_Query(t *testing.T) { t.Fatalf("got RemoveData() error = %v, want %v", err, nil) } - res, _, err = d.Query( + qr, err = d.Query( ctx, cts.MockTargetHandler, []*unstructured.Unstructured{cts.MakeConstraint(t, "Fakes", "foo-1")}, @@ -168,21 +179,461 @@ func TestDriver_Query(t *testing.T) { if err != nil { t.Fatalf("got Query() (#2) error = %v, want %v", err, nil) } - if len(res) == 0 { + if len(qr.Results) == 0 { t.Fatalf("got 0 errors on data-less query; want 1") } +} + +// TestDriver_Query_Stats tests that StatsEntries are returned for both +// violating and non violating constraints. It tests both the QueryOpt.Stats() +// and the GatherStats() modes. +func TestDriver_Query_Stats(t *testing.T) { + ctx := context.Background() + + c1 := cts.MakeConstraint(t, "Fakes", "foo-1") + c2 := cts.MakeConstraint(t, "Fakes", "foo-2") + c3 := cts.MakeConstraint(t, "Fakes-2", "foo-1") + c4 := cts.MakeConstraint(t, "Fakes-2", "foo-2") + + prepareDriverFunc := func(d *Driver) { + /// Start Kind: Fakes + tmpl := cts.New(cts.OptTargets(cts.Target(cts.MockTargetHandler, AlwaysViolate))) + if err := d.AddTemplate(ctx, tmpl); err != nil { + t.Fatalf("got AddTemplate() error = %v, want %v", err, nil) + } + tmpl = cts.New(cts.OptTargets(cts.Target(cts.MockTargetHandler, NeverViolate))) + if err := d.AddTemplate(ctx, tmpl); err != nil { + t.Fatalf("got AddTemplate() error = %v, want %v", err, nil) + } + + if err := d.AddConstraint(ctx, c1); err != nil { + t.Fatalf("got AddConstraint() error = %v, want %v", err, nil) + } + if err := d.AddConstraint(ctx, c2); err != nil { + t.Fatalf("got AddConstraint() error = %v, want %v", err, nil) + } + /// End Kind: Fakes + + /// Start Kind: Fakes-2 + tmpl = cts.New(cts.OptTargets(cts.Target(cts.MockTargetHandler, AlwaysViolate)), cts.OptCRDNames("Fakes-2")) + if err := d.AddTemplate(ctx, tmpl); err != nil { + t.Fatalf("got AddTemplate() error = %v, want %v", err, nil) + } + tmpl = cts.New(cts.OptTargets(cts.Target(cts.MockTargetHandler, NeverViolate)), cts.OptCRDNames("Fakes-2")) + if err := d.AddTemplate(ctx, tmpl); err != nil { + t.Fatalf("got AddTemplate() error = %v, want %v", err, nil) + } - stats, ok := res[0].EvaluationMeta.(EvaluationMeta) - if !ok { - t.Fatalf("could not type convert to RegoEvaluationMeta") + if err := d.AddConstraint(ctx, c1); err != nil { + t.Fatalf("got AddConstraint() error = %v, want %v", err, nil) + } + if err := d.AddConstraint(ctx, c2); err != nil { + t.Fatalf("got AddConstraint() error = %v, want %v", err, nil) + } + /// End Kind: Fakes-2 } - if stats.TemplateRunTime == 0 { - t.Fatalf("expected %v's value to be positive was zero", 
"TemplateRunTime") + target := cts.MockTargetHandler + review := map[string]interface{}{} + + tests := []struct { + name string + driverArgs []Arg + constraints []*unstructured.Unstructured + opts []drivers.QueryOpt + expectedStatsEntries []*instrumentation.StatsEntry + }{ + { + name: "violations; no stats enabled", + constraints: []*unstructured.Unstructured{c1}, + opts: []drivers.QueryOpt{}, + expectedStatsEntries: []*instrumentation.StatsEntry{}, + }, + { + name: "no violations; no stats enabled", + constraints: []*unstructured.Unstructured{c2}, + opts: []drivers.QueryOpt{}, + expectedStatsEntries: []*instrumentation.StatsEntry{}, + }, + { + name: "violations; stats enabled", + constraints: []*unstructured.Unstructured{c1}, + opts: []drivers.QueryOpt{ + drivers.Stats(true), + }, + expectedStatsEntries: []*instrumentation.StatsEntry{ + { + Scope: instrumentation.TemplateScope, + StatsFor: "Fakes", + Stats: []*instrumentation.Stat{ + { + Name: "templateRunTimeNS", + // Value: uint64(0), // tested separately + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + { + Name: "constraintCount", + Value: 1, + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + }, + Labels: []*instrumentation.Label{ + { + Name: tracingEnabledLabelName, + Value: false, + }, + { + Name: printEnabledLabelName, + Value: false, + }, + }, + }, + }, + }, + { + name: "violations; stats enabled w driver args", + constraints: []*unstructured.Unstructured{c1}, + opts: []drivers.QueryOpt{}, + driverArgs: []Arg{GatherStats()}, + expectedStatsEntries: []*instrumentation.StatsEntry{ + { + Scope: instrumentation.TemplateScope, + StatsFor: "Fakes", + Stats: []*instrumentation.Stat{ + { + Name: "templateRunTimeNS", + // Value: uint64(0), // tested separately + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + { + Name: "constraintCount", + Value: 1, + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + }, + Labels: []*instrumentation.Label{ + { + Name: tracingEnabledLabelName, + Value: false, + }, + { + Name: printEnabledLabelName, + Value: false, + }, + }, + }, + }, + }, + { + name: "no violations; stats enabled", + constraints: []*unstructured.Unstructured{c2}, + opts: []drivers.QueryOpt{ + drivers.Stats(true), + }, + expectedStatsEntries: []*instrumentation.StatsEntry{ + { + Scope: instrumentation.TemplateScope, + StatsFor: "Fakes", + Stats: []*instrumentation.Stat{ + { + Name: "templateRunTimeNS", + // Value: uint64(0), // tested separately + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + { + Name: "constraintCount", + Value: 1, + Source: instrumentation.Source{ + Type: instrumentation.EngineSourceType, + Value: schema.Name, + }, + }, + }, + Labels: []*instrumentation.Label{ + { + Name: tracingEnabledLabelName, + Value: false, + }, + { + Name: printEnabledLabelName, + Value: false, + }, + }, + }, + }, + }, + { + name: "no violations; stats enabled w driver args", + constraints: []*unstructured.Unstructured{c2}, + opts: []drivers.QueryOpt{}, + driverArgs: []Arg{GatherStats()}, + expectedStatsEntries: []*instrumentation.StatsEntry{ + { + Scope: instrumentation.TemplateScope, + StatsFor: "Fakes", + Stats: []*instrumentation.Stat{ + { + Name: "templateRunTimeNS", + // Value: uint64(0), // tested separately + Source: instrumentation.Source{ + Type: 
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+						{
+							Name:  "constraintCount",
+							Value: 1,
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+					},
+					Labels: []*instrumentation.Label{
+						{
+							Name:  tracingEnabledLabelName,
+							Value: false,
+						},
+						{
+							Name:  printEnabledLabelName,
+							Value: false,
+						},
+					},
+				},
+			},
+		},
+		{
+			name:        "violations and no violations; stats enabled",
+			constraints: []*unstructured.Unstructured{c1, c2},
+			opts: []drivers.QueryOpt{
+				drivers.Stats(true),
+			},
+			expectedStatsEntries: []*instrumentation.StatsEntry{
+				{
+					Scope:    instrumentation.TemplateScope,
+					StatsFor: "Fakes",
+					Stats: []*instrumentation.Stat{
+						{
+							Name: "templateRunTimeNS",
+							// Value: uint64(0), // tested separately
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+						{
+							Name:  "constraintCount",
+							Value: 2,
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+					},
+					Labels: []*instrumentation.Label{
+						{
+							Name:  tracingEnabledLabelName,
+							Value: false,
+						},
+						{
+							Name:  printEnabledLabelName,
+							Value: false,
+						},
+					},
+				},
+			},
+		},
+		{
+			name:        "violations and no violations; stats enabled; multiple kinds",
+			constraints: []*unstructured.Unstructured{c1, c2, c3, c4},
+			opts: []drivers.QueryOpt{
+				drivers.Stats(true),
+			},
+			expectedStatsEntries: []*instrumentation.StatsEntry{
+				{
+					Scope:    instrumentation.TemplateScope,
+					StatsFor: "Fakes",
+					Stats: []*instrumentation.Stat{
+						{
+							Name: "templateRunTimeNS",
+							// Value: uint64(0), // tested separately
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+						{
+							Name:  "constraintCount",
+							Value: 2,
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+					},
+					Labels: []*instrumentation.Label{
+						{
+							Name:  tracingEnabledLabelName,
+							Value: false,
+						},
+						{
+							Name:  printEnabledLabelName,
+							Value: false,
+						},
+					},
+				},
+				{
+					Scope:    instrumentation.TemplateScope,
+					StatsFor: "Fakes-2",
+					Stats: []*instrumentation.Stat{
+						{
+							Name:  "templateRunTimeNS",
+							Value: uint64(0),
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+						{
+							Name:  "constraintCount",
+							Value: 2,
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+					},
+					Labels: []*instrumentation.Label{
+						{
+							Name:  tracingEnabledLabelName,
+							Value: false,
+						},
+						{
+							Name:  printEnabledLabelName,
+							Value: false,
+						},
+					},
+				},
+			},
+		},
+		{
+			name:        "violations; stats enabled; driver args on",
+			constraints: []*unstructured.Unstructured{c1},
+			opts: []drivers.QueryOpt{
+				drivers.Stats(true),
+			},
+			driverArgs: []Arg{
+				Tracing(true),
+				PrintEnabled(true),
+			},
+			expectedStatsEntries: []*instrumentation.StatsEntry{
+				{
+					Scope:    instrumentation.TemplateScope,
+					StatsFor: "Fakes",
+					Stats: []*instrumentation.Stat{
+						{
+							Name: "templateRunTimeNS",
+							// Value: uint64(0), // tested separately
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+						{
+							Name:  "constraintCount",
+							Value: 1,
+							Source: instrumentation.Source{
+								Type:  instrumentation.EngineSourceType,
+								Value: schema.Name,
+							},
+						},
+					},
+					Labels: []*instrumentation.Label{
+						{
+							Name:  tracingEnabledLabelName,
+							Value: true,
+						},
+						{
+							Name:  printEnabledLabelName,
+							Value: true,
+						},
+					},
+				},
+			},
+		},
 	}
 
-	if stats.ConstraintCount != uint(1) {
-		t.Fatalf("expected %v constraint count, got %v", 1, "ConstraintCount")
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			var d *Driver
+			var err error
+
+			if tc.driverArgs == nil {
+				d, err = New()
+			} else {
+				d, err = New(tc.driverArgs...)
+			}
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			prepareDriverFunc(d)
+
+			response, err := d.Query(context.Background(), target, tc.constraints, review, tc.opts...)
+			if err != nil {
+				t.Fatalf("unexpected error in Query: %v", err)
+			}
+			if response == nil {
+				t.Fatalf("Query response is nil")
+			}
+
+			if len(response.StatsEntries) != len(tc.expectedStatsEntries) {
+				t.Errorf("expected %d stats, but got %d", len(tc.expectedStatsEntries), len(response.StatsEntries))
+			}
+
+			for i, expectedStatEntry := range tc.expectedStatsEntries {
+				actualStatsEntry := response.StatsEntries[i]
+
+				if diff := cmp.Diff(expectedStatEntry, actualStatsEntry, cmpopts.IgnoreFields(instrumentation.Stat{}, "Value")); diff != "" {
+					t.Errorf("stat entries don't match; diff %s", diff)
+				}
+
+				for j, expectedStat := range expectedStatEntry.Stats {
+					actualStat := actualStatsEntry.Stats[j]
+
+					switch actualStat.Name {
+					case templateRunTimeNS:
+						switch actualValue := actualStat.Value.(type) {
+						case uint64:
+							if !(actualValue > 0) {
+								t.Errorf("expected positive value for stat: %s; got: %d", templateRunTimeNS, actualValue)
+							}
+						default:
+							t.Errorf("unknown stat value type: %T for stat: %s", actualValue, actualValue)
+						}
+					case constraintCountName:
+						if actualStat.Value != expectedStat.Value {
+							t.Errorf("%s values don't match; want: %s; got: %s", constraintCountName, expectedStat.Value, actualStat.Value)
+						}
+					}
+				}
+			}
+		})
 	}
 }
@@ -311,7 +762,7 @@
 			t.Fatalf("got AddConstraint() error = %v, want %v", err, nil)
 		}
 
-		res, _, err := d.Query(
+		qr, err := d.Query(
 			ctx,
 			cts.MockTargetHandler,
 			[]*unstructured.Unstructured{cts.MakeConstraint(t, "Fakes", "foo-1")},
@@ -320,11 +771,11 @@
 		if err != nil {
 			t.Fatalf("got Query() error = %v, want %v", err, nil)
 		}
-		if tt.errorExpected && len(res) == 0 {
+		if tt.errorExpected && len(qr.Results) == 0 {
 			t.Fatalf("got 0 errors on normal query; want 1")
 		}
-		if !tt.errorExpected && len(res) > 0 {
-			t.Fatalf("got %d errors on normal query; want 0", len(res))
+		if !tt.errorExpected && len(qr.Results) > 0 {
+			t.Fatalf("got %d errors on normal query; want 0", len(qr.Results))
 		}
 		})
 	}
diff --git a/constraint/pkg/client/drivers/types.go b/constraint/pkg/client/drivers/types.go
new file mode 100644
index 000000000..7e0b9738b
--- /dev/null
+++ b/constraint/pkg/client/drivers/types.go
@@ -0,0 +1,16 @@
+package drivers
+
+import (
+	"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation"
+	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
+)
+
+// QueryResponse encapsulates the values returned on Query:
+// - Results includes a Result for each violated Constraint.
+// - Trace is the evaluation trace on Query if specified in query options or enabled at Driver creation.
+// - StatsEntries include any Stats that the engine gathered on Query.
+type QueryResponse struct {
+	Results      []*types.Result
+	Trace        *string
+	StatsEntries []*instrumentation.StatsEntry
+}
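For orientation, this is roughly the shape of the QueryResponse the Rego driver fills in when stats are enabled. The constraint kind and numeric values below are illustrative only, and instrumentation.RegoSource is the helper var defined in the new instrumentation package later in this diff.

// Illustrative only; not code from the patch.
resp := &drivers.QueryResponse{
	Results: nil, // violated-Constraint Results would go here, as before
	StatsEntries: []*instrumentation.StatsEntry{
		{
			Scope:    instrumentation.TemplateScope,
			StatsFor: "K8sRequiredLabels", // the constraint kind that was evaluated (example value)
			Stats: []*instrumentation.Stat{
				{Name: "templateRunTimeNS", Value: uint64(123456), Source: instrumentation.RegoSource},
				{Name: "constraintCount", Value: 2, Source: instrumentation.RegoSource},
			},
			Labels: []*instrumentation.Label{
				{Name: "TracingEnabled", Value: false},
				{Name: "PrintEnabled", Value: false},
			},
		},
	},
}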
diff --git a/constraint/pkg/client/e2e_test.go b/constraint/pkg/client/e2e_test.go
index d04aea330..d888b5d7f 100644
--- a/constraint/pkg/client/e2e_test.go
+++ b/constraint/pkg/client/e2e_test.go
@@ -3,7 +3,6 @@ package client_test
 import (
 	"context"
 	"errors"
-	"strconv"
 	"strings"
 	"testing"
 
@@ -19,6 +18,7 @@ import (
 	"github.com/open-policy-agent/frameworks/constraint/pkg/core/templates"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/handler"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/handler/handlertest"
+	"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation"
 	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
 	"github.com/open-policy-agent/opa/topdown/print"
 	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@@ -441,7 +441,7 @@ func TestClient_Review(t *testing.T) {
 
 			results := responses.Results()
 
-			diffOpt := cmpopts.IgnoreFields(types.Result{}, "Metadata", "EvaluationMeta")
+			diffOpt := cmpopts.IgnoreFields(types.Result{}, "Metadata")
 			if diff := cmp.Diff(tt.wantResults, results, diffOpt); diff != "" {
 				t.Error(diff)
 			}
@@ -489,8 +489,7 @@ func TestClient_Review_Details(t *testing.T) {
 
 	results := responses.Results()
 
-	diffOpt := cmpopts.IgnoreFields(types.Result{}, "EvaluationMeta")
-	if diff := cmp.Diff(want, results, diffOpt); diff != "" {
+	if diff := cmp.Diff(want, results); diff != "" {
 		t.Error(diff)
 	}
 }
@@ -570,7 +569,7 @@ func TestClient_Review_Print(t *testing.T) {
 			results := rsps.Results()
 
 			if diff := cmp.Diff(tc.wantResults, results,
-				cmpopts.IgnoreFields(types.Result{}, "Metadata", "EvaluationMeta")); diff != "" {
+				cmpopts.IgnoreFields(types.Result{}, "Metadata")); diff != "" {
 				t.Error(diff)
 			}
 
@@ -608,7 +607,7 @@ func TestE2E_RemoveConstraint(t *testing.T) {
 		EnforcementAction: constraints.EnforcementActionDeny,
 	}}
 
-	if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(types.Result{}, "Metadata", "EvaluationMeta")); diff != "" {
+	if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(types.Result{}, "Metadata")); diff != "" {
 		t.Fatal(diff)
 	}
 
@@ -657,7 +656,7 @@ func TestE2E_RemoveTemplate(t *testing.T) {
 		EnforcementAction: constraints.EnforcementActionDeny,
 	}}
 
-	if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(types.Result{}, "Metadata", "EvaluationMeta")); diff != "" {
+	if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(types.Result{}, "Metadata")); diff != "" {
 		t.Fatal(diff)
 	}
 
@@ -834,53 +833,104 @@ func TestE2E_Tracing_Unmatched(t *testing.T) {
 	}
 }
 
-// TestE2E_Review_RegoEvaluationMeta tests that we can get stats out of evaluated constraints.
-func TestE2E_Review_RegoEvaluationMeta(t *testing.T) {
-	ctx := context.Background()
-	c := clienttest.New(t)
-	ct := clienttest.TemplateCheckData()
-	_, err := c.AddTemplate(ctx, ct)
-	if err != nil {
-		t.Fatal(err)
+// TestE2E_DriverStats tests that we can turn on and off the Stats() QueryOpt.
+func TestE2E_DriverStats(t *testing.T) {
+	tests := []struct {
+		name         string
+		statsEnabled bool
+	}{
+		{
+			name:         "disabled",
+			statsEnabled: false,
+		},
+		{
+			name:         "enabled",
+			statsEnabled: true,
+		},
 	}
 
-	numConstrains := 3
-	for i := 1; i < numConstrains+1; i++ {
-		name := "constraint-" + strconv.Itoa(i)
-		constraint := cts.MakeConstraint(t, clienttest.KindCheckData, name, cts.WantData("bar"))
-		_, err = c.AddConstraint(ctx, constraint)
-		if err != nil {
-			t.Fatal(err)
-		}
-	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+			c := clienttest.New(t)
 
-	review := handlertest.Review{
-		Object: handlertest.Object{
-			Name: "foo",
-			Data: "qux",
-		},
-	}
+			_, err := c.AddTemplate(ctx, clienttest.TemplateDeny())
+			if err != nil {
+				t.Fatal(err)
+			}
 
-	responses, err := c.Review(ctx, review)
-	if err != nil {
-		t.Fatal(err)
-	}
+			_, err = c.AddConstraint(ctx, cts.MakeConstraint(t, clienttest.KindDeny, "foo"))
+			if err != nil {
+				t.Fatal(err)
+			}
 
-	results := responses.Results()
+			obj := handlertest.Review{Object: handlertest.Object{Name: "bar"}}
 
-	// for each result check that we have the constraintCount == 3 and a positive templateRunTime
-	for _, result := range results {
-		stats, ok := result.EvaluationMeta.(rego.EvaluationMeta)
-		if !ok {
-			t.Fatalf("could not type convert to RegoEvaluationMeta")
-		}
+			rsps, err := c.Review(ctx, obj, drivers.Stats(tt.statsEnabled))
+			if err != nil {
+				t.Fatal(err)
+			}
 
-		if stats.TemplateRunTime == 0 {
-			t.Fatalf("expected %v's value to be positive was zero", "TemplateRunTime")
-		}
+			stats := rsps.StatsEntries
+			if stats == nil && tt.statsEnabled {
+				t.Fatal("got nil stats but stats enabled for Review")
+			} else if len(stats) != 0 && !tt.statsEnabled {
+				t.Fatal("got stats but stats disabled")
+			}
+		})
+	}
+}
+
+func TestE2E_Client_GetDescriptionForStat(t *testing.T) {
+	unknownDriverSource := instrumentation.Source{
+		Type:  instrumentation.EngineSourceType,
+		Value: "unknown_driver",
+	}
+	unknownSourceType := instrumentation.Source{
+		Type:  "unknown_source",
+		Value: "unknown_value",
+	}
+	validSource := instrumentation.RegoSource
+
+	c := clienttest.New(t)
+	tests := []struct {
+		name            string
+		source          instrumentation.Source
+		statName        string
+		expectedUnknown bool
+	}{
+		{
+			name:            "unknown driver source",
+			source:          unknownDriverSource,
+			statName:        "some_stat_name",
+			expectedUnknown: true,
+		},
+		{
+			name:            "unknown source type",
+			source:          unknownSourceType,
+			statName:        "some_stat_name",
+			expectedUnknown: true,
+		},
+		{
+			name:            "valid source type with unknown stat",
+			source:          validSource,
+			statName:        "this_stat_does_not_exist",
+			expectedUnknown: true,
+		},
+		{
+			name:            "valid source type with known stat",
+			source:          validSource,
+			statName:        "templateRunTimeNS",
+			expectedUnknown: false,
+		},
+	}
 
-		if stats.ConstraintCount != uint(numConstrains) {
-			t.Fatalf("expected %v constraint count, got %v", numConstrains, "ConstraintCount")
+	for _, tc := range tests {
+		desc := c.GetDescriptionForStat(tc.source, tc.statName)
+		if tc.expectedUnknown && desc != instrumentation.UnknownDescription {
+			t.Errorf("expected unknown description for stat %q, got: %q", tc.statName, desc)
+		} else if !tc.expectedUnknown && desc == instrumentation.UnknownDescription {
+			t.Errorf("expected actual description for stat %q, got: %q", tc.statName, desc)
 		}
 	}
 }
diff --git a/constraint/pkg/instrumentation/instrumentation.go b/constraint/pkg/instrumentation/instrumentation.go
new file mode 100644
index 000000000..e49413534
--- /dev/null
+++ b/constraint/pkg/instrumentation/instrumentation.go
@@ -0,0 +1,55 @@
+// package instrumentation defines primitives to
+// support more observability throughout the constraint framework.
+package instrumentation
+
+import "github.com/open-policy-agent/frameworks/constraint/pkg/client/drivers/rego/schema"
+
+const (
+	// scope constants.
+	TemplateScope = "template"
+
+	// description constants.
+	UnknownDescription = "unknown description"
+
+	// source constants.
+	EngineSourceType = "engine"
+)
+
+var RegoSource = Source{
+	Type:  EngineSourceType,
+	Value: schema.Name,
+}
+
+// Label is a name/value tuple to add metadata
+// about a StatsEntry.
+type Label struct {
+	Name  string      `json:"name"`
+	Value interface{} `json:"value"`
+}
+
+// Source is a special Stat level label of the form type/value
+// that is meant to be used to identify where a Stat is coming from.
+type Source struct {
+	Type  string `json:"type"`
+	Value string `json:"value"`
+}
+
+// Stat is a Name, Value, Description tuple that may contain
+// metrics or aggregations of metrics.
+type Stat struct {
+	Name   string      `json:"name"`
+	Value  interface{} `json:"value"`
+	Source Source      `json:"source"`
+}
+
+// StatsEntry comprises of a generalized Key for all associated Stats.
+type StatsEntry struct {
+	// Scope is the level of granularity that the Stats
+	// were created at.
+	Scope string `json:"scope"`
+	// StatsFor is the specific kind of Scope type that Stats
+	// were created for.
+	StatsFor string   `json:"statsFor"`
+	Stats    []*Stat  `json:"stats"`
+	Labels   []*Label `json:"labels,omitempty"`
+}
diff --git a/constraint/pkg/types/validation.go b/constraint/pkg/types/validation.go
index c852234cf..f6a5ea404 100644
--- a/constraint/pkg/types/validation.go
+++ b/constraint/pkg/types/validation.go
@@ -6,6 +6,7 @@ import (
 	"strings"
 
 	"github.com/davecgh/go-spew/spew"
+	"github.com/open-policy-agent/frameworks/constraint/pkg/instrumentation"
 	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
 )
 
@@ -27,13 +28,10 @@
 
 	// The enforcement action of the constraint
 	EnforcementAction string `json:"enforcementAction,omitempty"`
-
-	// EvaluationMeta has metadata for a Result's evaluation.
-	EvaluationMeta interface{} `json:"evaluationMeta,omitempty"`
 }
 
 // Response is a collection of Constraint violations for a particular Target.
-// Each Result is for a distinct Constraint.
+// Each Result represents a violation for a distinct Constraint.
 type Response struct {
 	Trace   *string
 	Target  string
@@ -90,14 +88,16 @@ func (r *Response) TraceDump() string {
 
 func NewResponses() *Responses {
 	return &Responses{
-		ByTarget: make(map[string]*Response),
-		Handled:  make(map[string]bool),
+		ByTarget:     make(map[string]*Response),
+		Handled:      make(map[string]bool),
+		StatsEntries: make([]*instrumentation.StatsEntry, 0),
 	}
 }
 
 type Responses struct {
-	ByTarget map[string]*Response
-	Handled  map[string]bool
+	ByTarget     map[string]*Response
+	Handled      map[string]bool
+	StatsEntries []*instrumentation.StatsEntry
 }
 
 func (r *Responses) Results() []*Result {
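Because the instrumentation types above all carry JSON tags, collected stats can be attached to logs or API payloads directly. A small sketch: dumpStats is an invented helper, and it assumes a *types.Responses value returned by an earlier Review call, with types referring to constraint/pkg/types.

import (
	"encoding/json"
	"fmt"

	"github.com/open-policy-agent/frameworks/constraint/pkg/types"
)

// dumpStats pretty-prints every StatsEntry collected during Review, including
// the "target" label that Client.Review appends for collation.
func dumpStats(responses *types.Responses) error {
	b, err := json.MarshalIndent(responses.StatsEntries, "", "  ")
	if err != nil {
		return err
	}
	fmt.Println(string(b))
	return nil
}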