diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md
index e37b467e..61acdc5d 100644
--- a/DEVELOPMENT.md
+++ b/DEVELOPMENT.md
@@ -30,6 +30,7 @@ The above skaffold command deploys multiple resources:
| gcs | Google Cloud Storage Emulator | N/A | http://gcs:4443 |
| repo-downloader | Repo Downloader Workflow Step in
./workflows/steps/services/common/repo_downloader | http://localhost:8091 | http://repo-downloader:8080 |
| web-feature-consumer | Web Feature Consumer Step in
./workflows/steps/services/web_feature_consumer | http://localhost:8092 | http://web-feature-consumer:8080 |
+| wpt-consumer | WPT Consumer Step in
./workflows/steps/services/wpt_consumer | http://localhost:8093 | http://wpt-consumer:8080 |
_In the event the servers are not responsive, make a temporary change to a file_
_in a watched directory (e.g. backend). This will rebuild and expose the_
diff --git a/Makefile b/Makefile
index cd90e2aa..1f00b8f8 100644
--- a/Makefile
+++ b/Makefile
@@ -100,6 +100,9 @@ go-openapi: $(OPENAPI_OUT_DIR)/backend/types.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/web_feature_consumer/client.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/web_feature_consumer/types.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/web_feature_consumer/server.gen.go \
+ $(OPENAPI_OUT_DIR)/workflows/steps/wpt_consumer/client.gen.go \
+ $(OPENAPI_OUT_DIR)/workflows/steps/wpt_consumer/types.gen.go \
+ $(OPENAPI_OUT_DIR)/workflows/steps/wpt_consumer/server.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/common/repo_downloader/client.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/common/repo_downloader/types.gen.go \
$(OPENAPI_OUT_DIR)/workflows/steps/common/repo_downloader/server.gen.go
diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go
index 5eb6d92e..7699e80f 100644
--- a/backend/cmd/server/main.go
+++ b/backend/cmd/server/main.go
@@ -27,7 +27,7 @@ func main() {
if value, found := os.LookupEnv("DATASTORE_DATABASE"); found {
datastoreDB = &value
}
- fs, err := gds.NewWebFeatureClient(os.Getenv("PROJECT_ID"), datastoreDB)
+ fs, err := gds.NewDatastoreClient(os.Getenv("PROJECT_ID"), datastoreDB)
if err != nil {
slog.Error("failed to create datastore client", "error", err.Error())
os.Exit(1)
diff --git a/backend/pkg/httpserver/server.go b/backend/pkg/httpserver/server.go
index b54b9cb9..5fe3e38d 100644
--- a/backend/pkg/httpserver/server.go
+++ b/backend/pkg/httpserver/server.go
@@ -28,21 +28,26 @@ import (
)
type WebFeatureMetadataStorer interface {
- List(ctx context.Context) ([]backend.Feature, error)
- Get(ctx context.Context, featureID string) (*backend.Feature, error)
+ ListWebFeataureData(ctx context.Context, nextPageToken *string) ([]backend.Feature, *string, error)
+ GetWebFeatureData(ctx context.Context, featureID string) (*backend.Feature, error)
}
type Server struct {
metadataStorer WebFeatureMetadataStorer
}
+// V1ListFeatureMetricsByBrowserAndChannel implements backend.StrictServerInterface.
+func (*Server) V1ListFeatureMetricsByBrowserAndChannel(ctx context.Context, request backend.V1ListFeatureMetricsByBrowserAndChannelRequestObject) (backend.V1ListFeatureMetricsByBrowserAndChannelResponseObject, error) {
+ panic("unimplemented")
+}
+
// GetV1FeaturesFeatureId implements backend.StrictServerInterface.
// nolint: revive, ireturn // Name generated from openapi
func (s *Server) GetV1FeaturesFeatureId(
ctx context.Context,
request backend.GetV1FeaturesFeatureIdRequestObject,
) (backend.GetV1FeaturesFeatureIdResponseObject, error) {
- feature, err := s.metadataStorer.Get(ctx, request.FeatureId)
+ feature, err := s.metadataStorer.GetWebFeatureData(ctx, request.FeatureId)
if err != nil {
slog.Error("unable to get feature", "error", err)
@@ -61,7 +66,7 @@ func (s *Server) GetV1Features(
ctx context.Context,
_ backend.GetV1FeaturesRequestObject,
) (backend.GetV1FeaturesResponseObject, error) {
- featureData, err := s.metadataStorer.List(ctx)
+ featureData, _, err := s.metadataStorer.ListWebFeataureData(ctx, nil)
if err != nil {
// TODO check error type
slog.Error("unable to get list of features", "error", err)
diff --git a/lib/gds/client.go b/lib/gds/client.go
index 8534fd78..b3d42302 100644
--- a/lib/gds/client.go
+++ b/lib/gds/client.go
@@ -20,17 +20,15 @@ import (
"log/slog"
"cloud.google.com/go/datastore"
- "github.com/GoogleChrome/webstatus.dev/lib/gen/jsonschema/web_platform_dx__web_features"
- "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend"
+ "google.golang.org/api/iterator"
)
-const featureDataKey = "FeatureDataTest"
-
type Client struct {
*datastore.Client
}
-func NewWebFeatureClient(projectID string, database *string) (*Client, error) {
+// NewDatastoreClient returns a Client for the Google Datastore service.
+func NewDatastoreClient(projectID string, database *string) (*Client, error) {
if projectID == "" {
return nil, errors.New("projectID is empty")
}
@@ -53,28 +51,38 @@ func NewWebFeatureClient(projectID string, database *string) (*Client, error) {
return &Client{client}, nil
}
-type FeatureData struct {
- WebFeatureID string `datastore:"web_feature_id"`
- Name string `datastore:"name"`
- id int64 // The integer ID used in the datastore.
+// Filterable modifies a query with a given filter.
+type Filterable interface {
+ FilterQuery(*datastore.Query) *datastore.Query
+}
+
+// entityClient is a generic client that contains generic methods that can apply
+// to any entity stored in datastore.
+type entityClient[T any] struct {
+ *Client
}
-func (f FeatureData) ID() int64 {
- return f.id
+type Mergeable[T any] interface {
+ Merge(existing *T, new *T) *T
}
-func (c *Client) Upsert(
+func (c *entityClient[T]) upsert(
ctx context.Context,
- webFeatureID string,
- data web_platform_dx__web_features.FeatureData,
-) error {
+ kind string,
+ data *T,
+ mergeable Mergeable[T],
+ filterables ...Filterable) error {
// Begin a transaction.
_, err := c.RunInTransaction(ctx, func(tx *datastore.Transaction) error {
// Get the entity, if it exists.
- var entity []FeatureData
- query := datastore.NewQuery(featureDataKey).FilterField("web_feature_id", "=", webFeatureID).Transaction(tx)
+ var existingEntity []T
+ query := datastore.NewQuery(kind)
+ for _, filterable := range filterables {
+ query = filterable.FilterQuery(query)
+ }
+ query = query.Limit(1).Transaction(tx)
- keys, err := c.GetAll(ctx, query, &entity)
+ keys, err := c.GetAll(ctx, query, &existingEntity)
if err != nil && !errors.Is(err, datastore.ErrNoSuchEntity) {
slog.Error("unable to check for existing entities", "error", err)
@@ -82,24 +90,19 @@ func (c *Client) Upsert(
}
var key *datastore.Key
- // If the entity exists, update it.
+ // If the entity exists, merge the two entities.
if len(keys) > 0 {
key = keys[0]
-
+ data = mergeable.Merge(&existingEntity[0], data)
} else {
// If the entity does not exist, insert it.
- key = datastore.IncompleteKey(featureDataKey, nil)
+ key = datastore.IncompleteKey(kind, nil)
}
- // nolint: exhaustruct // id does not exist yet
- feature := &FeatureData{
- WebFeatureID: webFeatureID,
- Name: data.Name,
- }
- _, err = tx.Put(key, feature)
+ _, err = tx.Put(key, data)
if err != nil {
// Handle any errors in an appropriate way, such as returning them.
- slog.Error("unable to upsert metadata", "error", err)
+ slog.Error("unable to upsert entity", "error", err)
return err
}
@@ -116,42 +119,63 @@ func (c *Client) Upsert(
return nil
}
-func (c *Client) List(ctx context.Context) ([]backend.Feature, error) {
- var featureData []*FeatureData
- _, err := c.GetAll(ctx, datastore.NewQuery(featureDataKey), &featureData)
- if err != nil {
- return nil, err
+func (c entityClient[T]) list(
+ ctx context.Context,
+ kind string,
+ pageToken *string,
+ filterables ...Filterable) ([]*T, *string, error) {
+ var data []*T
+ query := datastore.NewQuery(kind)
+ if pageToken != nil {
+ cursor, err := datastore.DecodeCursor(*pageToken)
+ if err != nil {
+ return nil, nil, err
+ }
+ query = query.Start(cursor)
+ }
+ for _, filterable := range filterables {
+ query = filterable.FilterQuery(query)
}
- ret := make([]backend.Feature, len(featureData))
-
- // nolint: exhaustruct
- // TODO. Will fix this lint error once the data is coming in.
- for idx, val := range featureData {
- ret[idx] = backend.Feature{
- FeatureId: val.WebFeatureID,
- Name: val.Name,
- Spec: nil,
+ it := c.Run(ctx, query)
+ for {
+ var entity T
+ _, err := it.Next(&entity)
+ if errors.Is(err, iterator.Done) {
+ cursor, err := it.Cursor()
+ if err != nil {
+ // TODO: Handle error.
+ return nil, nil, err
+ }
+ nextToken := cursor.String()
+
+ return data, &nextToken, nil
}
+ if err != nil {
+ return nil, nil, err
+ }
+ data = append(data, &entity)
}
-
- return ret, nil
}
-func (c *Client) Get(ctx context.Context, webFeatureID string) (*backend.Feature, error) {
- var featureData []*FeatureData
- _, err := c.GetAll(
- ctx, datastore.NewQuery(featureDataKey).
- FilterField("web_feature_id", "=", webFeatureID).Limit(1),
- &featureData)
+var ErrEntityNotFound = errors.New("queried entity not found")
+
+func (c entityClient[T]) get(ctx context.Context, kind string, filterables ...Filterable) (*T, error) {
+ var data []*T
+ query := datastore.NewQuery(kind)
+ for _, filterable := range filterables {
+ query = filterable.FilterQuery(query)
+ }
+ query = query.Limit(1)
+ _, err := c.GetAll(ctx, query, &data)
if err != nil {
+ slog.Error("failed to get entity", "error", err, "kind", kind)
+
return nil, err
}
- // nolint: exhaustruct
- // TODO. Will fix this lint error once the data is coming in.
- return &backend.Feature{
- Name: featureData[0].WebFeatureID,
- FeatureId: featureData[0].WebFeatureID,
- Spec: nil,
- }, nil
+ if len(data) < 1 {
+ return nil, ErrEntityNotFound
+ }
+
+ return data[0], nil
}
diff --git a/lib/gds/client_test.go b/lib/gds/client_test.go
index fb47dc0d..2b15412f 100644
--- a/lib/gds/client_test.go
+++ b/lib/gds/client_test.go
@@ -15,15 +15,17 @@
package gds
import (
+ "cmp"
"context"
+ "errors"
"fmt"
"os"
"path/filepath"
- "slices"
+ "reflect"
"testing"
+ "time"
- "github.com/GoogleChrome/webstatus.dev/lib/gen/jsonschema/web_platform_dx__web_features"
- "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend"
+ "cloud.google.com/go/datastore"
"github.com/testcontainers/testcontainers-go"
"github.com/testcontainers/testcontainers-go/wait"
)
@@ -60,7 +62,7 @@ func getTestDatabase(ctx context.Context, t *testing.T) (*Client, func()) {
db := ""
dbPtr := &db
os.Setenv("DATASTORE_EMULATOR_HOST", fmt.Sprintf("localhost:%s", mappedPort.Port()))
- dsClient, err := NewWebFeatureClient(testDatastoreProject, dbPtr)
+ dsClient, err := NewDatastoreClient(testDatastoreProject, dbPtr)
if err != nil {
if unsetErr := os.Unsetenv("DATASTORE_EMULATOR_HOST"); unsetErr != nil {
t.Errorf("failed to unset env. %s", unsetErr.Error())
@@ -87,44 +89,202 @@ func getTestDatabase(ctx context.Context, t *testing.T) (*Client, func()) {
}
}
-// nolint: exhaustruct // No need to use every option of 3rd party struct.
-func TestUpsert(t *testing.T) {
+const sampleKey = "SampleData"
+
+type TestSample struct {
+ Name string `datastore:"name"`
+ Value *int `datastore:"value"`
+ CreatedAt time.Time `datastore:"created_at"`
+}
+
+type nameFilter struct {
+ name string
+}
+
+func (f nameFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.FilterField("name", "=", f.name)
+}
+
+type sortSampleFilter struct {
+}
+
+func (f sortSampleFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.Order("-created_at")
+}
+
+type limitSampleFilter struct {
+ size int
+}
+
+func (f limitSampleFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.Limit(f.size)
+}
+
+// testSampleMerge implements Mergeable for TestSample.
+type testSampleMerge struct{}
+
+func (m testSampleMerge) Merge(existing *TestSample, new *TestSample) *TestSample {
+ return &TestSample{
+ Value: cmp.Or[*int](new.Value, existing.Value),
+ // The below fields cannot be overridden during a merge.
+ Name: existing.Name,
+ CreatedAt: existing.CreatedAt,
+ }
+}
+
+func intPtr(in int) *int {
+ return &in
+}
+
+// nolint: gochecknoglobals
+var testSamples = []TestSample{
+ {
+ Name: "a",
+ Value: intPtr(0),
+ CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ {
+ Name: "b",
+ Value: intPtr(1),
+ CreatedAt: time.Date(1999, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ {
+ Name: "c",
+ Value: intPtr(2),
+ CreatedAt: time.Date(2001, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ {
+ Name: "d",
+ Value: intPtr(3),
+ CreatedAt: time.Date(2002, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+}
+
+func insertEntities(
+ ctx context.Context,
+ t *testing.T,
+ c entityClient[TestSample]) {
+ for i := range testSamples {
+ err := c.upsert(ctx, sampleKey, &testSamples[i], testSampleMerge{}, nameFilter{name: testSamples[i].Name})
+ if err != nil {
+ t.Fatalf("failed to insert entities. %s", err.Error())
+ }
+ }
+}
+
+func TestEntityClientOperations(t *testing.T) {
ctx := context.Background()
client, cleanup := getTestDatabase(ctx, t)
defer cleanup()
-
- // Part 1. Try to insert the first version
- err := client.Upsert(ctx, "id-1", web_platform_dx__web_features.FeatureData{
- Name: "version-1-name",
- })
+ c := entityClient[TestSample]{client}
+ // Step 1. Make sure the entity is not there yet.
+ // Step 1a. Do Get
+ entity, err := c.get(ctx, sampleKey, nameFilter{name: "a"})
+ if entity != nil {
+ t.Error("expected no entity")
+ }
+ if !errors.Is(err, ErrEntityNotFound) {
+ t.Error("expected ErrEntityNotFound")
+ }
+ // Step 1b. Do List
+ pageEmpty, nextPageToken, err := c.list(ctx, sampleKey, nil)
if err != nil {
- t.Errorf("failed to upsert %s", err.Error())
+ t.Errorf("list query failed. %s", err.Error())
}
- features, err := client.List(ctx)
+ if nextPageToken == nil {
+ t.Error("expected next page token")
+ }
+ if pageEmpty != nil {
+ t.Error("expected empty page")
+ }
+ // Step 2. Insert the entities
+ insertEntities(ctx, t, c)
+ // Step 3. Get the entity
+ entity, err = c.get(ctx, sampleKey, nameFilter{name: "a"})
if err != nil {
- t.Errorf("failed to list %s", err.Error())
+ t.Errorf("unexpected error %s", err.Error())
}
-
- expectedFeatures := []backend.Feature{{FeatureId: "id-1", Spec: nil, Name: "version-1-name"}}
- if !slices.Equal[[]backend.Feature](features, expectedFeatures) {
- t.Errorf("slices not equal actual [%v] expected [%v]", features, expectedFeatures)
+ if entity == nil {
+ t.Error("expected entity")
+ t.FailNow()
}
-
- // Part 2. Upsert the second version
- err = client.Upsert(ctx, "id-1", web_platform_dx__web_features.FeatureData{
- Name: "version-2-name",
- })
+ if !reflect.DeepEqual(*entity, TestSample{
+ Name: "a",
+ Value: intPtr(0),
+ CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ }) {
+ t.Errorf("values not equal. received %+v", *entity)
+ }
+ // Step 4. Upsert the entity
+ entity.Value = intPtr(200)
+ // CreatedAt should not update due to the Mergeable policy
+ entity.CreatedAt = time.Date(3000, time.March, 1, 0, 0, 0, 0, time.UTC)
+ err = c.upsert(ctx, sampleKey, entity, testSampleMerge{}, nameFilter{name: "a"})
if err != nil {
- t.Errorf("failed to upsert again %s", err.Error())
+ t.Errorf("upsert failed %s", err.Error())
}
-
- features, err = client.List(ctx)
+ // Step 5. Get the updated entity
+ entity, err = c.get(ctx, sampleKey, nameFilter{name: "a"})
if err != nil {
- t.Errorf("failed to list %s", err.Error())
+ t.Errorf("unexpected error %s", err.Error())
}
-
- expectedFeatures = []backend.Feature{{FeatureId: "id-1", Spec: nil, Name: "version-2-name"}}
- if !slices.Equal[[]backend.Feature](features, expectedFeatures) {
- t.Errorf("slices not equal actual [%v] expected [%v]", features, expectedFeatures)
+ if entity == nil {
+ t.Error("expected entity")
+ t.FailNow()
+ }
+ if !reflect.DeepEqual(*entity, TestSample{
+ Name: "a",
+ Value: intPtr(200),
+ CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ }) {
+ t.Errorf("values not equal. received %+v", *entity)
+ }
+ // Step 6. List the entities
+ // Step 6a. Get first page
+ filters := []Filterable{sortSampleFilter{}, limitSampleFilter{size: 2}}
+ pageOne, nextPageToken, err := c.list(ctx, sampleKey, nil, filters...)
+ if err != nil {
+ t.Errorf("page one query failed. %s", err.Error())
+ }
+ if nextPageToken == nil {
+ t.Error("expected next page token")
+ }
+ expectedPageOne := []*TestSample{
+ {
+ Name: "d",
+ Value: intPtr(3),
+ CreatedAt: time.Date(2002, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ {
+ Name: "c",
+ Value: intPtr(2),
+ CreatedAt: time.Date(2001, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ }
+ if !reflect.DeepEqual(pageOne, expectedPageOne) {
+ t.Error("values not equal")
+ }
+ // Step 6b. Get second page
+ pageTwo, nextPageToken, err := c.list(ctx, sampleKey, nextPageToken, filters...)
+ if err != nil {
+ t.Errorf("page two query failed. %s", err.Error())
+ }
+ if nextPageToken == nil {
+ t.Error("expected next page token")
+ }
+ expectedPageTwo := []*TestSample{
+ {
+ Name: "a",
+ Value: intPtr(200),
+ CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ {
+ Name: "b",
+ Value: intPtr(1),
+ CreatedAt: time.Date(1999, time.January, 1, 0, 0, 0, 0, time.UTC),
+ },
+ }
+ if !reflect.DeepEqual(pageTwo, expectedPageTwo) {
+ t.Error("values not equal")
}
}
diff --git a/lib/gds/web_feature.go b/lib/gds/web_feature.go
new file mode 100644
index 00000000..7cae7e50
--- /dev/null
+++ b/lib/gds/web_feature.go
@@ -0,0 +1,113 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "cmp"
+ "context"
+
+ "cloud.google.com/go/datastore"
+ "github.com/GoogleChrome/webstatus.dev/lib/gen/jsonschema/web_platform_dx__web_features"
+ "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend"
+)
+
+const featureDataKey = "FeatureData"
+
+// FeatureData contains:
+// - basic metadata about the web feature.
+// - (planned) snapshot of latest metrics — not yet present in this struct.
+type FeatureData struct {
+ WebFeatureID *string `datastore:"web_feature_id"`
+ Name *string `datastore:"name"`
+}
+
+// webFeaturesFilter implements Filterable to filter by web_feature_id.
+// Compatible kinds:
+// - featureDataKey.
+type webFeaturesFilter struct {
+ webFeatureID string
+}
+
+func (f webFeaturesFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.FilterField("web_feature_id", "=", f.webFeatureID)
+}
+
+// webFeatureMerge implements Mergeable for FeatureData.
+type webFeatureMerge struct{}
+
+func (m webFeatureMerge) Merge(existing *FeatureData, new *FeatureData) *FeatureData {
+ return &FeatureData{
+ Name: cmp.Or[*string](new.Name, existing.Name),
+ // The below fields cannot be overridden during a merge.
+ WebFeatureID: existing.WebFeatureID,
+ }
+}
+
+// UpsertFeatureData inserts/updates data for the given web feature.
+func (c *Client) UpsertFeatureData(
+ ctx context.Context,
+ webFeatureID string,
+ data web_platform_dx__web_features.FeatureData,
+) error {
+ entityClient := entityClient[FeatureData]{c}
+
+ return entityClient.upsert(ctx,
+ featureDataKey,
+ &FeatureData{
+ WebFeatureID: &webFeatureID,
+ Name: &data.Name,
+ },
+ webFeatureMerge{},
+ webFeaturesFilter{
+ webFeatureID: webFeatureID,
+ },
+ )
+}
+
+// ListWebFeataureData lists web features data.
+func (c *Client) ListWebFeataureData(ctx context.Context, pageToken *string) ([]backend.Feature, *string, error) {
+ entityClient := entityClient[FeatureData]{c}
+ featureData, nextPageToken, err := entityClient.list(ctx, featureDataKey, pageToken)
+ if err != nil {
+ return nil, nil, err
+ }
+ ret := make([]backend.Feature, len(featureData))
+ for idx, val := range featureData {
+ ret[idx] = backend.Feature{
+ FeatureId: *val.WebFeatureID,
+ Name: *val.Name,
+ Spec: nil,
+ }
+ }
+
+ return ret, nextPageToken, nil
+}
+
+// GetWebFeatureData attempts to get data for a given web feature.
+func (c *Client) GetWebFeatureData(ctx context.Context, webFeatureID string) (*backend.Feature, error) {
+ entityClient := entityClient[FeatureData]{c}
+ featureData, err := entityClient.get(ctx, featureDataKey, webFeaturesFilter{
+ webFeatureID: webFeatureID,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return &backend.Feature{
+ Name: *featureData.Name,
+ FeatureId: *featureData.WebFeatureID,
+ Spec: nil,
+ }, nil
+}
diff --git a/lib/gds/web_feature_test.go b/lib/gds/web_feature_test.go
new file mode 100644
index 00000000..5d86e997
--- /dev/null
+++ b/lib/gds/web_feature_test.go
@@ -0,0 +1,66 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "context"
+ "slices"
+ "testing"
+
+ "github.com/GoogleChrome/webstatus.dev/lib/gen/jsonschema/web_platform_dx__web_features"
+ "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend"
+)
+
+// nolint: exhaustruct // No need to use every option of 3rd party struct.
+func TestFeatureDataOperations(t *testing.T) {
+ ctx := context.Background()
+ client, cleanup := getTestDatabase(ctx, t)
+ defer cleanup()
+
+ // Part 1. Try to insert the first version
+ err := client.UpsertFeatureData(ctx, "id-1", web_platform_dx__web_features.FeatureData{
+ Name: "version-1-name",
+ })
+ if err != nil {
+ t.Errorf("failed to upsert %s", err.Error())
+ }
+ features, _, err := client.ListWebFeataureData(ctx, nil)
+ if err != nil {
+ t.Errorf("failed to list %s", err.Error())
+ }
+
+ expectedFeatures := []backend.Feature{{FeatureId: "id-1", Spec: nil, Name: "version-1-name"}}
+ if !slices.Equal[[]backend.Feature](features, expectedFeatures) {
+ t.Errorf("slices not equal actual [%v] expected [%v]", features, expectedFeatures)
+ }
+
+ // Part 2. Upsert the second version
+ err = client.UpsertFeatureData(ctx, "id-1", web_platform_dx__web_features.FeatureData{
+ Name: "version-2-name",
+ })
+ if err != nil {
+ t.Errorf("failed to upsert again %s", err.Error())
+ }
+
+ features, _, err = client.ListWebFeataureData(ctx, nil)
+ if err != nil {
+ t.Errorf("failed to list %s", err.Error())
+ }
+
+ expectedFeatures = []backend.Feature{{FeatureId: "id-1", Spec: nil, Name: "version-2-name"}}
+ if !slices.Equal[[]backend.Feature](features, expectedFeatures) {
+ t.Errorf("slices not equal actual [%v] expected [%v]", features, expectedFeatures)
+ }
+}
diff --git a/lib/gds/wpt_run.go b/lib/gds/wpt_run.go
new file mode 100644
index 00000000..d57a3094
--- /dev/null
+++ b/lib/gds/wpt_run.go
@@ -0,0 +1,164 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "context"
+ "time"
+
+ "cloud.google.com/go/datastore"
+)
+
+const wptRunsKey = "WptRuns"
+
+// WPTRunMetadata contains common metadata for a run.
+type WPTRunMetadata struct {
+ RunID int64 `datastore:"run_id"`
+ TimeStart time.Time `datastore:"time_start"`
+ TimeEnd time.Time `datastore:"time_end"`
+ BrowserName string `datastore:"browser_name"`
+ BrowserVersion string `datastore:"browser_version"`
+ Channel string `datastore:"channel"`
+ OSName string `datastore:"os_name"`
+ OSVersion string `datastore:"os_version"`
+}
+
+// WPTRun contains all information about a WPT run.
+type WPTRun struct {
+ WPTRunMetadata
+ TestMetric *WPTRunMetric `datastore:"test_metric"`
+ FeatureTestMetrics []WPTRunMetricsGroupByFeature `datastore:"feature_test_metrics"`
+}
+
+// wptRunIDFilter implements Filterable to filter by run_id.
+// Compatible kinds:
+// - wptRunsKey.
+type wptRunIDFilter struct {
+ runID int64
+}
+
+func (f wptRunIDFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.FilterField("run_id", "=", f.runID)
+}
+
+// wptRunMerge implements Mergeable for WPTRun.
+type wptRunMerge struct{}
+
+func (m wptRunMerge) Merge(existing *WPTRun, new *WPTRun) *WPTRun {
+ return &WPTRun{
+ WPTRunMetadata: *wptRunMetadataMerge{}.Merge(
+ &existing.WPTRunMetadata, &new.WPTRunMetadata),
+ TestMetric: wptRunMetricMerge{}.Merge(existing.TestMetric, new.TestMetric),
+ FeatureTestMetrics: *wptRunFeatureTestMetricsMerge{}.Merge(&existing.FeatureTestMetrics, &new.FeatureTestMetrics),
+ }
+}
+
+// wptRunMetadataMerge implements Mergeable for WPTRunMetadata.
+type wptRunMetadataMerge struct{}
+
+func (m wptRunMetadataMerge) Merge(existing *WPTRunMetadata, _ *WPTRunMetadata) *WPTRunMetadata {
+ // The below fields cannot be overridden during a merge.
+ return &WPTRunMetadata{
+ RunID: existing.RunID,
+ TimeStart: existing.TimeStart,
+ TimeEnd: existing.TimeEnd,
+ BrowserName: existing.BrowserName,
+ BrowserVersion: existing.BrowserVersion,
+ Channel: existing.Channel,
+ OSName: existing.OSName,
+ OSVersion: existing.OSVersion,
+ }
+}
+
+// StoreWPTRunMetadata stores the metadata for a given run.
+func (c *Client) StoreWPTRunMetadata(
+ ctx context.Context,
+ metadata WPTRunMetadata) error {
+ entityClient := entityClient[WPTRun]{c}
+
+ return entityClient.upsert(
+ ctx,
+ wptRunsKey,
+ &WPTRun{
+ WPTRunMetadata: metadata,
+ TestMetric: nil,
+ FeatureTestMetrics: nil,
+ },
+ wptRunMerge{},
+ wptRunIDFilter{runID: metadata.RunID},
+ )
+}
+
+// GetWPTRun gets the run data (metadata and any stored metrics) for a given run.
+func (c *Client) GetWPTRun(
+ ctx context.Context,
+ runID int64) (*WPTRun, error) {
+ entityClient := entityClient[WPTRun]{c}
+
+ return entityClient.get(
+ ctx,
+ wptRunsKey,
+ wptRunIDFilter{runID: runID},
+ )
+}
+
+// nolint: lll
+// wptRunsByBrowserFilter implements Filterable to filter by:
+// - browser_name (equality)
+// - channel (equality)
+// - time_start (startAt >= x < endAt)
+// - sort by time_start
+// https://github.com/web-platform-tests/wpt.fyi/blob/fb5bae7c6d04563864ef1c28a263a0a8d6637c4e/shared/test_run_query.go#L183-L186
+//
+// Compatible kinds:
+// - wptRunsKey.
+type wptRunsByBrowserFilter struct {
+ startAt time.Time
+ endAt time.Time
+ browser string
+ channel string
+}
+
+func (f wptRunsByBrowserFilter) FilterQuery(query *datastore.Query) *datastore.Query {
+ return query.FilterField("browser_name", "=", f.browser).
+ FilterField("channel", "=", f.channel).
+ FilterField("time_start", ">=", f.startAt).
+ FilterField("time_start", "<", f.endAt).
+ Order("-time_start")
+}
+
+// ListWPTRunsByBrowser returns a list of runs.
+// This is a helper method for other list methods.
+func (c *Client) ListWPTRunsByBrowser(
+ ctx context.Context,
+ browser string,
+ channel string,
+ startAt time.Time,
+ endAt time.Time,
+ pageToken *string) ([]*WPTRun, *string, error) {
+ entityClient := entityClient[WPTRun]{c}
+
+ return entityClient.list(
+ ctx,
+ wptRunsKey,
+ pageToken,
+ wptRunsByBrowserFilter{
+ startAt: startAt,
+ endAt: endAt,
+ browser: browser,
+ channel: channel,
+ },
+ )
+}
diff --git a/lib/gds/wpt_run_metrics.go b/lib/gds/wpt_run_metrics.go
new file mode 100644
index 00000000..4e9762b6
--- /dev/null
+++ b/lib/gds/wpt_run_metrics.go
@@ -0,0 +1,113 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "cmp"
+ "context"
+ "time"
+)
+
+// WPTRunToMetrics contains metrics about a particular WPT run.
+type WPTRunToMetrics struct {
+ WPTRunMetadata
+ *WPTRunMetric
+}
+
+// WPTRunToMetricsByFeature contains per-feature metrics about a particular WPT run.
+type WPTRunToMetricsByFeature struct {
+ WPTRunMetadata
+ *WPTRunMetric
+ FeatureID string
+}
+
+// WPTRunMetrics contains metrics for multiple WPT runs over time.
+type WPTRunMetrics []WPTRunToMetrics
+
+// WPTRunMetric is the basic unit for measuring the tests in a given run.
+type WPTRunMetric struct {
+ // Datastore does not support unsigned integer currently.
+ TotalTests *int `datastore:"total_tests"`
+ TestPass *int `datastore:"test_pass"`
+}
+
+// wptRunMetricMerge implements Mergeable for WPTRunMetric.
+type wptRunMetricMerge struct{}
+
+func (m wptRunMetricMerge) Merge(existing *WPTRunMetric, new *WPTRunMetric) *WPTRunMetric {
+ if existing == nil && new != nil {
+ return new
+ }
+ if existing != nil && new == nil {
+ return existing
+ }
+ if existing == nil && new == nil {
+ return nil
+ }
+
+ return &WPTRunMetric{
+ TotalTests: cmp.Or[*int](new.TotalTests, existing.TotalTests),
+ TestPass: cmp.Or[*int](new.TestPass, existing.TestPass),
+ }
+}
+
+// StoreWPTRunMetrics stores the metrics for a given run.
+func (c *Client) StoreWPTRunMetrics(
+ ctx context.Context,
+ runID int64,
+ metric *WPTRunMetric) error {
+ // Try to get the WPT Run first.
+ run, err := c.GetWPTRun(ctx, runID)
+ if err != nil {
+ return err
+ }
+
+ run.TestMetric = metric
+ entityClient := entityClient[WPTRun]{c}
+
+ return entityClient.upsert(
+ ctx,
+ wptRunsKey,
+ run,
+ wptRunMerge{},
+ wptRunIDFilter{runID: runID},
+ )
+}
+
+// ListWPTMetricsByBrowser retrieves a list of metrics for the given
+// browser name and channel.
+func (c *Client) ListWPTMetricsByBrowser(
+ ctx context.Context,
+ browser string,
+ channel string,
+ startAt time.Time,
+ endAt time.Time,
+ pageToken *string) ([]WPTRunToMetrics, *string, error) {
+
+ runs, nextPageToken, err := c.ListWPTRunsByBrowser(ctx, browser, channel, startAt, endAt, pageToken)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ ret := make([]WPTRunToMetrics, 0, len(runs))
+ for _, run := range runs {
+ ret = append(ret, WPTRunToMetrics{
+ WPTRunMetadata: run.WPTRunMetadata,
+ WPTRunMetric: run.TestMetric,
+ })
+ }
+
+ return ret, nextPageToken, nil
+}
diff --git a/lib/gds/wpt_run_metrics_group_by_feature.go b/lib/gds/wpt_run_metrics_group_by_feature.go
new file mode 100644
index 00000000..5864a090
--- /dev/null
+++ b/lib/gds/wpt_run_metrics_group_by_feature.go
@@ -0,0 +1,125 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "context"
+ "time"
+)
+
+// WPTRunMetricsGroupByFeature contains metrics for a given web feature in a
+// WPT run.
+type WPTRunMetricsGroupByFeature struct {
+ WPTRunMetric
+ FeatureID string `datastore:"web_feature_id"`
+}
+
+// wptRunFeatureTestMetricsMerge implements Mergeable for []WPTRunMetricsGroupByFeature.
+type wptRunFeatureTestMetricsMerge struct{}
+
+func (m wptRunFeatureTestMetricsMerge) Merge(
+ existing *[]WPTRunMetricsGroupByFeature,
+ new *[]WPTRunMetricsGroupByFeature) *[]WPTRunMetricsGroupByFeature {
+ if existing == nil && new != nil {
+ return new
+ }
+ if existing != nil && new == nil {
+ return existing
+ }
+ if existing == nil && new == nil {
+ return nil
+ }
+ metricNameMap := make(map[string]int) // Map feature name to index
+ for idx, metric := range *existing {
+ metricNameMap[metric.FeatureID] = idx
+ }
+ for newIdx := range *new {
+ if idx, exists := metricNameMap[(*new)[newIdx].FeatureID]; exists {
+ (*existing)[idx] = WPTRunMetricsGroupByFeature{
+ WPTRunMetric: *wptRunMetricMerge{}.Merge(&(*existing)[idx].WPTRunMetric, &(*new)[newIdx].WPTRunMetric),
+ // Do not override the feature ID.
+ FeatureID: (*existing)[idx].FeatureID,
+ }
+ } else {
+ // New item
+ *existing = append(*existing, (*new)[newIdx])
+ }
+ }
+
+ return existing
+}
+
+// StoreWPTRunMetricsForFeatures stores the metrics for a given web feature and run.
+// Assumes that all the data belongs to a single run.
+func (c *Client) StoreWPTRunMetricsForFeatures(
+ ctx context.Context,
+ runID int64,
+ dataPerFeature map[string]WPTRunMetric) error {
+ // Try to get the WPT Run first.
+ run, err := c.GetWPTRun(ctx, runID)
+ if err != nil {
+ return err
+ }
+ featureTestMetrics := make([]WPTRunMetricsGroupByFeature, 0, len(dataPerFeature))
+ for featureID, featureData := range dataPerFeature {
+ featureTestMetrics = append(
+ featureTestMetrics,
+ WPTRunMetricsGroupByFeature{WPTRunMetric: featureData, FeatureID: featureID})
+ }
+
+ entityClient := entityClient[WPTRun]{c}
+
+ run.FeatureTestMetrics = featureTestMetrics
+
+ return entityClient.upsert(
+ ctx,
+ wptRunsKey,
+ run,
+ wptRunMerge{},
+ wptRunIDFilter{runID: run.RunID},
+ )
+}
+
+// ListWPTMetricsByBrowserByFeature retrieves a list of metrics grouped by a
+// web feature for the given browser name and channel.
+func (c *Client) ListWPTMetricsByBrowserByFeature(
+	ctx context.Context,
+	browser string,
+	channel string,
+	startAt time.Time,
+	endAt time.Time,
+	featureID string,
+	pageToken *string) ([]*WPTRunToMetricsByFeature, *string, error) {
+	runs, nextPageToken, err := c.ListWPTRunsByBrowser(ctx, browser, channel, startAt, endAt, pageToken)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	ret := make([]*WPTRunToMetricsByFeature, 0, len(runs))
+	// Keep only the per-feature metrics that match the requested feature ID.
+	for _, run := range runs {
+		for i, metric := range run.FeatureTestMetrics {
+			if metric.FeatureID == featureID {
+				ret = append(ret, &WPTRunToMetricsByFeature{
+					WPTRunMetadata: run.WPTRunMetadata,
+					WPTRunMetric:   &run.FeatureTestMetrics[i].WPTRunMetric,
+					FeatureID:      featureID,
+				})
+			}
+		}
+	}
+
+	// Propagate the cursor so callers can page through all runs.
+	return ret, nextPageToken, nil
+}
diff --git a/lib/gds/wpt_run_test.go b/lib/gds/wpt_run_test.go
new file mode 100644
index 00000000..ac990471
--- /dev/null
+++ b/lib/gds/wpt_run_test.go
@@ -0,0 +1,433 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gds
+
+import (
+ "context"
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// nolint: gochecknoglobals
+var sampleWPTRuns = []WPTRun{
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 0,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(0),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 1,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(3),
+ TestPass: intPtr(3),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 2,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "barBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 3,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "barBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(3),
+ TestPass: intPtr(3),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 6,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ {
+ FeatureID: "barFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 7,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(3),
+ TestPass: intPtr(3),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ },
+ {
+ FeatureID: "barFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 8,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "barBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ {
+ FeatureID: "barFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 9,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "barBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ TestMetric: &WPTRunMetric{
+ TotalTests: intPtr(3),
+ TestPass: intPtr(3),
+ },
+ FeatureTestMetrics: []WPTRunMetricsGroupByFeature{
+ {
+ FeatureID: "fooFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ },
+ {
+ FeatureID: "barFeature",
+ WPTRunMetric: WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ },
+ },
+ },
+}
+
+func setupEntities(ctx context.Context, t *testing.T, client *Client) {
+ for _, run := range sampleWPTRuns {
+ err := client.StoreWPTRunMetadata(ctx, run.WPTRunMetadata)
+ if err != nil {
+ t.Errorf("unable to store wpt run %s", err.Error())
+ }
+ err = client.StoreWPTRunMetrics(ctx, run.RunID,
+ run.TestMetric)
+ if err != nil {
+ t.Errorf("unable to store wpt run metric %s", err.Error())
+ }
+ featureMap := make(map[string]WPTRunMetric)
+ for _, featureMetric := range run.FeatureTestMetrics {
+ featureMap[featureMetric.FeatureID] = featureMetric.WPTRunMetric
+ }
+ err = client.StoreWPTRunMetricsForFeatures(ctx, run.RunID, featureMap)
+ if err != nil {
+ t.Errorf("unable to store wpt run metrics per feature %s", err.Error())
+ }
+ }
+}
+
+func testWPTMetricsByBrowser(ctx context.Context, client *Client, t *testing.T) {
+ // Get the foo browser
+ // Step 1. Pick a range that gets both entries of run wide metrics.
+ metrics, _, err := client.ListWPTMetricsByBrowser(
+ ctx,
+ "fooBrowser",
+ shared.StableLabel,
+ time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ time.Date(2000, time.January, 3, 0, 0, 0, 0, time.UTC),
+ nil,
+ )
+ if err != nil {
+ t.Errorf("unable to get metrics for browser. %s", err.Error())
+ }
+ expectedPageBoth := []WPTRunToMetrics{
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 6,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ WPTRunMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 0,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ WPTRunMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ },
+ }
+ if !reflect.DeepEqual(expectedPageBoth, metrics) {
+ t.Error("unequal slices")
+ }
+
+ // Step 2. Pick a range that only gets run-wide metric.
+ metrics, _, err = client.ListWPTMetricsByBrowser(
+ ctx,
+ "fooBrowser",
+ shared.StableLabel,
+ time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ time.Date(2000, time.January, 3, 0, 0, 0, 0, time.UTC),
+ nil,
+ )
+ if err != nil {
+ t.Errorf("unable to get metrics for browser. %s", err.Error())
+ }
+ expectedPageLast := []WPTRunToMetrics{
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 6,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.StableLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ WPTRunMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ },
+ }
+ if !reflect.DeepEqual(expectedPageLast, metrics) {
+ t.Error("unequal slices")
+ }
+}
+
+func testWPTMetricsByBrowserByFeature(ctx context.Context, client *Client, t *testing.T) {
+ // Step 1b. Pick a range that gets both entries of feature specific metrics.
+ featureMetrics, _, err := client.ListWPTMetricsByBrowserByFeature(
+ ctx,
+ "fooBrowser",
+ shared.ExperimentalLabel,
+ time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ time.Date(2000, time.January, 3, 0, 0, 0, 0, time.UTC),
+ "fooFeature",
+ nil,
+ )
+ if err != nil {
+ t.Errorf("unable to get metrics for browser by feature. %s", err.Error())
+ }
+ expectedPageFeatureMetrics := []*WPTRunToMetricsByFeature{
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 7,
+ TimeStart: time.Date(2000, time.January, 2, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 2, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ WPTRunMetric: &WPTRunMetric{
+ TotalTests: intPtr(2),
+ TestPass: intPtr(2),
+ },
+ FeatureID: "fooFeature",
+ },
+ {
+ WPTRunMetadata: WPTRunMetadata{
+ RunID: 1,
+ TimeStart: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC),
+ TimeEnd: time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC),
+ BrowserName: "fooBrowser",
+ BrowserVersion: "0.0.0",
+ Channel: shared.ExperimentalLabel,
+ OSName: "os",
+ OSVersion: "0.0.0",
+ },
+ WPTRunMetric: &WPTRunMetric{
+ TotalTests: intPtr(1),
+ TestPass: intPtr(1),
+ },
+ FeatureID: "fooFeature",
+ },
+ }
+ if !reflect.DeepEqual(expectedPageFeatureMetrics, featureMetrics) {
+ t.Errorf("unequal slices")
+ }
+}
+
+func TestWPTRunMetricsOperations(t *testing.T) {
+ ctx := context.Background()
+
+	// Getting the test database is expensive, which is why the methods below aren't their own tests.
+ // For now, get it once and use it in the sub tests below.
+ client, cleanup := getTestDatabase(ctx, t)
+ defer cleanup()
+ setupEntities(ctx, t, client)
+
+ testWPTMetricsByBrowser(ctx, client, t)
+ testWPTMetricsByBrowserByFeature(ctx, client, t)
+}
diff --git a/lib/gen/openapi/workflows/steps/wpt_consumer/.gitkeep b/lib/gen/openapi/workflows/steps/wpt_consumer/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/openapi/backend/openapi.yaml b/openapi/backend/openapi.yaml
index ac971ed9..2f8ad4b2 100644
--- a/openapi/backend/openapi.yaml
+++ b/openapi/backend/openapi.yaml
@@ -112,7 +112,54 @@ paths:
schema:
$ref: '#/components/schemas/BasicErrorModel'
+ /v1/features/{feature_id}/metrics/wpt/test-metrics/browser/{browser_name}/channel/{channel_name}:
+ parameters:
+ - name: feature_id
+ in: path
+ description: Feature ID
+ required: true
+ schema:
+ type: string
+ - name: browser_name
+ in: path
+ description: Browser name
+ required: true
+ schema:
+ type: string
+ enum:
+ - chrome
+ - firefox
+ - safari
+ - name: channel_name
+ in: path
+ description: Channel name
+ required: true
+ schema:
+ type: string
+ enum:
+ - stable
+ - experimental
+ get:
+ operationId: v1ListFeatureMetricsByBrowserAndChannel
+ parameters:
+ - $ref: '#/components/parameters/paginationTokenParam'
+ summary: List WPT metrics for a particular browser and channel
+ responses:
+ '200':
+ description: OK
+ headers:
+ X-Next-Page-Token:
+ $ref: '#/components/headers/X-Next-Page-Token'
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/WPTRunFeatureMetricPage'
components:
+ headers:
+ X-Next-Page-Token:
+ description: 'Opaque string for the next set of results'
+ schema:
+ type: string
parameters:
paginationTokenParam:
in: query
@@ -138,6 +185,86 @@ components:
score:
type: number
format: float
+ WPTRunFeatureMetricPage:
+ type: object
+ properties:
+ data:
+ type: array
+ items:
+ $ref: '#/components/schemas/WPTRunFeatureMetric'
+ required:
+ - data
+ WPTRunFeatureMetric:
+ type: object
+ properties:
+ metadata:
+ $ref: '#/components/schemas/WPTRunMetadata'
+ test_measurement:
+ $ref: '#/components/schemas/WPTRunTestMeasurement'
+ feature:
+ type: string
+ required:
+ - metadata
+ - feature
+ WPTRunMetric:
+ type: object
+ properties:
+ metadata:
+ $ref: '#/components/schemas/WPTRunMetadata'
+ test_measurement:
+ $ref: '#/components/schemas/WPTRunTestMeasurement'
+ required:
+ - metadata
+ WPTRunTestMeasurement:
+ type: object
+ properties:
+ total_tests:
+ description: Total number of tests in this measurement.
+ type: number
+ passing_tests:
+ description: Number of passing tests in this measurement.
+ type: number
+ required:
+ - total_tests
+ - passing_tests
+ WPTRunMetadata:
+ type: object
+ properties:
+ run_id:
+ type: number
+ description: The identifier of the run in WPT.
+ time_start:
+ type: string
+ description: The start time of the run. (RFC3339 date-time format)
+ format: date-time
+ time_end:
+ type: string
+ description: The end time of the run. (RFC3339 date-time format)
+ format: date-time
+ browser_name:
+ type: string
+ description: The browser name
+ browser_version:
+ type: string
+ description: The browser version
+ channel:
+ type: string
+ description: The channel of the browser. (e.g. stable, experimental)
+ os_name:
+ type: string
+ description: The OS name
+ os_version:
+ type: string
+ description: The OS version
+ required:
+ - run_id
+ - time_start
+ - time_end
+ - browser_name
+ - browser_version
+ - channel
+ - os_name
+ - os_version
PageMetadata:
type: object
properties:
diff --git a/openapi/workflows/steps/wpt_consumer/openapi.yaml b/openapi/workflows/steps/wpt_consumer/openapi.yaml
new file mode 100644
index 00000000..68ec1099
--- /dev/null
+++ b/openapi/workflows/steps/wpt_consumer/openapi.yaml
@@ -0,0 +1,66 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+openapi: '3.0.0'
+info:
+ title: WPT Metrics
+ version: 1.0.0
+servers:
+ - url: http://localhost:8080
+paths:
+ /v1/wpt:
+ post:
+ summary: Consumes WPT data
+ responses:
+ '200':
+ description: WPT data downloaded
+ '400':
+ description: Bad Input
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/BasicErrorModel'
+ '404':
+ description: Not Found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/BasicErrorModel'
+ '429':
+ description: Rate Limit
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/BasicErrorModel'
+ '500':
+ description: Internal Service Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/BasicErrorModel'
+
+components:
+ schemas:
+ BasicErrorModel:
+ type: object
+ required:
+ - message
+ - code
+ properties:
+ message:
+ type: string
+ code:
+ type: integer
+ minimum: 100
+ maximum: 600
diff --git a/workflows/skaffold.yaml b/workflows/skaffold.yaml
index 2b57e7c6..604e1a1c 100644
--- a/workflows/skaffold.yaml
+++ b/workflows/skaffold.yaml
@@ -19,3 +19,4 @@ metadata:
requires:
- path: ./steps/services/common/repo_downloader
- path: ./steps/services/web_feature_consumer
+ - path: ./steps/services/wpt_consumer
diff --git a/workflows/steps/services/web_feature_consumer/cmd/server/main.go b/workflows/steps/services/web_feature_consumer/cmd/server/main.go
index 44a6b26b..8a8be6aa 100644
--- a/workflows/steps/services/web_feature_consumer/cmd/server/main.go
+++ b/workflows/steps/services/web_feature_consumer/cmd/server/main.go
@@ -49,7 +49,7 @@ func main() {
if value, found := os.LookupEnv("DATASTORE_DATABASE"); found {
datastoreDB = &value
}
- fs, err := gds.NewWebFeatureClient(os.Getenv("PROJECT_ID"), datastoreDB)
+ fs, err := gds.NewDatastoreClient(os.Getenv("PROJECT_ID"), datastoreDB)
if err != nil {
slog.Error("failed to create datastore client", "error", err.Error())
os.Exit(1)
diff --git a/workflows/steps/services/web_feature_consumer/pkg/httpserver/server.go b/workflows/steps/services/web_feature_consumer/pkg/httpserver/server.go
index 3e35b35e..0e058139 100644
--- a/workflows/steps/services/web_feature_consumer/pkg/httpserver/server.go
+++ b/workflows/steps/services/web_feature_consumer/pkg/httpserver/server.go
@@ -35,7 +35,10 @@ type ObjectGetter interface {
}
type WebFeatureMetadataStorer interface {
- Upsert(ctx context.Context, webFeatureID string, featureData web_platform_dx__web_features.FeatureData) error
+ UpsertFeatureData(
+ ctx context.Context,
+ webFeatureID string,
+ featureData web_platform_dx__web_features.FeatureData) error
}
type Server struct {
@@ -87,7 +90,7 @@ func (s *Server) PostV1WebFeatures(
}, nil
}
- err = s.metadataStorer.Upsert(ctx, webFeatureKey, featureData)
+ err = s.metadataStorer.UpsertFeatureData(ctx, webFeatureKey, featureData)
if err != nil {
slog.Error("unable to store data", "error", err)
diff --git a/workflows/steps/services/wpt_consumer/cmd/server/main.go b/workflows/steps/services/wpt_consumer/cmd/server/main.go
new file mode 100644
index 00000000..97b25447
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/cmd/server/main.go
@@ -0,0 +1,74 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "cmp"
+ "log/slog"
+ "os"
+ "time"
+
+ "github.com/GoogleChrome/webstatus.dev/lib/gds"
+ "github.com/GoogleChrome/webstatus.dev/workflows/steps/services/wpt_consumer/pkg/httpserver"
+ "github.com/GoogleChrome/webstatus.dev/workflows/steps/services/wpt_consumer/pkg/workflow"
+ "github.com/GoogleChrome/webstatus.dev/workflows/steps/services/wpt_consumer/pkg/wptfyi"
+ "github.com/google/go-github/v47/github"
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+func main() {
+ wptFyiHostname := cmp.Or[string](os.Getenv("WPTFYI_HOSTNAME"), "wpt.fyi")
+ var datastoreDB *string
+ if value, found := os.LookupEnv("DATASTORE_DATABASE"); found {
+ datastoreDB = &value
+ }
+ dsClient, err := gds.NewDatastoreClient(os.Getenv("PROJECT_ID"), datastoreDB)
+ if err != nil {
+ slog.Error("failed to create datastore client", "error", err.Error())
+ os.Exit(1)
+ }
+
+ ghClient := github.NewClient(nil)
+ workflow := workflow.Entrypoint{
+ Starter: workflow.NewWptRunsWorker(
+ wptfyi.NewHTTPClient(wptFyiHostname),
+ workflow.NewWPTRunsProcessor(workflow.NewWPTRunProcessor(
+ workflow.NewHTTPResultsGetter(),
+ workflow.NewGitHubWebFeaturesDataGetter(
+ shared.NewGitHubWebFeaturesClient(ghClient),
+ ),
+ workflow.WPTScorerForWebFeatures{},
+ dsClient,
+ )),
+ ),
+ NumWorkers: 1,
+ }
+
+ srv, err := httpserver.NewHTTPServer(
+ "8080",
+ workflow,
+ // For now only go a year back by default.
+ time.Date(2023, time.January, 1, 0, 0, 0, 0, time.UTC),
+ )
+ if err != nil {
+ slog.Error("unable to create server", "error", err.Error())
+ os.Exit(1)
+ }
+ err = srv.ListenAndServe()
+ if err != nil {
+ slog.Error("unable to start server", "error", err.Error())
+ os.Exit(1)
+ }
+}
diff --git a/workflows/steps/services/wpt_consumer/go.mod b/workflows/steps/services/wpt_consumer/go.mod
index 9404f4db..f382c992 100644
--- a/workflows/steps/services/wpt_consumer/go.mod
+++ b/workflows/steps/services/wpt_consumer/go.mod
@@ -2,4 +2,53 @@ module github.com/GoogleChrome/webstatus.dev/workflows/steps/services/wpt_consum
go 1.21.0
-require github.com/web-platform-tests/wpt.fyi v0.0.0-20240206170845-05ddddc52a6b // indirect
+require (
+ cloud.google.com/go v0.112.0 // indirect
+ cloud.google.com/go/cloudtasks v1.12.5 // indirect
+ cloud.google.com/go/compute v1.23.3 // indirect
+ cloud.google.com/go/compute/metadata v0.2.3 // indirect
+ cloud.google.com/go/datastore v1.15.0 // indirect
+ cloud.google.com/go/iam v1.1.5 // indirect
+ cloud.google.com/go/logging v1.9.0 // indirect
+ cloud.google.com/go/longrunning v0.5.4 // indirect
+ cloud.google.com/go/secretmanager v1.11.4 // indirect
+ github.com/deckarep/golang-set v1.8.0 // indirect
+ github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/go-logr/logr v1.4.1 // indirect
+ github.com/go-logr/stdr v1.2.2 // indirect
+ github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
+ github.com/golang/protobuf v1.5.3 // indirect
+ github.com/gomodule/redigo v1.8.9 // indirect
+ github.com/google/go-github/v47 v47.1.0 // indirect
+ github.com/google/go-querystring v1.1.0 // indirect
+ github.com/google/s2a-go v0.1.7 // indirect
+ github.com/google/uuid v1.6.0 // indirect
+ github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
+ github.com/googleapis/gax-go/v2 v2.12.0 // indirect
+ github.com/gorilla/handlers v1.5.2 // indirect
+ github.com/gorilla/mux v1.8.1 // indirect
+ github.com/gorilla/securecookie v1.1.2 // indirect
+ github.com/sirupsen/logrus v1.9.3 // indirect
+ github.com/web-platform-tests/wpt.fyi v0.0.0-20240206170845-05ddddc52a6b // indirect
+ go.opencensus.io v0.24.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 // indirect
+ go.opentelemetry.io/otel v1.22.0 // indirect
+ go.opentelemetry.io/otel/metric v1.22.0 // indirect
+ go.opentelemetry.io/otel/trace v1.22.0 // indirect
+ golang.org/x/crypto v0.18.0 // indirect
+ golang.org/x/net v0.20.0 // indirect
+ golang.org/x/oauth2 v0.16.0 // indirect
+ golang.org/x/sync v0.6.0 // indirect
+ golang.org/x/sys v0.16.0 // indirect
+ golang.org/x/text v0.14.0 // indirect
+ golang.org/x/time v0.5.0 // indirect
+ google.golang.org/api v0.161.0 // indirect
+ google.golang.org/appengine v1.6.8 // indirect
+ google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240122161410-6c6643bf1457 // indirect
+ google.golang.org/grpc v1.61.0 // indirect
+ google.golang.org/protobuf v1.32.0 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+)
diff --git a/workflows/steps/services/wpt_consumer/manifests/pod.yaml b/workflows/steps/services/wpt_consumer/manifests/pod.yaml
new file mode 100644
index 00000000..ca7236b9
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/manifests/pod.yaml
@@ -0,0 +1,42 @@
+# Copyright 2024 Google LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# https://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: v1
+kind: Pod
+metadata:
+ name: wpt-consumer
+ labels:
+ app.kubernetes.io/name: wpt-consumer
+spec:
+ containers:
+ - name: wpt-consumer
+ image: wpt-consumer
+ ports:
+ - containerPort: 8080
+ name: http-svc
+ env:
+ - name: BUCKET
+ value: 'testbucket'
+ - name: STORAGE_EMULATOR_HOST
+ value: 'http://gcs:4443'
+ - name: PROJECT_ID
+ value: local
+ - name: DATASTORE_DATABASE
+ value: ''
+ - name: DATASTORE_EMULATOR_HOST
+ value: 'datastore:8085'
+ resources:
+ limits:
+ cpu: 250m
+ memory: 512Mi
diff --git a/workflows/steps/services/wpt_consumer/manifests/service.yaml b/workflows/steps/services/wpt_consumer/manifests/service.yaml
new file mode 100644
index 00000000..4a90a13d
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/manifests/service.yaml
@@ -0,0 +1,25 @@
+# Copyright 2024 Google LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# https://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: v1
+kind: Service
+metadata:
+ name: wpt-consumer
+spec:
+ selector:
+ app.kubernetes.io/name: wpt-consumer
+ ports:
+ - protocol: TCP
+ port: 8080
+ targetPort: http-svc
diff --git a/workflows/steps/services/wpt_consumer/pkg/httpserver/server.go b/workflows/steps/services/wpt_consumer/pkg/httpserver/server.go
new file mode 100644
index 00000000..592fc822
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/httpserver/server.go
@@ -0,0 +1,82 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package httpserver
+
+import (
+ "context"
+ "log/slog"
+ "net"
+ "net/http"
+ "time"
+
+ "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/workflows/steps/wpt_consumer"
+ "github.com/go-chi/chi/v5"
+)
+
+type WorkflowStarter interface {
+ Start(ctx context.Context, from time.Time) error
+}
+
+type Server struct {
+ workflowStarter WorkflowStarter
+ from time.Time
+}
+
+// PostV1Wpt implements wpt_consumer.StrictServerInterface.
+// nolint: revive, ireturn // Signature generated from openapi
+func (s *Server) PostV1Wpt(
+ ctx context.Context,
+ request wpt_consumer.PostV1WptRequestObject) (wpt_consumer.PostV1WptResponseObject, error) {
+ err := s.workflowStarter.Start(ctx, s.from)
+ if err != nil {
+ slog.Error("workflow failed", "error", err)
+
+ return wpt_consumer.PostV1Wpt500JSONResponse{
+ Code: 500,
+ Message: "workflow failed",
+ }, nil
+ }
+
+ return wpt_consumer.PostV1Wpt200Response{}, nil
+}
+
+func NewHTTPServer(
+ port string,
+ workflowStarter WorkflowStarter,
+ from time.Time,
+) (*http.Server, error) {
+ _, err := wpt_consumer.GetSwagger()
+ if err != nil {
+ return nil, err
+ }
+ srv := &Server{
+ workflowStarter: workflowStarter,
+ from: from,
+ }
+
+ handler := wpt_consumer.NewStrictHandler(srv, nil)
+
+ r := chi.NewRouter()
+
+	// Register the generated wpt_consumer handler on the chi router.
+ wpt_consumer.HandlerFromMux(handler, r)
+
+ // nolint:exhaustruct // No need to populate 3rd party struct
+ return &http.Server{
+ Handler: r,
+ Addr: net.JoinHostPort("0.0.0.0", port),
+ ReadHeaderTimeout: 30 * time.Second,
+ }, nil
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/get_runs.go b/workflows/steps/services/wpt_consumer/pkg/workflow/get_web_feature_map.go
similarity index 62%
rename from workflows/steps/services/wpt_consumer/pkg/workflow/get_runs.go
rename to workflows/steps/services/wpt_consumer/pkg/workflow/get_web_feature_map.go
index 4d6ee29c..40d808d7 100644
--- a/workflows/steps/services/wpt_consumer/pkg/workflow/get_runs.go
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/get_web_feature_map.go
@@ -16,20 +16,19 @@ package workflow
import (
"context"
- "time"
"github.com/web-platform-tests/wpt.fyi/shared"
)
-// Workflow contains all the steps for the workflow to consume wpt data.
-type Workflow struct{}
+// NewGitHubWebFeaturesDataGetter constructs a GitHubWebFeaturesDataGetter
+// backed by the given wpt.fyi GitHub web features client.
+func NewGitHubWebFeaturesDataGetter(client *shared.GitHubWebFeaturesClient) *GitHubWebFeaturesDataGetter {
+	return &GitHubWebFeaturesDataGetter{client: client}
+}
+
+// GitHubWebFeaturesDataGetter retrieves the mapping of WPT tests to web
+// features from GitHub via the wpt.fyi client.
+type GitHubWebFeaturesDataGetter struct {
+	client *shared.GitHubWebFeaturesClient
+}
-// RunsGetter represents the behavior to get all the runs up until the given
-// date.
-type RunsGetter interface {
- GetRuns(
- ctx context.Context,
- stopAt time.Time,
- runsPerPage int,
- ) (shared.TestRuns, error)
+// GetWebFeaturesData fetches the test-to-web-feature mapping. Every call hits
+// the underlying client.
+// TODO: cache the result.
+func (g GitHubWebFeaturesDataGetter) GetWebFeaturesData(ctx context.Context) (shared.WebFeaturesData, error) {
+	return g.client.Get(ctx)
 }
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter.go b/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter.go
new file mode 100644
index 00000000..a474a6d3
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter.go
@@ -0,0 +1,56 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"time"
+)
+
+// NewHTTPResultsGetter constructs an HTTPResultsGetter with its own HTTP
+// client. The client has a timeout so a stalled download cannot hang a
+// workflow worker indefinitely (http.DefaultClient has no timeout).
+func NewHTTPResultsGetter() *HTTPResultsGetter {
+	return &HTTPResultsGetter{
+		// nolint:exhaustruct // No need to populate 3rd party struct
+		client: http.Client{
+			Timeout: 60 * time.Second,
+		},
+	}
+}
+
+// HTTPResultsGetter downloads WPT results summary files over HTTP.
+type HTTPResultsGetter struct {
+	client http.Client
+}
+
+// DownloadResults fetches the results summary JSON at the given URL and
+// decodes it into a ResultsSummaryFile.
+func (h HTTPResultsGetter) DownloadResults(
+	ctx context.Context,
+	url string) (ResultsSummaryFile, error) {
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := h.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	// A non-200 response body is not the expected JSON (e.g. an error page),
+	// so fail fast with a descriptive error instead of a confusing decode
+	// failure.
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("unexpected status code %d downloading %s", resp.StatusCode, url)
+	}
+
+	// No need to decompress it despite it having the .gz suffix.
+
+	var data ResultsSummaryFile
+	decoder := json.NewDecoder(resp.Body)
+	if err := decoder.Decode(&data); err != nil {
+		return nil, err
+	}
+
+	return data, nil
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter_integration_test.go b/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter_integration_test.go
new file mode 100644
index 00000000..c658e8b4
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/http_results_getter_integration_test.go
@@ -0,0 +1,33 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "context"
+ "testing"
+)
+
+// TestHTTPResultsGetter is an integration test: it downloads a known summary
+// file from live GCS storage and checks that it decodes to non-empty data.
+func TestHTTPResultsGetter(t *testing.T) {
+	g := NewHTTPResultsGetter()
+	// nolint:lll
+	sampleURL := "https://storage.googleapis.com/wptd/9593290ad1bd621f74c697c7cc347348af2de32a/chrome-117.0.5938.62-linux-20.04-ddee0c57b6-summary_v2.json.gz"
+	data, err := g.DownloadResults(context.Background(), sampleURL)
+	if err != nil {
+		// Fatal, not Error: the length check below is meaningless when the
+		// download itself failed.
+		t.Fatalf("unexpected error during download. %s", err.Error())
+	}
+	if len(data) == 0 {
+		t.Error("expected there to be data")
+	}
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/score_webfeature.go b/workflows/steps/services/wpt_consumer/pkg/workflow/score_webfeature.go
new file mode 100644
index 00000000..54f0849c
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/score_webfeature.go
@@ -0,0 +1,96 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "cmp"
+ "context"
+
+ "github.com/GoogleChrome/webstatus.dev/lib/gds"
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// WPTStatusAbbreviation is the one- or two-letter status code used for a test
+// result in a WPT results summary file.
+type WPTStatusAbbreviation string
+
+// Abbreviations come from
+// https://github.com/web-platform-tests/wpt.fyi/tree/main/api#results-summaries
+const (
+	WPTStatusOK                 WPTStatusAbbreviation = "O"
+	WPTStatusPass               WPTStatusAbbreviation = "P"
+	WPTStatusFail               WPTStatusAbbreviation = "F"
+	WPTStatusSkip               WPTStatusAbbreviation = "S"
+	WPTStatusError              WPTStatusAbbreviation = "E"
+	WPTStatusNotRun             WPTStatusAbbreviation = "N"
+	WPTStatusCrash              WPTStatusAbbreviation = "C"
+	WPTStatusTimeout            WPTStatusAbbreviation = "T"
+	WPTStatusPreconditionFailed WPTStatusAbbreviation = "PF"
+)
+
+// WPTScorerForWebFeatures computes WPT pass/total metrics overall and per
+// web feature.
+type WPTScorerForWebFeatures struct{}
+
+// Score walks the results summary and returns the overall run metric plus a
+// metric for each web feature that maps to at least one test. Tests with
+// fewer than two count entries are skipped.
+func (s WPTScorerForWebFeatures) Score(
+	ctx context.Context,
+	summary ResultsSummaryFile,
+	testToWebFeatures shared.WebFeaturesData) (*gds.WPTRunMetric, map[string]gds.WPTRunMetric) {
+	scoreMap := make(map[string]gds.WPTRunMetric)
+	// Initialize both counters so scoreTest can dereference them.
+	// (new(gds.WPTRunMetric) would leave the *int fields nil and the first
+	// `*overall.TotalTests++` would panic with a nil dereference.)
+	overall := &gds.WPTRunMetric{TotalTests: new(int), TestPass: new(int)}
+	for test, testSummary := range summary {
+		if len(testSummary.Counts) < 2 {
+			// Need at least the number of subtests passes and the number of subtests
+			continue
+		}
+		s.scoreTest(ctx, test, overall, scoreMap, testToWebFeatures, testSummary.Counts[0], testSummary.Counts[1])
+	}
+
+	return overall, scoreMap
+}
+
+// scoreTest folds one test's result into the overall metric and into the
+// per-web-feature score map. A test with no associated web features counts
+// toward nothing. A test counts as passing only when every subtest passed.
+// Precondition: overall.TotalTests and overall.TestPass must be non-nil.
+func (s WPTScorerForWebFeatures) scoreTest(
+	_ context.Context,
+	test string,
+	overall *gds.WPTRunMetric,
+	webFeatureScoreMap map[string]gds.WPTRunMetric,
+	testToWebFeatures shared.WebFeaturesData,
+	numberOfSubtestPassing int,
+	numberofSubtests int,
+) {
+	var webFeatures map[string]interface{}
+	var found bool
+	if webFeatures, found = testToWebFeatures[test]; !found {
+		// There are no web features associated with this test. Skip
+		return
+	}
+	*overall.TotalTests++
+	// Calculate the value early so we can re-use for multiple web features.
+	countsAsPassing := numberOfSubtestPassing == numberofSubtests
+	for webFeature := range webFeatures {
+		// Fresh zeroed counters are used only when the feature has no entry
+		// yet; cmp.Or returns the existing map entry when it is non-zero.
+		initialTotal := new(int)
+		initialPass := new(int)
+		*initialTotal = 0
+		*initialPass = 0
+		webFeatureScore := cmp.Or(
+			webFeatureScoreMap[webFeature],
+			gds.WPTRunMetric{TotalTests: initialTotal, TestPass: initialPass})
+		*webFeatureScore.TotalTests++
+		// If all of the sub tests passed, only count it.
+		if countsAsPassing {
+			*webFeatureScore.TestPass++
+		}
+		// Store the value back so newly-created entries are retained.
+		webFeatureScoreMap[webFeature] = webFeatureScore
+	}
+	if countsAsPassing {
+		*overall.TestPass++
+	}
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/workflow_entrypoint.go b/workflows/steps/services/wpt_consumer/pkg/workflow/workflow_entrypoint.go
new file mode 100644
index 00000000..2e3e294d
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/workflow_entrypoint.go
@@ -0,0 +1,90 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+ "sync"
+ "time"
+
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// Entrypoint fans the workflow out across NumWorkers workers started via
+// Starter.
+type Entrypoint struct {
+	Starter    WorkerStarter
+	NumWorkers int
+}
+
+// WorkerStarter starts one worker that consumes jobs until the jobs channel
+// is closed, reporting failures on errChan and signaling wg when done.
+type WorkerStarter interface {
+	Start(ctx context.Context, id int, wg *sync.WaitGroup, jobs <-chan workflowArguments, errChan chan<- error)
+}
+
+// Start enqueues one job per (browser, channel) pair, runs them on NumWorkers
+// workers, and waits for completion. All worker errors are collected and
+// returned joined together; nil means every job succeeded.
+func (w Entrypoint) Start(ctx context.Context, from time.Time) error {
+	browsers := shared.GetDefaultBrowserNames()
+	channels := []string{shared.StableLabel, shared.ExperimentalLabel}
+	wg := sync.WaitGroup{}
+	numberOfJobs := len(browsers) * len(channels)
+	jobsChan := make(chan workflowArguments, numberOfJobs)
+	errChan := make(chan error, numberOfJobs)
+
+	// Start the workers. Each worker calls wg.Done exactly once when
+	// jobsChan is drained, so the WaitGroup counts workers, not jobs.
+	for i := 0; i < w.NumWorkers; i++ {
+		wg.Add(1)
+		go w.Starter.Start(ctx, i, &wg, jobsChan, errChan)
+	}
+
+	// Enqueue every job. jobsChan is buffered to hold all of them, so this
+	// never blocks even before the workers pick anything up.
+	for _, browser := range browsers {
+		for _, channel := range channels {
+			jobsChan <- workflowArguments{
+				from:    from,
+				browser: browser,
+				channel: channel,
+			}
+		}
+	}
+	close(jobsChan)
+
+	// Close errChan once every worker has returned so the collection loop
+	// below terminates.
+	go func() {
+		wg.Wait()
+		close(errChan)
+	}()
+
+	// Drain errChan until it is closed. errChan is buffered for one error
+	// per job, so workers can never block sending to it.
+	var allErrors []error
+	for err := range errChan {
+		allErrors = append(allErrors, err)
+	}
+	slog.Info("Finished processing", "error count", len(allErrors))
+	if len(allErrors) > 0 {
+		return errors.Join(allErrors...)
+	}
+
+	return nil
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_run_processor.go b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_run_processor.go
new file mode 100644
index 00000000..1e8d747e
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_run_processor.go
@@ -0,0 +1,101 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "context"
+
+ "github.com/GoogleChrome/webstatus.dev/lib/gds"
+ "github.com/web-platform-tests/wpt.fyi/api/query"
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// ResultsSummaryFile maps a test name to its summary result.
+// A copy of summary from wpt.fyi
+// https://github.com/web-platform-tests/wpt.fyi/blob/05ddddc52a6b95469131eac5e439af39cbd1200a/api/query/query.go#L30
+// TODO export Summary in wpt.fyi and use it here instead.
+type ResultsSummaryFile map[string]query.SummaryResult
+
+// WPTRunProcessor contains all the steps for the workflow to consume wpt data
+// of a particular WPT Run.
+type WPTRunProcessor struct {
+	// resultsDownloader fetches the run's results summary file.
+	resultsDownloader ResultsDownloader
+	// webFeaturesDataGetter supplies the test-to-web-feature mapping.
+	webFeaturesDataGetter WebFeaturesDataGetter
+	// scorer computes metrics from the summary and the mapping.
+	scorer WebFeatureWPTScorer
+	// scoreStorer persists the computed metrics.
+	scoreStorer WebFeatureWPTScoreStorer
+}
+
+// NewWPTRunProcessor constructs a WPTRunProcessor from its collaborators.
+func NewWPTRunProcessor(
+	resultsDownloader ResultsDownloader,
+	webFeaturesDataGetter WebFeaturesDataGetter,
+	scorer WebFeatureWPTScorer,
+	scoreStorer WebFeatureWPTScoreStorer) *WPTRunProcessor {
+	processor := WPTRunProcessor{
+		resultsDownloader:     resultsDownloader,
+		webFeaturesDataGetter: webFeaturesDataGetter,
+		scorer:                scorer,
+		scoreStorer:           scoreStorer,
+	}
+
+	return &processor
+}
+
+// ResultsDownloader fetches and decodes the results summary file at a URL.
+type ResultsDownloader interface {
+	DownloadResults(context.Context, string) (ResultsSummaryFile, error)
+}
+
+// WebFeaturesDataGetter returns the mapping of WPT tests to web features.
+type WebFeaturesDataGetter interface {
+	GetWebFeaturesData(context.Context) (shared.WebFeaturesData, error)
+}
+
+// WebFeatureWPTScorer computes the overall run metric and a per-web-feature
+// metric map from a results summary and the web features data.
+type WebFeatureWPTScorer interface {
+	Score(context.Context, ResultsSummaryFile, shared.WebFeaturesData) (*gds.WPTRunMetric, map[string]gds.WPTRunMetric)
+}
+
+// WebFeatureWPTScoreStorer persists the overall metric and the per-feature
+// metrics for a run, keyed by the run ID.
+type WebFeatureWPTScoreStorer interface {
+	StoreWPTRunMetrics(context.Context, int64, *gds.WPTRunMetric) error
+	StoreWPTRunMetricsForFeatures(
+		context.Context,
+		int64,
+		map[string]gds.WPTRunMetric) error
+}
+
+// ProcessRun downloads the run's results summary, scores it against the web
+// features data, and stores the overall and per-feature metrics.
+func (w WPTRunProcessor) ProcessRun(
+	ctx context.Context,
+	run shared.TestRun) error {
+	// Get the results.
+	resultsSummaryFile, err := w.resultsDownloader.DownloadResults(ctx, run.ResultsURL)
+	if err != nil {
+		return err
+	}
+
+	// Get the web features data.
+	// TODO: in the future, get the matching metadata if it exist. Then default to
+	// the latest if it doesn't exist.
+	webFeaturesData, err := w.webFeaturesDataGetter.GetWebFeaturesData(ctx)
+	if err != nil {
+		return err
+	}
+
+	runMetrics, metricsPerFeature := w.scorer.Score(ctx, resultsSummaryFile, webFeaturesData)
+
+	if err := w.scoreStorer.StoreWPTRunMetrics(ctx, run.ID, runMetrics); err != nil {
+		return err
+	}
+
+	return w.scoreStorer.StoreWPTRunMetricsForFeatures(ctx, run.ID, metricsPerFeature)
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_processor.go b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_processor.go
new file mode 100644
index 00000000..12648941
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_processor.go
@@ -0,0 +1,46 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "context"
+
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// NewWPTRunsProcessor constructs a WPTRunsProcessor that delegates each run
+// to the given RunProcessor.
+func NewWPTRunsProcessor(runProcessor RunProcessor) *WPTRunsProcessor {
+	return &WPTRunsProcessor{runProcessor: runProcessor}
+}
+
+// WPTRunsProcessor contains all the steps for the workflow to process wpt data
+// of multiple WPT runs.
+type WPTRunsProcessor struct {
+	runProcessor RunProcessor
+}
+
+// RunProcessor handles the processing of a single WPT run.
+type RunProcessor interface {
+	ProcessRun(context.Context, shared.TestRun) error
+}
+
+// ProcessRuns processes each run in order, stopping at the first failure.
+func (r WPTRunsProcessor) ProcessRuns(ctx context.Context, runs shared.TestRuns) error {
+	for idx := range runs {
+		if err := r.runProcessor.ProcessRun(ctx, runs[idx]); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_worker.go b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_worker.go
new file mode 100644
index 00000000..f9dfc014
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/workflow/wpt_runs_worker.go
@@ -0,0 +1,90 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workflow
+
+import (
+ "context"
+ "log/slog"
+ "sync"
+ "time"
+
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// NewWptRunsWorker constructs a WptRunsWorker from its collaborators.
+func NewWptRunsWorker(runsGetter RunsGetter, runsProcessor RunsProcessor) *WptRunsWorker {
+	return &WptRunsWorker{
+		runsGetter:    runsGetter,
+		runsProcessor: runsProcessor,
+	}
+}
+
+// WptRunsWorker consumes (browser, channel) jobs, fetching and processing the
+// matching WPT runs for each one.
+type WptRunsWorker struct {
+	runsGetter    RunsGetter
+	runsProcessor RunsProcessor
+}
+
+// workflowArguments describes one unit of work: process the runs for a single
+// browser on a single channel, starting from the given time.
+type workflowArguments struct {
+	from    time.Time
+	browser string
+	channel string
+}
+
+// RunsProcessor processes a batch of WPT runs.
+type RunsProcessor interface {
+	ProcessRuns(context.Context, shared.TestRuns) error
+}
+
+// RunsGetter represents the behavior to get all the runs up until the given
+// date.
+type RunsGetter interface {
+	GetRuns(
+		ctx context.Context,
+		from time.Time,
+		runsPerPage int,
+		browserName string,
+		channelName string,
+	) (shared.TestRuns, error)
+}
+
+// Start consumes jobs until the jobs channel is closed, forwarding each
+// failure to errChan. It signals wg exactly once when it returns.
+func (w WptRunsWorker) Start(
+	ctx context.Context,
+	id int,
+	wg *sync.WaitGroup,
+	jobs <-chan workflowArguments,
+	errChan chan<- error) {
+	defer wg.Done()
+	slog.Info("starting worker", "worker id", id)
+	for job := range jobs {
+		if err := w.startWorkflowForBrowserAndChannel(ctx, job); err != nil {
+			errChan <- err
+		}
+	}
+}
+
+// startWorkflowForBrowserAndChannel fetches every run for the job's browser
+// and channel since job.from, then hands them to the runs processor.
+func (w WptRunsWorker) startWorkflowForBrowserAndChannel(
+	ctx context.Context,
+	job workflowArguments) error {
+	runs, err := w.runsGetter.GetRuns(ctx, job.from, shared.MaxCountMaxValue, job.browser, job.channel)
+	if err != nil {
+		return err
+	}
+
+	return w.runsProcessor.ProcessRuns(ctx, runs)
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/wptfyi/client.go b/workflows/steps/services/wpt_consumer/pkg/wptfyi/client.go
new file mode 100644
index 00000000..9ebbb283
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/wptfyi/client.go
@@ -0,0 +1,60 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package wptfyi
+
+import (
+ "context"
+ "time"
+
+ mapset "github.com/deckarep/golang-set"
+ "github.com/web-platform-tests/wpt.fyi/shared"
+)
+
+// HTTPClient fetches WPT run metadata from a wpt.fyi host.
+type HTTPClient struct {
+	hostname string
+	// pageLimit is the maximum number of runs requested per query.
+	pageLimit int
+}
+
+// NewHTTPClient constructs an HTTPClient for the given wpt.fyi hostname.
+func NewHTTPClient(hostname string) HTTPClient {
+	return HTTPClient{
+		hostname: hostname,
+		// Request the max every time
+		pageLimit: shared.MaxCountMaxValue,
+	}
+}
+
+// GetRuns fetches test runs from the wpt.fyi host labeled with the given
+// browser and channel, starting from the given time. The runsPerPage
+// argument is ignored; the client always requests its configured page limit.
+func (w HTTPClient) GetRuns(
+	_ context.Context,
+	from time.Time,
+	_ int,
+	browserName string,
+	channelName string,
+) (shared.TestRuns, error) {
+	//nolint:exhaustruct
+	// External struct does not need comply with exhaustruct.
+	apiOptions := shared.TestRunFilter{
+		From:     &from,
+		MaxCount: &w.pageLimit,
+		Labels:   mapset.NewSetWith(browserName, channelName),
+	}
+
+	collected := shared.TestRuns{}
+	runs, err := shared.FetchRuns(w.hostname, apiOptions)
+	if err != nil {
+		return nil, err
+	}
+	collected = append(collected, runs...)
+
+	return collected, nil
+}
diff --git a/workflows/steps/services/wpt_consumer/pkg/wptfyi/client_integration_test.go b/workflows/steps/services/wpt_consumer/pkg/wptfyi/client_integration_test.go
new file mode 100644
index 00000000..1946caec
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/pkg/wptfyi/client_integration_test.go
@@ -0,0 +1,32 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package wptfyi
+
+import (
+ "context"
+ "testing"
+ "time"
+)
+
+// TestGetRunsIntegration is an integration test: it queries the live wpt.fyi
+// API for chrome stable runs from roughly the last year.
+func TestGetRunsIntegration(t *testing.T) {
+	client := NewHTTPClient("wpt.fyi")
+	runs, err := client.GetRuns(context.TODO(), time.Now().AddDate(0, 0, -365).UTC(), 0, "chrome", "stable")
+	if err != nil {
+		// Fatal, not Error: the length check below is meaningless when the
+		// request itself failed.
+		t.Fatalf("unexpected error getting runs: %s", err.Error())
+	}
+	if len(runs) < 1 {
+		t.Error("received no runs")
+	}
+}
diff --git a/workflows/steps/services/wpt_consumer/skaffold.yaml b/workflows/steps/services/wpt_consumer/skaffold.yaml
new file mode 100644
index 00000000..60203d5c
--- /dev/null
+++ b/workflows/steps/services/wpt_consumer/skaffold.yaml
@@ -0,0 +1,43 @@
+# Copyright 2023 Google LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# https://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: skaffold/v4beta9
+kind: Config
+metadata:
+ name: wpt-consumer-config
+requires:
+ - path: ../../../../.dev/datastore
+ - path: ../../../../.dev/gcs
+profiles:
+ - name: local
+ build:
+ artifacts:
+ - image: wpt-consumer
+ context: ../../../../
+ runtimeType: go
+ docker:
+ dockerfile: images/go_service.Dockerfile
+ buildArgs:
+              service_dir: workflows/steps/services/wpt_consumer
+ local: {}
+ manifests:
+ rawYaml:
+ - manifests/*
+ deploy:
+ kubectl: {}
+ portForward:
+ - resourceType: pod
+ resourceName: wpt-consumer
+ port: 8080
+ localPort: 8093