diff --git a/annotation/annotation.go b/annotation.go similarity index 80% rename from annotation/annotation.go rename to annotation.go index 4644bc9..b724984 100644 --- a/annotation/annotation.go +++ b/annotation.go @@ -1,22 +1,16 @@ -package annotation +package simplejson import ( "encoding/json" - "github.com/clambin/simplejson/v3/common" "time" ) -// Request is a query for annotation. -type Request struct { +// AnnotationRequest is a query for annotation. +type AnnotationRequest struct { Annotation RequestDetails `json:"annotation"` Args } -// Args contains arguments for the Annotations endpoint. -type Args struct { - common.Args -} - // RequestDetails specifies which annotation should be returned. type RequestDetails struct { Name string `json:"name"` @@ -25,13 +19,13 @@ type RequestDetails struct { Query string `json:"query"` } -// UnmarshalJSON unmarshalls a Request from JSON -func (r *Request) UnmarshalJSON(b []byte) (err error) { - type Request2 Request +// UnmarshalJSON unmarshalls a AnnotationRequest from JSON +func (r *AnnotationRequest) UnmarshalJSON(b []byte) (err error) { + type Request2 AnnotationRequest var c Request2 err = json.Unmarshal(b, &c) if err == nil { - *r = Request(c) + *r = AnnotationRequest(c) } return err } diff --git a/annotation/annotation_test.go b/annotation_test.go similarity index 80% rename from annotation/annotation_test.go rename to annotation_test.go index b922ac6..ab8a62d 100644 --- a/annotation/annotation_test.go +++ b/annotation_test.go @@ -1,26 +1,23 @@ -package annotation_test +package simplejson import ( "encoding/json" - "flag" - "github.com/clambin/simplejson/v3/annotation" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "os" "path/filepath" + "strings" "testing" "time" ) -var update = flag.Bool("update", false, "update .golden files") - func TestAnnotation_MarshalJSON(t *testing.T) { - ann := annotation.Annotation{ + ann := Annotation{ Time: time.Date(2022, time.January, 23, 0, 0, 0, 0, 
time.UTC), Title: "foo", Text: "bar", Tags: []string{"A", "B"}, - Request: annotation.RequestDetails{ + Request: RequestDetails{ Name: "snafu", Datasource: "datasource", Enable: true, @@ -31,7 +28,7 @@ func TestAnnotation_MarshalJSON(t *testing.T) { body, err := json.Marshal(ann) require.NoError(t, err) - gp := filepath.Join("testdata", t.Name()+"_1.golden") + gp := filepath.Join("testdata", strings.ToLower(t.Name()), "1.golden") if *update { t.Logf("updating golden file for %s", t.Name()) err = os.WriteFile(gp, body, 0644) @@ -54,7 +51,7 @@ func TestAnnotation_MarshalJSON(t *testing.T) { body, err = json.Marshal(ann) require.NoError(t, err) - gp = filepath.Join("testdata", t.Name()+"_2.golden") + gp = filepath.Join("testdata", strings.ToLower(t.Name()), "2.golden") if *update { t.Logf("updating golden file for %s", t.Name()) err = os.WriteFile(gp, body, 0644) diff --git a/common/common.go b/common.go similarity index 96% rename from common/common.go rename to common.go index 99af304..e9b41b2 100644 --- a/common/common.go +++ b/common.go @@ -1,4 +1,4 @@ -package common +package simplejson import "time" diff --git a/data/response.go b/data/response.go deleted file mode 100644 index 772903b..0000000 --- a/data/response.go +++ /dev/null @@ -1,41 +0,0 @@ -package data - -import ( - "github.com/clambin/simplejson/v3/query" - "github.com/grafana/grafana-plugin-sdk-go/data" - "time" -) - -// CreateTableResponse creates a simplejson TableResponse from a Dataset -func (t Table) CreateTableResponse() *query.TableResponse { - columns := make([]query.Column, len(t.Frame.Fields)) - - for i, f := range t.Frame.Fields { - columns[i] = makeColumn(f) - } - - return &query.TableResponse{Columns: columns} -} - -func makeColumn(f *data.Field) (column query.Column) { - name := f.Name - if name == "" { - name = "(unknown)" - } - - var values interface{} - if f.Len() > 0 { - switch f.At(0).(type) { - case time.Time: - values = query.TimeColumn(getFieldValues[time.Time](f)) - case string: 
- values = query.StringColumn(getFieldValues[string](f)) - case float64: - values = query.NumberColumn(getFieldValues[float64](f)) - } - } - return query.Column{ - Text: name, - Data: values, - } -} diff --git a/dataset/dataset.go b/dataset/dataset.go deleted file mode 100644 index 5fe8fd3..0000000 --- a/dataset/dataset.go +++ /dev/null @@ -1,183 +0,0 @@ -package dataset - -import ( - "github.com/clambin/simplejson/v3/query" - "time" -) - -// Dataset is a convenience data structure to construct a SimpleJSON table response. Use this when you're adding -// data for a range of (possibly out of order) timestamps. -// -// Deprecated: use data.Table instead -type Dataset struct { - data [][]float64 - timestamps *Indexer[time.Time] - columns *Indexer[string] -} - -// New creates a new Dataset -func New() *Dataset { - return &Dataset{ - timestamps: MakeIndexer[time.Time](), - columns: MakeIndexer[string](), - } -} - -// Add adds a value for a specified timestamp and column to the dataset. If there is already a value for that -// timestamp and column, the specified value is added to the existing value. -func (d *Dataset) Add(timestamp time.Time, column string, value float64) { - d.ensureColumnExists(column) - - row, added := d.timestamps.Add(timestamp) - if added { - d.data = append(d.data, make([]float64, d.columns.Count())) - } - col, _ := d.columns.GetIndex(column) - d.data[row][col] += value -} - -func (d *Dataset) ensureColumnExists(column string) { - if _, added := d.columns.Add(column); !added { - return - } - - // new column. add data for the new column to each row - for key, entry := range d.data { - d.data[key] = append(entry, 0) - } -} - -// Size returns the number of rows in the dataset. -func (d *Dataset) Size() int { - return d.timestamps.Count() -} - -// AddColumn adds a new column to the dataset. For each timestamp, processor is called with the values for the -// existing columns. Processor's return value is then added for the new column. 
-func (d *Dataset) AddColumn(column string, processor func(values map[string]float64) float64) { - columns := d.columns.List() - for index, row := range d.data { - d.data[index] = append(row, processor(d.rowValues(row, columns))) - } - d.columns.Add(column) -} - -func (d *Dataset) rowValues(row []float64, columns []string) (values map[string]float64) { - values = make(map[string]float64) - for _, column := range columns { - idx, _ := d.columns.GetIndex(column) - values[column] = row[idx] - } - return -} - -// GetTimestamps returns the (sorted) list of timestamps in the dataset. -func (d *Dataset) GetTimestamps() (timestamps []time.Time) { - return d.timestamps.List() -} - -// GetColumns returns the (sorted) list of column names. -func (d *Dataset) GetColumns() (columns []string) { - return d.columns.List() -} - -// GetValues returns the value for the specified column for each timestamp in the dataset. The values are sorted by timestamp. -func (d *Dataset) GetValues(column string) (values []float64, ok bool) { - var index int - index, ok = d.columns.GetIndex(column) - - if !ok { - return - } - - values = make([]float64, len(d.data)) - for i, timestamp := range d.timestamps.List() { - rowIndex, _ := d.timestamps.GetIndex(timestamp) - values[i] = d.data[rowIndex][index] - } - return -} - -// FilterByRange removes any rows in the dataset that are outside the specified from/to time range. If from/to is zero, -// it is ignored. -func (d *Dataset) FilterByRange(from, to time.Time) { - // make a list of all records to be removed, and the remaining timestamps - timestamps := make([]time.Time, 0, d.timestamps.Count()) - var remove bool - for _, timestamp := range d.timestamps.List() { - if !from.IsZero() && timestamp.Before(from) { - remove = true - continue - } else if !to.IsZero() && timestamp.After(to) { - remove = true - continue - } - timestamps = append(timestamps, timestamp) - } - - // nothing to do here? 
- if !remove { - return - } - - // create a new data list from the timestamps we want to keep - data := make([][]float64, len(timestamps)) - ts := MakeIndexer[time.Time]() - for index, timestamp := range timestamps { - i, _ := d.timestamps.GetIndex(timestamp) - data[index] = d.data[i] - ts.Add(timestamp) - } - d.data = data - d.timestamps = ts -} - -// Accumulate accumulates the values for each column by time. E.g. if the values were 1, 1, 1, 1, the result would be -// 1, 2, 3, 4. -func (d *Dataset) Accumulate() { - accumulated := make([]float64, d.columns.Count()) - - for _, timestamp := range d.timestamps.List() { - row, _ := d.timestamps.GetIndex(timestamp) - for index, value := range d.data[row] { - accumulated[index] += value - } - copy(d.data[row], accumulated) - } -} - -// Copy returns a copy of the dataset -func (d *Dataset) Copy() (clone *Dataset) { - clone = &Dataset{ - data: make([][]float64, len(d.data)), - timestamps: d.timestamps.Copy(), - columns: d.columns.Copy(), - } - for index, row := range d.data { - clone.data[index] = make([]float64, len(row)) - copy(clone.data[index], row) - } - return -} - -// GenerateTableResponse creates a TableResponse for the dataset -func (d *Dataset) GenerateTableResponse() (response *query.TableResponse) { - response = &query.TableResponse{ - Columns: []query.Column{{ - Text: "timestamp", - Data: query.TimeColumn(d.GetTimestamps()), - }}, - } - - for _, column := range d.GetColumns() { - values, _ := d.GetValues(column) - if column == "" { - column = "(unknown)" - } - response.Columns = append(response.Columns, query.Column{ - Text: column, - Data: query.NumberColumn(values), - }) - } - return -} diff --git a/dataset/dataset_test.go b/dataset/dataset_test.go deleted file mode 100644 index f7f6a06..0000000 --- a/dataset/dataset_test.go +++ /dev/null @@ -1,233 +0,0 @@ -package dataset_test - -import ( - "github.com/clambin/simplejson/v3/dataset" - "github.com/clambin/simplejson/v3/query" - 
"github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "testing" - "time" -) - -func TestDataset_Basic(t *testing.T) { - d := dataset.New() - assert.NotNil(t, d) - - for day := 1; day < 5; day++ { - d.Add(time.Date(2022, time.January, 5-day, 0, 0, 0, 0, time.UTC), "A", float64(5-day)) - } - - d.AddColumn("B", func(values map[string]float64) float64 { - return values["A"] * 2 - }) - - assert.Equal(t, 4, d.Size()) - assert.Equal(t, []string{"A", "B"}, d.GetColumns()) - assert.Equal(t, []time.Time{ - time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 2, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 3, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 4, 0, 0, 0, 0, time.UTC), - }, d.GetTimestamps()) - - values, ok := d.GetValues("B") - require.True(t, ok) - assert.Equal(t, []float64{2, 4, 6, 8}, values) -} - -func TestDataset_FilterByRange(t *testing.T) { - d := dataset.New() - assert.NotNil(t, d) - - for day := 1; day < 32; day++ { - d.Add(time.Date(2022, time.January, day, 0, 0, 0, 0, time.UTC), "A", float64(day)) - } - - assert.Equal(t, 31, d.Size()) - - d.FilterByRange(time.Time{}, time.Date(2023, time.January, 1, 0, 0, 0, 0, time.UTC)) - assert.Equal(t, 31, d.Size()) - - d.FilterByRange(time.Time{}, time.Date(2022, time.January, 30, 0, 0, 0, 0, time.UTC)) - assert.Equal(t, 30, d.Size()) - - d.FilterByRange(time.Date(2022, time.January, 2, 0, 0, 0, 0, time.UTC), time.Time{}) - assert.Equal(t, 29, d.Size()) - - d.FilterByRange(time.Date(2022, time.January, 8, 0, 0, 0, 0, time.UTC), time.Date(2022, time.January, 14, 0, 0, 0, 0, time.UTC)) - assert.Equal(t, 7, d.Size()) - - values, ok := d.GetValues("A") - require.True(t, ok) - assert.Equal(t, []float64{8, 9, 10, 11, 12, 13, 14}, values) - -} - -func TestDataset_Accumulate(t *testing.T) { - d := dataset.New() - assert.NotNil(t, d) - - for day := 1; day < 32; day++ { - d.Add(time.Date(2022, time.January, day, 0, 0, 0, 0, time.UTC), "A", 1.0) - 
} - - d.Accumulate() - - values, ok := d.GetValues("A") - require.True(t, ok) - expected := 1.0 - for index, value := range values { - require.Equal(t, expected, value, index) - expected++ - } -} - -func TestDataset_Copy(t *testing.T) { - d := dataset.New() - assert.NotNil(t, d) - - for day := 1; day < 5; day++ { - ts := time.Date(2022, time.January, day, 0, 0, 0, 0, time.UTC) - d.Add(ts, "A", 1.0) - } - - clone := d.Copy() - - d.Accumulate() - - values, ok := d.GetValues("A") - require.True(t, ok) - assert.Equal(t, []float64{1, 2, 3, 4}, values) - - values, ok = clone.GetValues("A") - require.True(t, ok) - assert.Equal(t, []float64{1, 1, 1, 1}, values) -} - -func TestDataset_GenerateTableResponse(t *testing.T) { - d := dataset.New() - assert.NotNil(t, d) - - for day := 1; day < 5; day++ { - ts := time.Date(2022, time.January, 5-day, 0, 0, 0, 0, time.UTC) - d.Add(ts, "", float64(5-day)) - } - - response := d.GenerateTableResponse() - assert.Equal(t, &query.TableResponse{ - Columns: []query.Column{ - { - Text: "timestamp", - Data: query.TimeColumn{ - time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 2, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 3, 0, 0, 0, 0, time.UTC), - time.Date(2022, time.January, 4, 0, 0, 0, 0, time.UTC), - }, - }, - { - Text: "(unknown)", - Data: query.NumberColumn{1, 2, 3, 4}, - }, - }, - }, response) -} - -func BenchmarkDataset_Add(b *testing.B) { - for i := 0; i < b.N; i++ { - d := dataset.New() - for y := 0; y < 5; y++ { - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - } - } -} - -func BenchmarkDataset_GetColumns(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - 
b.ResetTimer() - for i := 0; i < b.N; i++ { - _ = d.GetColumns() - } -} - -func BenchmarkDataset_GetTimestamps(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - d.GetTimestamps() - } -} - -func BenchmarkDataset_GetValues(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, _ = d.GetValues("A") - } -} - -func BenchmarkDataset_FilterByRange(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(24 * time.Hour) - } - - b.ResetTimer() - - start := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - stop := timestamp - for i := 0; i < b.N; i++ { - d.FilterByRange(start, stop) - start = start.Add(12 * time.Hour) - stop = stop.Add(-12 * time.Hour) - } -} - -func BenchmarkDataset_AddColumn(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - d.AddColumn("B", func(_ map[string]float64) float64 { - return 1 - }) - } -} - -func BenchmarkDataset_Copy(b *testing.B) { - d := dataset.New() - timestamp := time.Date(2020, time.January, 1, 0, 0, 0, 0, time.UTC) - for day := 0; day < 5*365; day++ { - d.Add(timestamp, "A", float64(day)) - timestamp = timestamp.Add(-24 * time.Hour) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - _ = d.Copy() - } -} diff 
--git a/dataset/doc.go b/dataset/doc.go deleted file mode 100644 index 9eb97ee..0000000 --- a/dataset/doc.go +++ /dev/null @@ -1,21 +0,0 @@ -/* -Package dataset makes it easier to produce time-based responses when dealing with data that may not necessarily be sequential. - -A dataset holds a table of rows for each timestamp that is added to the dataset. When adding new columns, empty cells are -automatically added to the table for existing rows: - - d := dataset.New() // creates an empty dataset - d.Add(time.Now(), "A", 1) // dataset has one row with a single cell, set to 1 - d.Add(time.Now(), "B", 2) // dataset now has two rows. First row is 1, 0. Second row is 0, 2 - -Furthermore, dataset allows to add a new column, calculated from the values of the other columns: - - d := dataset.New() - // add rows with values for columns "A" and "B" - - d.AddColumn("C", func(values map[string]float64) float64 { - return values["A"] + values["B"] - }) - // dataset now has a column "C", with the sum of columns "A" and "B" -*/ -package dataset diff --git a/dataset/doc_test.go b/dataset/doc_test.go deleted file mode 100644 index 375e1b9..0000000 --- a/dataset/doc_test.go +++ /dev/null @@ -1,23 +0,0 @@ -package dataset_test - -import ( - "fmt" - "github.com/clambin/simplejson/v3/dataset" - "time" -) - -func Example() { - d := dataset.New() - - for day := 1; day < 5; day++ { - d.Add(time.Date(2022, time.January, 5-day, 0, 0, 0, 0, time.UTC), "A", float64(5-day)) - } - - d.AddColumn("B", func(values map[string]float64) float64 { - return values["A"] * 2 - }) - - response := d.GenerateTableResponse() - - fmt.Printf("%v\n", response) -} diff --git a/dataset/indexer.go b/dataset/indexer.go deleted file mode 100644 index 9495877..0000000 --- a/dataset/indexer.go +++ /dev/null @@ -1,91 +0,0 @@ -package dataset - -import ( - "sort" - "time" -) - -type ordered interface { - time.Time | string -} - -// Indexer holds a unique set of values, and records the order in which they were added. 
-// Currently, it supports string and time.Time data. -type Indexer[T ordered] struct { - values []T - indices map[T]int - inOrder bool -} - -// MakeIndexer returns a new indexer -func MakeIndexer[T ordered]() *Indexer[T] { - return &Indexer[T]{ - values: make([]T, 0), - indices: make(map[T]int), - inOrder: true, - } -} - -// GetIndex returns the index of a value (i.e. when that value was added) -func (idx *Indexer[T]) GetIndex(value T) (index int, found bool) { - index, found = idx.indices[value] - return -} - -// Count returns the number of values in the Indexer -func (idx *Indexer[T]) Count() int { - return len(idx.values) -} - -// List returns the (sorted) values in the Indexer -func (idx *Indexer[T]) List() (values []T) { - if !idx.inOrder { - sort.Slice(idx.values, func(i, j int) bool { return isLessThan(idx.values[i], idx.values[j]) }) - idx.inOrder = true - } - return idx.values -} - -// Add adds a new value to the Indexer. It returns the index of that value and whether the value was actually added. 
-func (idx *Indexer[T]) Add(value T) (int, bool) { - index, found := idx.indices[value] - - if found { - return index, false - } - - index = len(idx.values) - idx.indices[value] = index - - if idx.inOrder && index > 0 { - idx.inOrder = !isLessThan(value, idx.values[index-1]) - } - idx.values = append(idx.values, value) - return index, true -} - -// Copy returns a copy of the Indexer -func (idx *Indexer[T]) Copy() (clone *Indexer[T]) { - clone = &Indexer[T]{ - values: make([]T, len(idx.values)), - indices: make(map[T]int), - } - copy(clone.values, idx.values) - for key, val := range idx.indices { - clone.indices[key] = val - } - return -} - -func isLessThan[T ordered](a, b T) (isLess bool) { - // this works around the fact that we can't type switch on T - var x interface{} = a - var y interface{} = b - switch (x).(type) { - case string: - isLess = x.(string) < y.(string) - case time.Time: - isLess = x.(time.Time).Before(y.(time.Time)) - } - return -} diff --git a/dataset/indexer_test.go b/dataset/indexer_test.go deleted file mode 100644 index cc1f3f8..0000000 --- a/dataset/indexer_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package dataset_test - -import ( - "fmt" - "github.com/clambin/simplejson/v3/dataset" - "github.com/stretchr/testify/assert" - "math/rand" - "testing" - "time" -) - -func TestIndexer_Time(t *testing.T) { - idx := dataset.MakeIndexer[time.Time]() - indices := make(map[time.Time]int) - - const iterations = 100 - - for i := 0; i < iterations; i++ { - value := time.Date(2022, time.March, 1+rand.Intn(31), 0, 0, 0, 0, time.UTC) - - if index, added := idx.Add(value); added { - indices[value] = index - } - } - - assert.Len(t, indices, idx.Count()) - - for value, index := range indices { - i, found := idx.GetIndex(value) - assert.True(t, found) - assert.Equal(t, i, index) - } -} - -func TestIndexer_String(t *testing.T) { - idx := dataset.MakeIndexer[string]() - indices := make(map[string]int) - - const iterations = 100 - - for i := 0; i < iterations; i++ { - 
value := fmt.Sprintf("%02d", 1+rand.Intn(31)) - - if index, added := idx.Add(value); added { - indices[value] = index - } - } - - assert.Len(t, indices, idx.Count()) - - for value, index := range indices { - i, found := idx.GetIndex(value) - assert.True(t, found) - assert.Equal(t, i, index) - } -} - -func TestIndexer_Reorder(t *testing.T) { - input := []string{"C", "B", "A"} - idx := dataset.MakeIndexer[string]() - - for index, value := range input { - i, added := idx.Add(value) - assert.True(t, added) - assert.Equal(t, index, i) - } - - result := idx.List() - assert.Equal(t, []string{"A", "B", "C"}, result) - - for index, value := range input { - i, found := idx.GetIndex(value) - assert.True(t, found) - assert.Equal(t, index, i, value) - } -} diff --git a/dataset/indexer_unit_test.go b/dataset/indexer_unit_test.go deleted file mode 100644 index 3aaeebd..0000000 --- a/dataset/indexer_unit_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package dataset - -import ( - "github.com/stretchr/testify/assert" - "testing" - "time" -) - -func TestLessThan(t *testing.T) { - assert.True(t, isLessThan("A", "B")) - - ts := time.Now() - assert.True(t, isLessThan(ts, ts.Add(time.Hour))) - assert.False(t, isLessThan(ts, ts.Add(-time.Hour))) -} diff --git a/doc.go b/doc.go index 80e7740..e53e945 100644 --- a/doc.go +++ b/doc.go @@ -1,13 +1,13 @@ /* Package simplejson provides a Go implementation for Grafana's SimpleJSON datasource: https://grafana.com/grafana/plugins/grafana-simple-json-datasource -Overview +# Overview A simplejson server is an HTTP server that supports one or more handlers. Each handler can support multiple targets, each of which can be supported by a timeseries or table query. Optionally tag can be used to alter the behaviour of the query (e.g. filtering what data should be returned). Finally, a handler can support annotation, i.e. a set of timestamps with associated text. 
-Server +# Server To create a SimpleJSON server, create a Server and run it: @@ -20,16 +20,16 @@ To create a SimpleJSON server, create a Server and run it: This starts a server, listening on port 8080, with one target "my-target", served by myHandler. -Handler +# Handler A handler serves incoming requests from Grafana, e.g. queries, requests for annotations or tag. The Handler interface contains all functions a handler needs to implement. It contains only one function (Endpoints). This function returns the Grafana SimpleJSON endpoints that the handler supports. Those can be: - - Query() implements the /query endpoint. handles both timeserie & table responses - - Annotations() implements the /annotation endpoint - - TagKeys() implements the /tag-keys endpoint - - TagValues() implements the /tag-values endpoint + - Query() implements the /query endpoint. handles both timeserie & table responses + - Annotations() implements the /annotation endpoint + - TagKeys() implements the /tag-keys endpoint + - TagValues() implements the /tag-values endpoint Here's an example of a handler that supports timeseries queries: @@ -42,21 +42,21 @@ Here's an example of a handler that supports timeseries queries: } } - func (handler *myHandler) Query(ctx context.Context, target string, target *simplejson.Args) (response *simplejson.QueryResponse, err error) { + func (handler *myHandler) Query(ctx context.Context, target string, target *simplejson.QueryArgs) (response *simplejson.QueryResponse, err error) { // build response return } -Queries +# Queries SimpleJSON supports two types of query responses: timeseries responses and table responses. Timeseries queries return values as a list of timestamp/value tuples. 
Here's an example of a timeseries query handler: - func (handler *myHandler) Query(_ context.Context, _ string, _ query.Args) (response *query.TimeSeriesResponse, err error) { + func (handler *myHandler) Query(_ context.Context, _ string, _ query.QueryArgs) (response *simplejson.TimeSeriesResponse, err error) { response = &query.TimeSeriesResponse{ Name: "A", - DataPoints: []query.DataPoint{ + DataPoints: []simplejson.DataPoint{ {Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), Value: 100}, {Timestamp: time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC), Value: 101}, {Timestamp: time.Date(2020, 1, 1, 0, 2, 0, 0, time.UTC), Value: 103}, @@ -67,9 +67,9 @@ Timeseries queries return values as a list of timestamp/value tuples. Here's an Table Queries, on the other hand, return data organized in columns and rows. Each column needs to have the same number of rows: - func (handler *myHandler) TableQuery(_ context.Context, _ string, _ query.Args) (response *query.TableResponse, err error) { - response = &query.TableResponse{ - Columns: []query.Column{ + func (handler *myHandler) TableQuery(_ context.Context, _ string, _ query.QueryArgs) (response *simplejson.TableResponse, err error) { + response = &simplejson.TableResponse{ + Columns: []simplejson.Column{ { Text: "Time", Data: query.TimeColumn{time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC)} }, { Text: "Label", Data: query.StringColumn{"foo", "bar"}}, { Text: "Series A", Data: query.NumberColumn{42, 43}}, @@ -79,12 +79,12 @@ Table Queries, on the other hand, return data organized in columns and rows. 
Ea return } -Annotations +# Annotations The /annotations endpoint returns Annotations: - func (h *handler) Annotations(_ annotation.Request) (annotations []annotation.Annotation, err error) { - annotations = []annotation.Annotation{ + func (h *handler) Annotations(_ simplejson.QueryRequest) (annotations []simplejson.Annotation, err error) { + annotations = []simplejson.Annotation{ { Time: time.Now().Add(-5 * time.Minute), Title: "foo", @@ -98,8 +98,7 @@ The /annotations endpoint returns Annotations: NOTE: this is only called when using the SimpleJSON datasource. simPod / GrafanaJsonDatasource does not use the /annotations endpoint. Instead, it will call a regular /query and allows to configure its response as annotations instead. - -Tags +# Tags The /tag-keys and /tag-values endpoints return supported keys and key values respectively for your data source. A Grafana dashboard can then be confirmed to show those keys and its possible values as a filter. @@ -119,17 +118,15 @@ The following sets up a key & key value handler: When the dashboard performs a query with a tag selected, that tag & value will be added in the request's AdHocFilters. - -Metrics +# Metrics simplejson exports two Prometheus metrics for performance analytics: simplejson_query_duration_seconds: duration of query requests by target, in seconds simplejson_query_failed_count: number of failed query requests -Other topics +# Other topics For information on query arguments and tags, refer to the documentation for those data structures. 
- */ package simplejson diff --git a/doc_test.go b/doc_test.go index 208f5bc..e261f77 100644 --- a/doc_test.go +++ b/doc_test.go @@ -3,9 +3,7 @@ package simplejson_test import ( "context" "fmt" - "github.com/clambin/simplejson/v3" - "github.com/clambin/simplejson/v3/annotation" - "github.com/clambin/simplejson/v3/query" + "github.com/clambin/simplejson/v4" "time" ) @@ -31,33 +29,33 @@ func (h *handler) Endpoints() simplejson.Endpoints { } } -func (h *handler) Query(ctx context.Context, req query.Request) (query.Response, error) { +func (h *handler) Query(ctx context.Context, req simplejson.QueryRequest) (simplejson.Response, error) { if h.table == false { return h.timeSeriesQuery(ctx, req) } return h.tableQuery(ctx, req) } -func (h *handler) timeSeriesQuery(_ context.Context, _ query.Request) (*query.TimeSeriesResponse, error) { - dataPoints := make([]query.DataPoint, 60) +func (h *handler) timeSeriesQuery(_ context.Context, _ simplejson.QueryRequest) (simplejson.TimeSeriesResponse, error) { + dataPoints := make([]simplejson.DataPoint, 60) timestamp := time.Now().Add(-1 * time.Hour) for i := 0; i < 60; i++ { - dataPoints[i] = query.DataPoint{ + dataPoints[i] = simplejson.DataPoint{ Timestamp: timestamp, - Value: int64(i), + Value: float64(i), } timestamp = timestamp.Add(1 * time.Minute) } - return &query.TimeSeriesResponse{ + return simplejson.TimeSeriesResponse{ DataPoints: dataPoints, }, nil } -func (h *handler) tableQuery(_ context.Context, _ query.Request) (*query.TableResponse, error) { - timestamps := make(query.TimeColumn, 60) - seriesA := make(query.NumberColumn, 60) - seriesB := make(query.NumberColumn, 60) +func (h *handler) tableQuery(_ context.Context, _ simplejson.QueryRequest) (simplejson.TableResponse, error) { + timestamps := make(simplejson.TimeColumn, 60) + seriesA := make(simplejson.NumberColumn, 60) + seriesB := make(simplejson.NumberColumn, 60) timestamp := time.Now().Add(-1 * time.Hour) for i := 0; i < 60; i++ { @@ -67,17 +65,15 @@ func (h 
*handler) tableQuery(_ context.Context, _ query.Request) (*query.TableRe timestamp = timestamp.Add(1 * time.Minute) } - return &query.TableResponse{ - Columns: []query.Column{ - {Text: "timestamp", Data: timestamps}, - {Text: "series A", Data: seriesA}, - {Text: "series B", Data: seriesB}, - }, - }, nil + return simplejson.TableResponse{Columns: []simplejson.Column{ + {Text: "timestamp", Data: timestamps}, + {Text: "series A", Data: seriesA}, + {Text: "series B", Data: seriesB}, + }}, nil } -func (h *handler) Annotations(_ annotation.Request) ([]annotation.Annotation, error) { - return []annotation.Annotation{{ +func (h *handler) Annotations(_ simplejson.AnnotationRequest) ([]simplejson.Annotation, error) { + return []simplejson.Annotation{{ Time: time.Now().Add(-5 * time.Minute), Title: "foo", Text: "bar", diff --git a/endpoints.go b/endpoints.go index a1b7dbb..7cc5dc7 100644 --- a/endpoints.go +++ b/endpoints.go @@ -2,8 +2,6 @@ package simplejson import ( "encoding/json" - "github.com/clambin/simplejson/v3/annotation" - "github.com/clambin/simplejson/v3/query" "net/http" ) @@ -13,7 +11,7 @@ func (s *Server) Search(w http.ResponseWriter, _ *http.Request) { } func (s *Server) Query(w http.ResponseWriter, req *http.Request) { - var request query.Request + var request QueryRequest handleEndpoint(w, req, &request, func() ([]json.Marshaler, error) { return s.handleQuery(req.Context(), request) }) @@ -28,32 +26,28 @@ func (s *Server) Annotations(w http.ResponseWriter, req *http.Request) { return } - var request annotation.Request - handleEndpoint(w, req, &request, func() (response []json.Marshaler, err error) { - var annotations []annotation.Annotation + var request AnnotationRequest + handleEndpoint(w, req, &request, func() ([]json.Marshaler, error) { + var annotations []Annotation for _, h := range s.Handlers { - if h.Endpoints().Annotations == nil { - continue - } - - var newAnnotations []annotation.Annotation - newAnnotations, err = 
h.Endpoints().Annotations(request) - - if err == nil { - annotations = append(annotations, newAnnotations...) + if h.Endpoints().Annotations != nil { + if newAnnotations, err := h.Endpoints().Annotations(request); err == nil { + annotations = append(annotations, newAnnotations...) + } } } + var response []json.Marshaler for index := range annotations { annotations[index].Request = request.Annotation response = append(response, &annotations[index]) } - return + return response, nil }) } func (s *Server) TagKeys(w http.ResponseWriter, req *http.Request) { - handleEndpoint(w, req, nil, func() (keys []json.Marshaler, err error) { + handleEndpoint(w, req, nil, func() (keys []json.Marshaler, _ error) { for _, handler := range s.Handlers { if handler.Endpoints().TagKeys != nil { for _, newKey := range handler.Endpoints().TagKeys(req.Context()) { @@ -67,22 +61,23 @@ func (s *Server) TagKeys(w http.ResponseWriter, req *http.Request) { func (s *Server) TagValues(w http.ResponseWriter, req *http.Request) { var key valueKey - handleEndpoint(w, req, &key, func() (response []json.Marshaler, err error) { + handleEndpoint(w, req, &key, func() ([]json.Marshaler, error) { + var response []json.Marshaler for _, handler := range s.Handlers { - if handler.Endpoints().TagValues != nil { - var values []string - values, err = handler.Endpoints().TagValues(req.Context(), key.Key) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return nil, err - } + if handler.Endpoints().TagValues == nil { + continue + } + values, err := handler.Endpoints().TagValues(req.Context(), key.Key) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return nil, err + } - for _, v := range values { - response = append(response, &value{Text: v}) - } + for _, v := range values { + response = append(response, &value{Text: v}) } } - return + return response, nil }) } diff --git a/endpoints_query.go b/endpoints_query.go index d224e24..4dbf18e 100644 --- 
a/endpoints_query.go +++ b/endpoints_query.go @@ -4,11 +4,10 @@ import ( "context" "encoding/json" "fmt" - "github.com/clambin/simplejson/v3/query" "github.com/prometheus/client_golang/prometheus" ) -func (s *Server) handleQuery(ctx context.Context, request query.Request) ([]json.Marshaler, error) { +func (s *Server) handleQuery(ctx context.Context, request QueryRequest) ([]json.Marshaler, error) { responses := make([]json.Marshaler, 0, len(request.Targets)) for _, target := range request.Targets { timer := prometheus.NewTimer(s.queryMetrics.Duration.WithLabelValues(target.Name, target.Type)) @@ -25,7 +24,11 @@ func (s *Server) handleQuery(ctx context.Context, request query.Request) ([]json return responses, nil } -func (s *Server) handleQueryRequest(ctx context.Context, target query.Target, request query.Request) (query.Response, error) { +type Response interface { + MarshalJSON() ([]byte, error) +} + +func (s *Server) handleQueryRequest(ctx context.Context, target Target, request QueryRequest) (Response, error) { handler, ok := s.Handlers[target.Name] if !ok { return nil, fmt.Errorf("no handler found for target '%s'", target) diff --git a/go.mod b/go.mod index d6bcc06..95666ba 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,12 @@ -module github.com/clambin/simplejson/v3 +module github.com/clambin/simplejson/v4 go 1.18 require ( github.com/clambin/httpserver v0.4.0 github.com/grafana/grafana-plugin-sdk-go v0.144.0 + github.com/mailru/easyjson v0.7.7 github.com/prometheus/client_golang v1.14.0 - github.com/sirupsen/logrus v1.9.0 github.com/stretchr/testify v1.8.1 ) @@ -21,6 +21,7 @@ require ( github.com/google/flatbuffers v2.0.0+incompatible // indirect github.com/google/go-cmp v0.5.8 // indirect github.com/gorilla/mux v1.8.0 // indirect + github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.13.1 // indirect github.com/kr/text v0.2.0 // indirect diff --git a/go.sum b/go.sum index 
b434ef2..176f62d 100644 --- a/go.sum +++ b/go.sum @@ -164,6 +164,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -187,6 +189,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattetti/filebuffer v1.0.1 h1:gG7pyfnSIZCxdoKq+cPa8T0hhYtD9NxCdI4D7PTjRLM= github.com/mattetti/filebuffer v1.0.1/go.mod h1:YdMURNDOttIiruleeVr6f56OrMc+MydEnTcXwtkxNVs= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= @@ -248,8 +252,6 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus 
v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= @@ -415,7 +417,6 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/data/accumulate.go b/pkg/data/accumulate.go similarity index 100% rename from data/accumulate.go rename to pkg/data/accumulate.go diff --git a/data/accumulate_test.go b/pkg/data/accumulate_test.go similarity index 100% rename from data/accumulate_test.go rename to pkg/data/accumulate_test.go diff --git a/data/filter.go b/pkg/data/filter.go similarity index 50% rename from data/filter.go rename to pkg/data/filter.go index c57d705..6a42c27 100644 --- a/data/filter.go +++ b/pkg/data/filter.go @@ -1,23 +1,23 @@ package data import ( - "github.com/clambin/simplejson/v3/query" + "github.com/clambin/simplejson/v4" "time" ) -// Filter returns a Dataset meeting the provided query Args. 
Currently, it filters based on the args' time Range. +// Filter returns a Dataset meeting the provided Args. Currently, it filters based on the args' time Range. // only the first time column is taken into consideration. -func (t Table) Filter(args query.Args) (filtered *Table) { +func (t Table) Filter(args simplejson.Args) (filtered *Table) { index, found := t.getFirstTimestampColumn() if !found { return &Table{Frame: t.Frame.EmptyCopy()} } f, _ := t.Frame.FilterRowsByField(index, func(i interface{}) (bool, error) { - if !args.Args.Range.From.IsZero() && i.(time.Time).Before(args.Args.Range.From) { + if !args.Range.From.IsZero() && i.(time.Time).Before(args.Range.From) { return false, nil } - if !args.Args.Range.To.IsZero() && i.(time.Time).After(args.Args.Range.To) { + if !args.Range.To.IsZero() && i.(time.Time).After(args.Range.To) { return false, nil } return true, nil diff --git a/data/filter_test.go b/pkg/data/filter_test.go similarity index 53% rename from data/filter_test.go rename to pkg/data/filter_test.go index 4a7f617..03bded6 100644 --- a/data/filter_test.go +++ b/pkg/data/filter_test.go @@ -1,9 +1,8 @@ package data_test import ( - "github.com/clambin/simplejson/v3/common" - "github.com/clambin/simplejson/v3/data" - "github.com/clambin/simplejson/v3/query" + "github.com/clambin/simplejson/v4" + "github.com/clambin/simplejson/v4/pkg/data" grafanaData "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/stretchr/testify/assert" "testing" @@ -13,12 +12,10 @@ import ( func TestTable_FilterByTime(t *testing.T) { d := createTable(10) - output := d.Filter(query.Args{ - Args: common.Args{ - Range: common.Range{ - From: time.Date(2022, 6, 5, 0, 0, 0, 0, time.UTC), - To: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC), - }, + output := d.Filter(simplejson.Args{ + Range: simplejson.Range{ + From: time.Date(2022, 6, 5, 0, 0, 0, 0, time.UTC), + To: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC), }, }) assert.Equal(t, []time.Time{ @@ -31,12 +28,10 @@ func
TestTable_FilterByTime(t *testing.T) { func TestTable_FilterByTime_Empty(t *testing.T) { table := data.Table{Frame: grafanaData.NewFrame("bad")} - f := table.Filter(query.Args{ - Args: common.Args{ - Range: common.Range{ - From: time.Date(2022, 6, 5, 0, 0, 0, 0, time.UTC), - To: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC), - }, + f := table.Filter(simplejson.Args{ + Range: simplejson.Range{ + From: time.Date(2022, 6, 5, 0, 0, 0, 0, time.UTC), + To: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC), }, }) assert.NotNil(t, f.Frame) diff --git a/pkg/data/response.go b/pkg/data/response.go new file mode 100644 index 0000000..51d38a1 --- /dev/null +++ b/pkg/data/response.go @@ -0,0 +1,41 @@ +package data + +import ( + "github.com/clambin/simplejson/v4" + "github.com/grafana/grafana-plugin-sdk-go/data" + "time" +) + +// CreateTableResponse creates a simplejson TableResponse from a Dataset +func (t Table) CreateTableResponse() *simplejson.TableResponse { + columns := make([]simplejson.Column, len(t.Frame.Fields)) + + for i, f := range t.Frame.Fields { + columns[i] = makeColumn(f) + } + + return &simplejson.TableResponse{Columns: columns} +} + +func makeColumn(f *data.Field) (column simplejson.Column) { + name := f.Name + if name == "" { + name = "(unknown)" + } + + var values interface{} + if f.Len() > 0 { + switch f.At(0).(type) { + case time.Time: + values = simplejson.TimeColumn(getFieldValues[time.Time](f)) + case string: + values = simplejson.StringColumn(getFieldValues[string](f)) + case float64: + values = simplejson.NumberColumn(getFieldValues[float64](f)) + } + } + return simplejson.Column{ + Text: name, + Data: values, + } +} diff --git a/data/response_test.go b/pkg/data/response_test.go similarity index 100% rename from data/response_test.go rename to pkg/data/response_test.go diff --git a/data/table.go b/pkg/data/table.go similarity index 98% rename from data/table.go rename to pkg/data/table.go index b670dfa..5291d06 100644 --- a/data/table.go +++ 
b/pkg/data/table.go @@ -1,7 +1,7 @@ package data import ( - "github.com/clambin/simplejson/v3/pkg/set" + "github.com/clambin/simplejson/v4/pkg/set" "github.com/grafana/grafana-plugin-sdk-go/data" "time" ) diff --git a/data/table_test.go b/pkg/data/table_test.go similarity index 99% rename from data/table_test.go rename to pkg/data/table_test.go index bafe47f..1ff26fd 100644 --- a/data/table_test.go +++ b/pkg/data/table_test.go @@ -1,7 +1,7 @@ package data_test import ( - "github.com/clambin/simplejson/v3/data" + "github.com/clambin/simplejson/v4/pkg/data" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "strconv" diff --git a/data/testdata/TestTable_CreateTableResponse.golden b/pkg/data/testdata/TestTable_CreateTableResponse.golden similarity index 100% rename from data/testdata/TestTable_CreateTableResponse.golden rename to pkg/data/testdata/TestTable_CreateTableResponse.golden diff --git a/pkg/set/set_test.go b/pkg/set/set_test.go index dcbcd76..169c33b 100644 --- a/pkg/set/set_test.go +++ b/pkg/set/set_test.go @@ -1,7 +1,7 @@ package set_test import ( - "github.com/clambin/simplejson/v3/pkg/set" + "github.com/clambin/simplejson/v4/pkg/set" "github.com/stretchr/testify/assert" "testing" ) diff --git a/query/response.go b/query.go similarity index 54% rename from query/response.go rename to query.go index f8c934c..16edca1 100644 --- a/query/response.go +++ b/query.go @@ -1,43 +1,76 @@ -package query +package simplejson import ( "encoding/json" "errors" + "github.com/mailru/easyjson" + "strconv" "time" ) -// Response interface for timeseries and table responses -type Response interface { - MarshalJSON() ([]byte, error) +// QueryRequest is a Query request. For each specified Target, the server will call the appropriate handler's Query or TableQuery +// function with the provided QueryArgs. 
+// +//easyjson:skip +type QueryRequest struct { + Targets []Target `json:"targets"` + QueryArgs } -// TimeSeriesResponse is the response from a timeseries Query. -type TimeSeriesResponse struct { - Target string - DataPoints []DataPoint +// Target specifies the requested target name and type. +// +//easyjson:skip +type Target struct { + Name string `json:"target"` // name of the target. + Type string `json:"type"` // "timeserie" or "" for timeseries. "table" for table queries. } -// MarshalJSON converts a TimeSeriesResponse to JSON. -func (t TimeSeriesResponse) MarshalJSON() (output []byte, err error) { - return json.Marshal(struct { - Target string `json:"target"` // name of the target - DataPoints []DataPoint `json:"datapoints"` // values for the target - }{Target: t.Target, DataPoints: t.DataPoints}) +// QueryArgs contains the arguments for a Query. +// +//easyjson:skip +type QueryArgs struct { + Args + MaxDataPoints uint64 `json:"maxDataPoints"` +} + +// UnmarshalJSON unmarshalls a QueryRequest from JSON +func (r *QueryRequest) UnmarshalJSON(b []byte) (err error) { + // workaround to avoid infinite loop + type Request2 QueryRequest + var c Request2 + err = json.Unmarshal(b, &c) + if err == nil { + *r = QueryRequest(c) + } + return err +} + +// TimeSeriesResponse is the response from a timeseries Query. +type TimeSeriesResponse struct { + Target string `json:"target"` + DataPoints []DataPoint `json:"datapoints"` } // DataPoint contains one entry returned by a Query. +// +//easyjson:skip type DataPoint struct { Timestamp time.Time - Value int64 + Value float64 } // MarshalJSON converts a DataPoint to JSON. func (d DataPoint) MarshalJSON() ([]byte, error) { - out := []int64{d.Value, d.Timestamp.UnixMilli()} - return json.Marshal(out) + return []byte(`[` + + strconv.FormatFloat(d.Value, 'f', -1, 64) + `,` + + strconv.FormatInt(d.Timestamp.UnixMilli(), 10) + + `]`), + nil } // TableResponse is returned by a TableQuery, i.e. a slice of Column structures. 
+// +//easyjson:skip type TableResponse struct { Columns []Column } @@ -77,15 +110,15 @@ func (t TableResponse) MarshalJSON() (output []byte, err error) { var colTypes []string var rowCount int - if colTypes, rowCount, err = t.getColumnDetails(); err != nil { - return + if colTypes, rowCount, err = t.getColumnDetails(); err == nil { + output, err = easyjson.Marshal(tableResponse{ + Type: "table", + Columns: t.buildColumns(colTypes), + Rows: t.buildRows(rowCount), + }) } - return json.Marshal(tableResponse{ - Type: "table", - Columns: t.buildColumns(colTypes), - Rows: t.buildRows(rowCount), - }) + return output, err } func (t TableResponse) getColumnDetails() (colTypes []string, rowCount int, err error) { @@ -108,27 +141,27 @@ func (t TableResponse) getColumnDetails() (colTypes []string, rowCount int, err } if dataCount != rowCount { - err = errors.New("error building table query output: all columns must have the same number of rows") - break + return colTypes, rowCount, errors.New("error building table query output: all columns must have the same number of rows") } } return } -func (t TableResponse) buildColumns(colTypes []string) (columns []tableResponseColumn) { - for index, entry := range colTypes { - columns = append(columns, tableResponseColumn{ +func (t TableResponse) buildColumns(colTypes []string) []tableResponseColumn { + columns := make([]tableResponseColumn, len(colTypes)) + for index, colType := range colTypes { + columns[index] = tableResponseColumn{ Text: t.Columns[index].Text, - Type: entry, - }) + Type: colType, + } } - return + return columns } -func (t TableResponse) buildRows(rowCount int) (rows []tableResponseRow) { +func (t TableResponse) buildRows(rowCount int) []tableResponseRow { + rows := make([]tableResponseRow, rowCount) for row := 0; row < rowCount; row++ { newRow := make(tableResponseRow, len(t.Columns)) - for column, entry := range t.Columns { switch data := entry.Data.(type) { case TimeColumn: @@ -139,9 +172,7 @@ func (t 
TableResponse) buildRows(rowCount int) (rows []tableResponseRow) { newRow[column] = data[row] } } - - rows = append(rows, newRow) - + rows[row] = newRow } - return + return rows } diff --git a/query/request.go b/query/request.go deleted file mode 100644 index 8fa79e7..0000000 --- a/query/request.go +++ /dev/null @@ -1,58 +0,0 @@ -package query - -import ( - "encoding/json" - "github.com/clambin/simplejson/v3/common" -) - -// Request is a Query request. For each specified Target, the server will call the appropriate handler's Query or TableQuery -// function with the provided Args. -type Request struct { - Targets []Target `json:"targets"` - Args -} - -// Target specifies the requested target name and type. -type Target struct { - Name string `json:"target"` // name of the target. - Type string `json:"type"` // "timeserie" or "" for timeseries. "table" for table queries. -} - -// Args contains the arguments for a Query. -type Args struct { - common.Args - // Interval QueryRequestDuration `json:"interval"` - MaxDataPoints uint64 `json:"maxDataPoints"` -} - -// UnmarshalJSON unmarshalls a Request from JSON -func (r *Request) UnmarshalJSON(b []byte) (err error) { - // workaround to avoid infinite loop - type Request2 Request - var c Request2 - err = json.Unmarshal(b, &c) - if err == nil { - *r = Request(c) - } - return err -} - -// type QueryRequestDuration time.Duration - -/* TODO: intervals can go to "1y", which time.ParseDuration doesn't handle -func (d *QueryRequestDuration) MarshalJSON() ([]byte, error) { - out := time.Duration(*d).String() - return json.Marshal(out) -} - - -func (d *QueryRequestDuration) UnmarshalJSON(input []byte) (err error) { - in := "" - if err = json.Unmarshal(input, &in); err == nil { - var value time.Duration - value, err = time.ParseDuration(in) - *d = QueryRequestDuration(value) - } - return -} -*/ diff --git a/query/request_test.go b/query/request_test.go deleted file mode 100644 index f00715e..0000000 --- a/query/request_test.go +++ 
/dev/null @@ -1,40 +0,0 @@ -package query_test - -import ( - "encoding/json" - "github.com/clambin/simplejson/v3/query" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "testing" - "time" -) - -func TestRequests(t *testing.T) { - input := `{ - "maxDataPoints": 100, - "interval": "1h", - "range": { - "from": "2020-01-01T00:00:00.000Z", - "to": "2020-12-31T00:00:00.000Z" - }, - "targets": [ - { "target": "A", "type": "dataserie" }, - { "target": "B", "type": "table" } - ] -}` - - var output query.Request - - err := json.Unmarshal([]byte(input), &output) - require.NoError(t, err) - assert.Equal(t, uint64(100), output.MaxDataPoints) - // assert.Equal(t, server.QueryRequestDuration(1*time.Hour), output.Interval) - // assert.Equal(t, 1*time.Hour, time.Duration(output.Interval)) - assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), output.Range.From) - assert.Equal(t, time.Date(2020, 12, 31, 0, 0, 0, 0, time.UTC), output.Range.To) - require.Len(t, output.Targets, 2) - assert.Equal(t, "A", output.Targets[0].Name) - assert.Equal(t, "dataserie", output.Targets[0].Type) - assert.Equal(t, "B", output.Targets[1].Name) - assert.Equal(t, "table", output.Targets[1].Type) -} diff --git a/query/response_test.go b/query/response_test.go deleted file mode 100644 index c867440..0000000 --- a/query/response_test.go +++ /dev/null @@ -1,145 +0,0 @@ -package query_test - -import ( - "bufio" - "bytes" - "encoding/json" - "flag" - "github.com/clambin/simplejson/v3/query" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "os" - "path/filepath" - "testing" - "time" -) - -var update = flag.Bool("update", false, "update .golden files") - -func TestWriteResponseDataSeries(t *testing.T) { - r := query.TimeSeriesResponse{ - Target: "A", - DataPoints: []query.DataPoint{ - {Value: 100, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, - {Value: 101, Timestamp: time.Date(2020, 1, 1, 1, 0, 0, 0, time.UTC)}, - {Value: 102, 
Timestamp: time.Date(2020, 1, 1, 2, 0, 0, 0, time.UTC)}, - }, - } - - var b bytes.Buffer - w := bufio.NewWriter(&b) - err := json.NewEncoder(w).Encode(r) - require.NoError(t, err) - _ = w.Flush() - - gp := filepath.Join("testdata", t.Name()+".golden") - if *update { - t.Logf("updating golden file for %s", t.Name()) - err = os.WriteFile(gp, b.Bytes(), 0644) - require.NoError(t, err, "failed to update golden file") - } - - var golden []byte - golden, err = os.ReadFile(gp) - require.NoError(t, err) - - assert.Equal(t, string(golden), b.String()) -} - -func TestWriteResponseTable(t *testing.T) { - testDate := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) - - r := query.TableResponse{ - Columns: []query.Column{ - {Text: "Time", Data: query.TimeColumn{testDate, testDate}}, - {Text: "Label", Data: query.StringColumn{"foo", "bar"}}, - {Text: "Series A", Data: query.NumberColumn{42, 43}}, - {Text: "Series B", Data: query.NumberColumn{64.5, 100.0}}, - }, - } - - var b bytes.Buffer - w := bufio.NewWriter(&b) - err := json.NewEncoder(w).Encode(r) - require.NoError(t, err) - _ = w.Flush() - - gp := filepath.Join("testdata", t.Name()+".golden") - if *update { - t.Logf("updating golden file for %s", t.Name()) - err = os.WriteFile(gp, b.Bytes(), 0644) - require.NoError(t, err, "failed to update golden file") - } - - var golden []byte - golden, err = os.ReadFile(gp) - require.NoError(t, err) - - assert.Equal(t, string(golden), b.String()) - -} - -func TestWriteBadResponseTable(t *testing.T) { - testDate := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) - - in := query.TableResponse{ - Columns: []query.Column{ - {Text: "Time", Data: query.TimeColumn{testDate, testDate}}, - {Text: "Label", Data: query.StringColumn{"foo"}}, - {Text: "Series A", Data: query.NumberColumn{42, 43}}, - {Text: "Series B", Data: query.NumberColumn{64.5, 100.0, 105.0}}, - }, - } - - _, err := json.Marshal(in) - assert.Error(t, err) -} - -func TestWriteCombinedResponse(t *testing.T) { - testDate := time.Date(2020, 
1, 1, 0, 0, 0, 0, time.UTC) - - dataseries := []query.TimeSeriesResponse{{ - Target: "A", - DataPoints: []query.DataPoint{ - {Value: 100, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, - {Value: 101, Timestamp: time.Date(2020, 1, 1, 1, 0, 0, 0, time.UTC)}, - {Value: 102, Timestamp: time.Date(2020, 1, 1, 2, 0, 0, 0, time.UTC)}, - }, - }} - - tables := []query.TableResponse{{ - Columns: []query.Column{ - {Text: "Time", Data: query.TimeColumn{testDate, testDate}}, - {Text: "Label", Data: query.StringColumn{"foo", "bar"}}, - {Text: "Series A", Data: query.NumberColumn{42, 43}}, - {Text: "Series B", Data: query.NumberColumn{64.5, 100.0}}, - }, - }} - - packaged := make([]interface{}, 0) - for _, dataserie := range dataseries { - packaged = append(packaged, dataserie) - } - for _, table := range tables { - packaged = append(packaged, table) - } - - var b bytes.Buffer - w := bufio.NewWriter(&b) - err := json.NewEncoder(w).Encode(packaged) - require.NoError(t, err) - _ = w.Flush() - - gp := filepath.Join("testdata", t.Name()+".golden") - if *update { - t.Logf("updating golden file for %s", t.Name()) - err = os.WriteFile(gp, b.Bytes(), 0644) - require.NoError(t, err, "failed to update golden file") - } - - var golden []byte - golden, err = os.ReadFile(gp) - require.NoError(t, err) - - assert.Equal(t, string(golden), b.String()) -} diff --git a/query_easyjson.go b/query_easyjson.go new file mode 100644 index 0000000..82d8fc9 --- /dev/null +++ b/query_easyjson.go @@ -0,0 +1,521 @@ +// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. 
+ +package simplejson + +import ( + json "encoding/json" + easyjson "github.com/mailru/easyjson" + jlexer "github.com/mailru/easyjson/jlexer" + jwriter "github.com/mailru/easyjson/jwriter" +) + +// suppress unused package warning +var ( + _ *json.RawMessage + _ *jlexer.Lexer + _ *jwriter.Writer + _ easyjson.Marshaler +) + +func easyjson90b16446DecodeGithubComClambinSimplejsonV3(in *jlexer.Lexer, out *tableResponseColumn) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "text": + out.Text = string(in.String()) + case "type": + out.Type = string(in.String()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson90b16446EncodeGithubComClambinSimplejsonV3(out *jwriter.Writer, in tableResponseColumn) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"text\":" + out.RawString(prefix[1:]) + out.String(string(in.Text)) + } + { + const prefix string = ",\"type\":" + out.RawString(prefix) + out.String(string(in.Type)) + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v tableResponseColumn) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson90b16446EncodeGithubComClambinSimplejsonV3(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v tableResponseColumn) MarshalEasyJSON(w *jwriter.Writer) { + easyjson90b16446EncodeGithubComClambinSimplejsonV3(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *tableResponseColumn) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson90b16446DecodeGithubComClambinSimplejsonV3(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports 
easyjson.Unmarshaler interface +func (v *tableResponseColumn) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson90b16446DecodeGithubComClambinSimplejsonV3(l, v) +} +func easyjson90b16446DecodeGithubComClambinSimplejsonV31(in *jlexer.Lexer, out *tableResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "type": + out.Type = string(in.String()) + case "columns": + if in.IsNull() { + in.Skip() + out.Columns = nil + } else { + in.Delim('[') + if out.Columns == nil { + if !in.IsDelim(']') { + out.Columns = make([]tableResponseColumn, 0, 2) + } else { + out.Columns = []tableResponseColumn{} + } + } else { + out.Columns = (out.Columns)[:0] + } + for !in.IsDelim(']') { + var v1 tableResponseColumn + (v1).UnmarshalEasyJSON(in) + out.Columns = append(out.Columns, v1) + in.WantComma() + } + in.Delim(']') + } + case "rows": + if in.IsNull() { + in.Skip() + out.Rows = nil + } else { + in.Delim('[') + if out.Rows == nil { + if !in.IsDelim(']') { + out.Rows = make([]tableResponseRow, 0, 2) + } else { + out.Rows = []tableResponseRow{} + } + } else { + out.Rows = (out.Rows)[:0] + } + for !in.IsDelim(']') { + var v2 tableResponseRow + if in.IsNull() { + in.Skip() + v2 = nil + } else { + in.Delim('[') + if v2 == nil { + if !in.IsDelim(']') { + v2 = make(tableResponseRow, 0, 4) + } else { + v2 = tableResponseRow{} + } + } else { + v2 = (v2)[:0] + } + for !in.IsDelim(']') { + var v3 interface{} + if m, ok := v3.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := v3.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + v3 = in.Interface() + } + v2 = append(v2, v3) + in.WantComma() + } + in.Delim(']') + } + out.Rows = append(out.Rows, v2) + in.WantComma() + } + in.Delim(']') + } + default: + 
in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson90b16446EncodeGithubComClambinSimplejsonV31(out *jwriter.Writer, in tableResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"type\":" + out.RawString(prefix[1:]) + out.String(string(in.Type)) + } + { + const prefix string = ",\"columns\":" + out.RawString(prefix) + if in.Columns == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v4, v5 := range in.Columns { + if v4 > 0 { + out.RawByte(',') + } + (v5).MarshalEasyJSON(out) + } + out.RawByte(']') + } + } + { + const prefix string = ",\"rows\":" + out.RawString(prefix) + if in.Rows == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v6, v7 := range in.Rows { + if v6 > 0 { + out.RawByte(',') + } + if v7 == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v8, v9 := range v7 { + if v8 > 0 { + out.RawByte(',') + } + if m, ok := v9.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := v9.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(v9)) + } + } + out.RawByte(']') + } + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v tableResponse) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson90b16446EncodeGithubComClambinSimplejsonV31(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v tableResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson90b16446EncodeGithubComClambinSimplejsonV31(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *tableResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson90b16446DecodeGithubComClambinSimplejsonV31(&r, v) 
+ return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *tableResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson90b16446DecodeGithubComClambinSimplejsonV31(l, v) +} +func easyjson90b16446DecodeGithubComClambinSimplejsonV32(in *jlexer.Lexer, out *TimeSeriesResponse) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "target": + out.Target = string(in.String()) + case "datapoints": + if in.IsNull() { + in.Skip() + out.DataPoints = nil + } else { + in.Delim('[') + if out.DataPoints == nil { + if !in.IsDelim(']') { + out.DataPoints = make([]DataPoint, 0, 2) + } else { + out.DataPoints = []DataPoint{} + } + } else { + out.DataPoints = (out.DataPoints)[:0] + } + for !in.IsDelim(']') { + var v10 DataPoint + easyjson90b16446DecodeGithubComClambinSimplejsonV33(in, &v10) + out.DataPoints = append(out.DataPoints, v10) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson90b16446EncodeGithubComClambinSimplejsonV32(out *jwriter.Writer, in TimeSeriesResponse) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"target\":" + out.RawString(prefix[1:]) + out.String(string(in.Target)) + } + { + const prefix string = ",\"datapoints\":" + out.RawString(prefix) + if in.DataPoints == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v11, v12 := range in.DataPoints { + if v11 > 0 { + out.RawByte(',') + } + out.Raw((v12).MarshalJSON()) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v TimeSeriesResponse) MarshalJSON() ([]byte, error) { + w := 
jwriter.Writer{} + easyjson90b16446EncodeGithubComClambinSimplejsonV32(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v TimeSeriesResponse) MarshalEasyJSON(w *jwriter.Writer) { + easyjson90b16446EncodeGithubComClambinSimplejsonV32(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *TimeSeriesResponse) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson90b16446DecodeGithubComClambinSimplejsonV32(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *TimeSeriesResponse) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson90b16446DecodeGithubComClambinSimplejsonV32(l, v) +} +func easyjson90b16446DecodeGithubComClambinSimplejsonV33(in *jlexer.Lexer, out *DataPoint) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "Timestamp": + if data := in.Raw(); in.Ok() { + in.AddError((out.Timestamp).UnmarshalJSON(data)) + } + case "Value": + out.Value = float64(in.Float64()) + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson90b16446EncodeGithubComClambinSimplejsonV33(out *jwriter.Writer, in DataPoint) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"Timestamp\":" + out.RawString(prefix[1:]) + out.Raw((in.Timestamp).MarshalJSON()) + } + { + const prefix string = ",\"Value\":" + out.RawString(prefix) + out.Float64(float64(in.Value)) + } + out.RawByte('}') +} +func easyjson90b16446DecodeGithubComClambinSimplejsonV34(in *jlexer.Lexer, out *Column) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for 
!in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "Text": + out.Text = string(in.String()) + case "Data": + if m, ok := out.Data.(easyjson.Unmarshaler); ok { + m.UnmarshalEasyJSON(in) + } else if m, ok := out.Data.(json.Unmarshaler); ok { + _ = m.UnmarshalJSON(in.Raw()) + } else { + out.Data = in.Interface() + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson90b16446EncodeGithubComClambinSimplejsonV34(out *jwriter.Writer, in Column) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"Text\":" + out.RawString(prefix[1:]) + out.String(string(in.Text)) + } + { + const prefix string = ",\"Data\":" + out.RawString(prefix) + if m, ok := in.Data.(easyjson.Marshaler); ok { + m.MarshalEasyJSON(out) + } else if m, ok := in.Data.(json.Marshaler); ok { + out.Raw(m.MarshalJSON()) + } else { + out.Raw(json.Marshal(in.Data)) + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Column) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson90b16446EncodeGithubComClambinSimplejsonV34(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Column) MarshalEasyJSON(w *jwriter.Writer) { + easyjson90b16446EncodeGithubComClambinSimplejsonV34(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Column) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson90b16446DecodeGithubComClambinSimplejsonV34(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports easyjson.Unmarshaler interface +func (v *Column) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson90b16446DecodeGithubComClambinSimplejsonV34(l, v) +} diff --git a/query_test.go b/query_test.go new file mode 100644 index 0000000..135b855 --- /dev/null +++ 
b/query_test.go @@ -0,0 +1,215 @@ +package simplejson + +import ( + "bufio" + "bytes" + "encoding/json" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "os" + "path/filepath" + "strings" + "testing" + "time" +) + +func TestRequests(t *testing.T) { + input := `{ + "maxDataPoints": 100, + "interval": "1h", + "range": { + "from": "2020-01-01T00:00:00.000Z", + "to": "2020-12-31T00:00:00.000Z" + }, + "targets": [ + { "target": "A", "type": "dataserie" }, + { "target": "B", "type": "table" } + ] +}` + + var output QueryRequest + + err := json.Unmarshal([]byte(input), &output) + require.NoError(t, err) + assert.Equal(t, uint64(100), output.MaxDataPoints) + // assert.Equal(t, server.QueryRequestDuration(1*time.Hour), output.Interval) + // assert.Equal(t, 1*time.Hour, time.Duration(output.Interval)) + assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), output.Range.From) + assert.Equal(t, time.Date(2020, 12, 31, 0, 0, 0, 0, time.UTC), output.Range.To) + require.Len(t, output.Targets, 2) + assert.Equal(t, "A", output.Targets[0].Name) + assert.Equal(t, "dataserie", output.Targets[0].Type) + assert.Equal(t, "B", output.Targets[1].Name) + assert.Equal(t, "table", output.Targets[1].Type) +} + +func TestResponse(t *testing.T) { + tests := []struct { + name string + pass bool + response Response + }{ + { + name: "timeseries", + pass: true, + response: TimeSeriesResponse{ + Target: "A", + DataPoints: []DataPoint{ + {Value: 100, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, + {Value: 101, Timestamp: time.Date(2020, 1, 1, 1, 0, 0, 0, time.UTC)}, + {Value: 102, Timestamp: time.Date(2020, 1, 1, 2, 0, 0, 0, time.UTC)}, + }, + }, + }, + { + name: "table", + pass: true, + response: TableResponse{ + Columns: []Column{ + {Text: "Time", Data: TimeColumn{time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC)}}, + {Text: "Label", Data: StringColumn{"foo", "bar"}}, + {Text: "Series A", Data: NumberColumn{42, 
43}}, + {Text: "Series B", Data: NumberColumn{64.5, 100.0}}, + }, + }, + }, + { + name: "combined", + pass: true, + response: makeCombinedQueryResponse(), + }, + { + name: "invalid", + pass: false, + response: TableResponse{ + Columns: []Column{ + {Text: "Time", Data: TimeColumn{time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC)}}, + {Text: "Label", Data: StringColumn{"foo"}}, + {Text: "Series A", Data: NumberColumn{42, 43}}, + {Text: "Series B", Data: NumberColumn{64.5, 100.0, 105.0}}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var b bytes.Buffer + w := bufio.NewWriter(&b) + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + err := enc.Encode(tt.response) + + if !tt.pass { + assert.Error(t, err) + return + } + + require.NoError(t, err) + _ = w.Flush() + + gp := filepath.Join("testdata", strings.ToLower(t.Name())+".golden") + if *update { + t.Logf("updating golden file for %s", t.Name()) + err = os.WriteFile(gp, b.Bytes(), 0644) + require.NoError(t, err, "failed to update golden file") + } + + var golden []byte + golden, err = os.ReadFile(gp) + require.NoError(t, err) + + assert.Equal(t, string(golden), b.String()) + + }) + } +} + +type combinedResponse struct { + responses []interface{} +} + +func (r combinedResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(r.responses) +} + +func makeCombinedQueryResponse() combinedResponse { + testDate := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) + + dataseries := []TimeSeriesResponse{{ + Target: "A", + DataPoints: []DataPoint{ + {Value: 100, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, + {Value: 101, Timestamp: time.Date(2020, 1, 1, 1, 0, 0, 0, time.UTC)}, + {Value: 102, Timestamp: time.Date(2020, 1, 1, 2, 0, 0, 0, time.UTC)}, + }, + }} + + tables := []TableResponse{{ + Columns: []Column{ + {Text: "Time", Data: TimeColumn{testDate, testDate}}, + {Text: "Label", Data: StringColumn{"foo", "bar"}}, + {Text: "Series 
A", Data: NumberColumn{42, 43}}, + {Text: "Series B", Data: NumberColumn{64.5, 100.0}}, + }, + }} + + var r combinedResponse + //r.responses = make([]interface{}, 0) + for _, dataserie := range dataseries { + r.responses = append(r.responses, dataserie) + } + for _, table := range tables { + r.responses = append(r.responses, table) + } + + return r +} + +func BenchmarkTimeSeriesResponse_MarshalJSON(b *testing.B) { + response := buildTimeSeriesResponse(1000) + b.ResetTimer() + for i := 0; i < b.N; i++ { + if _, err := response.MarshalJSON(); err != nil { + b.Fatal(err) + } + } +} + +func buildTimeSeriesResponse(count int) TimeSeriesResponse { + var datapoints []DataPoint + timestamp := time.Date(2022, time.November, 27, 0, 0, 0, 0, time.UTC) + for i := 0; i < count; i++ { + datapoints = append(datapoints, DataPoint{ + Timestamp: timestamp, + Value: float64(i), + }) + } + return TimeSeriesResponse{Target: "foo", DataPoints: datapoints} +} + +func BenchmarkTableResponse_MarshalJSON(b *testing.B) { + response := buildTableResponse(1000) + b.ResetTimer() + for i := 0; i < b.N; i++ { + if _, err := response.MarshalJSON(); err != nil { + b.Fatal(err) + } + } +} + +func buildTableResponse(count int) TableResponse { + var timestamps []time.Time + var values []float64 + + timestamp := time.Date(2022, time.November, 27, 0, 0, 0, 0, time.UTC) + for i := 0; i < count; i++ { + timestamps = append(timestamps, timestamp) + values = append(values, 1.0) + timestamp = timestamp.Add(time.Minute) + } + return TableResponse{Columns: []Column{ + {Text: "time", Data: TimeColumn(timestamps)}, + {Text: "value", Data: NumberColumn(values)}, + }} +} diff --git a/server.go b/server.go index 02d8e42..64a1829 100644 --- a/server.go +++ b/server.go @@ -3,8 +3,6 @@ package simplejson import ( "context" "github.com/clambin/httpserver" - "github.com/clambin/simplejson/v3/annotation" - "github.com/clambin/simplejson/v3/query" "github.com/prometheus/client_golang/prometheus" "net/http" "sort" @@ 
-78,10 +76,10 @@ type Endpoints struct { } // QueryFunc handles queries -type QueryFunc func(ctx context.Context, req query.Request) (query.Response, error) +type QueryFunc func(ctx context.Context, req QueryRequest) (Response, error) // AnnotationsFunc handles requests for annotation -type AnnotationsFunc func(req annotation.Request) ([]annotation.Annotation, error) +type AnnotationsFunc func(req AnnotationRequest) ([]Annotation, error) // TagKeysFunc returns supported tag names type TagKeysFunc func(ctx context.Context) []string diff --git a/server_test.go b/server_test.go index 275e78e..c4a912e 100644 --- a/server_test.go +++ b/server_test.go @@ -5,8 +5,6 @@ import ( "context" "flag" "fmt" - "github.com/clambin/simplejson/v3/annotation" - "github.com/clambin/simplejson/v3/query" "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -112,8 +110,8 @@ func TestServer_Metrics(t *testing.T) { type testHandler struct { noEndpoints bool - queryResponse query.Response - annotations []annotation.Annotation + queryResponse Response + annotations []Annotation tags []string tagValues map[string][]string } @@ -121,10 +119,10 @@ type testHandler struct { var _ Handler = &testHandler{} var ( - queryResponses = map[string]*query.TimeSeriesResponse{ + queryResponses = map[string]*TimeSeriesResponse{ "A": { Target: "A", - DataPoints: []query.DataPoint{ + DataPoints: []DataPoint{ {Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), Value: 100}, {Timestamp: time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC), Value: 101}, {Timestamp: time.Date(2020, 1, 1, 0, 2, 0, 0, time.UTC), Value: 103}, @@ -132,7 +130,7 @@ var ( }, "B": { Target: "B", - DataPoints: []query.DataPoint{ + DataPoints: []DataPoint{ {Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), Value: 100}, {Timestamp: time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC), Value: 99}, {Timestamp: time.Date(2020, 1, 1, 0, 2, 0, 0, time.UTC), Value: 98}, @@ -140,21 
+138,21 @@ var ( }, } - tableQueryResponse = map[string]*query.TableResponse{ + tableQueryResponse = map[string]*TableResponse{ "C": { - Columns: []query.Column{ - {Text: "Time", Data: query.TimeColumn{ + Columns: []Column{ + {Text: "Time", Data: TimeColumn{ time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), time.Date(2020, 1, 1, 0, 1, 0, 0, time.UTC), }}, - {Text: "Label", Data: query.StringColumn{"foo", "bar"}}, - {Text: "Series A", Data: query.NumberColumn{42, 43}}, - {Text: "Series B", Data: query.NumberColumn{64.5, 100.0}}, + {Text: "Label", Data: StringColumn{"foo", "bar"}}, + {Text: "Series A", Data: NumberColumn{42, 43}}, + {Text: "Series B", Data: NumberColumn{64.5, 100.0}}, }, }, } - annotations = []annotation.Annotation{{ + annotations = []Annotation{{ Time: time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC), Title: "foo", Text: "bar", @@ -205,11 +203,11 @@ func (handler *testHandler) Endpoints() (endpoints Endpoints) { return } -func (handler *testHandler) Query(_ context.Context, _ query.Request) (response query.Response, err error) { +func (handler *testHandler) Query(_ context.Context, _ QueryRequest) (response Response, err error) { return handler.queryResponse, nil } -func (handler *testHandler) Annotations(_ annotation.Request) (annotations []annotation.Annotation, err error) { +func (handler *testHandler) Annotations(_ AnnotationRequest) (annotations []Annotation, err error) { return handler.annotations, nil } @@ -219,7 +217,7 @@ func (handler *testHandler) Tags(_ context.Context) (tags []string) { func (handler *testHandler) TagValues(_ context.Context, tag string) (values []string, err error) { var ok bool - if values, ok = handler.tagValues[tag]; ok == false { + if values, ok = handler.tagValues[tag]; !ok { err = fmt.Errorf("unsupported tag '%s'", tag) } return diff --git a/query/testdata/TestWriteCombinedResponse.golden b/testdata/TestWriteCombinedResponse.golden similarity index 100% rename from query/testdata/TestWriteCombinedResponse.golden rename to 
testdata/TestWriteCombinedResponse.golden diff --git a/query/testdata/TestWriteResponseDataSeries.golden b/testdata/TestWriteResponseDataSeries.golden similarity index 100% rename from query/testdata/TestWriteResponseDataSeries.golden rename to testdata/TestWriteResponseDataSeries.golden diff --git a/query/testdata/TestWriteResponseTable.golden b/testdata/TestWriteResponseTable.golden similarity index 100% rename from query/testdata/TestWriteResponseTable.golden rename to testdata/TestWriteResponseTable.golden diff --git a/annotation/testdata/TestAnnotation_MarshalJSON_1.golden b/testdata/testannotation_marshaljson/1.golden similarity index 100% rename from annotation/testdata/TestAnnotation_MarshalJSON_1.golden rename to testdata/testannotation_marshaljson/1.golden diff --git a/annotation/testdata/TestAnnotation_MarshalJSON_2.golden b/testdata/testannotation_marshaljson/2.golden similarity index 100% rename from annotation/testdata/TestAnnotation_MarshalJSON_2.golden rename to testdata/testannotation_marshaljson/2.golden diff --git a/testdata/testresponse/combined.golden b/testdata/testresponse/combined.golden new file mode 100644 index 0000000..f94b019 --- /dev/null +++ b/testdata/testresponse/combined.golden @@ -0,0 +1,54 @@ +[ + { + "target": "A", + "datapoints": [ + [ + 100, + 1577836800000 + ], + [ + 101, + 1577840400000 + ], + [ + 102, + 1577844000000 + ] + ] + }, + { + "type": "table", + "columns": [ + { + "text": "Time", + "type": "time" + }, + { + "text": "Label", + "type": "string" + }, + { + "text": "Series A", + "type": "number" + }, + { + "text": "Series B", + "type": "number" + } + ], + "rows": [ + [ + "2020-01-01T00:00:00Z", + "foo", + 42, + 64.5 + ], + [ + "2020-01-01T00:00:00Z", + "bar", + 43, + 100 + ] + ] + } +] diff --git a/testdata/testresponse/table.golden b/testdata/testresponse/table.golden new file mode 100644 index 0000000..2829d6a --- /dev/null +++ b/testdata/testresponse/table.golden @@ -0,0 +1,35 @@ +{ + "type": "table", + "columns": [ 
+ { + "text": "Time", + "type": "time" + }, + { + "text": "Label", + "type": "string" + }, + { + "text": "Series A", + "type": "number" + }, + { + "text": "Series B", + "type": "number" + } + ], + "rows": [ + [ + "2020-01-01T00:00:00Z", + "foo", + 42, + 64.5 + ], + [ + "2020-01-01T00:01:00Z", + "bar", + 43, + 100 + ] + ] +} diff --git a/testdata/testresponse/timeseries.golden b/testdata/testresponse/timeseries.golden new file mode 100644 index 0000000..bba838e --- /dev/null +++ b/testdata/testresponse/timeseries.golden @@ -0,0 +1,17 @@ +{ + "target": "A", + "datapoints": [ + [ + 100, + 1577836800000 + ], + [ + 101, + 1577840400000 + ], + [ + 102, + 1577844000000 + ] + ] +} diff --git a/testserver/testserver.go b/testserver/testserver.go deleted file mode 100644 index c171315..0000000 --- a/testserver/testserver.go +++ /dev/null @@ -1,159 +0,0 @@ -package main - -import ( - "context" - "errors" - "fmt" - "github.com/clambin/httpserver" - "github.com/clambin/simplejson/v3" - "github.com/clambin/simplejson/v3/annotation" - "github.com/clambin/simplejson/v3/query" - "github.com/prometheus/client_golang/prometheus/promhttp" - log "github.com/sirupsen/logrus" - "net/http" - "time" -) - -func main() { - s, err := simplejson.New("test", map[string]simplejson.Handler{ - "A": &handler{}, - "B": &handler{table: true}, - "C": &annoHandler{}, - }, httpserver.WithPort{Port: 8080}) - - if err != nil { - panic(err) - } - - go func() { - http.Handle("/metrics", promhttp.Handler()) - err2 := http.ListenAndServe(":9090", nil) - if !errors.Is(err2, http.ErrServerClosed) { - panic(err2) - } - }() - - log.SetLevel(log.DebugLevel) - err = s.Run() - if err != nil { - panic(err) - } -} - -type handler struct{ table bool } - -func (h *handler) Endpoints() simplejson.Endpoints { - return simplejson.Endpoints{ - Query: h.Query, - Annotations: h.Annotations, - TagKeys: h.TagKeys, - TagValues: h.TagValues, - } -} - -func (h *handler) Query(ctx context.Context, req query.Request) (response 
query.Response, err error) { - if h.table { - return h.tableQuery(ctx, req) - } - return h.timeSeriesQuery(ctx, req) -} - -func (h *handler) timeSeriesQuery(_ context.Context, req query.Request) (response *query.TimeSeriesResponse, err error) { - for _, filter := range req.Args.AdHocFilters { - log.WithFields(log.Fields{ - "key": filter.Key, - "operator": filter.Operator, - "condition": filter.Condition, - "value": filter.Value, - }).Info("table request received") - } - - dataPoints := make([]query.DataPoint, 60) - timestamp := time.Now().Add(-1 * time.Hour) - for i := 0; i < 60; i++ { - dataPoints[i] = query.DataPoint{ - Timestamp: timestamp, - Value: int64(i), - } - timestamp = timestamp.Add(1 * time.Minute) - } - - return &query.TimeSeriesResponse{ - DataPoints: dataPoints, - }, nil -} - -func (h *handler) tableQuery(_ context.Context, req query.Request) (response *query.TableResponse, err error) { - for _, filter := range req.Args.AdHocFilters { - log.WithFields(log.Fields{ - "key": filter.Key, - "operator": filter.Operator, - "condition": filter.Condition, - "value": filter.Value, - }).Info("table request received") - } - - timestamps := make(query.TimeColumn, 60) - seriesA := make(query.NumberColumn, 60) - seriesB := make(query.NumberColumn, 60) - - timestamp := time.Now().Add(-1 * time.Hour) - for i := 0; i < 60; i++ { - timestamps[i] = timestamp - seriesA[i] = float64(i) - seriesB[i] = float64(-i) - timestamp = timestamp.Add(1 * time.Minute) - } - - response = &query.TableResponse{ - Columns: []query.Column{ - {Text: "timestamp", Data: timestamps}, - {Text: "series A", Data: seriesA}, - {Text: "series B", Data: seriesB}, - }, - } - return -} - -func (h *handler) Annotations(_ annotation.Request) (annotations []annotation.Annotation, err error) { - annotations = []annotation.Annotation{ - { - Time: time.Now().Add(-5 * time.Minute), - Title: "foo", - Text: "bar", - Tags: []string{"A", "B"}, - }, - } - return -} - -func (h *handler) TagKeys(_ context.Context) 
(keys []string) { - return []string{"some-key"} -} - -func (h *handler) TagValues(_ context.Context, key string) (values []string, err error) { - if key != "some-key" { - return nil, fmt.Errorf("invalid key: %s", key) - } - return []string{"A", "B", "C"}, nil -} - -type annoHandler struct { -} - -func (a annoHandler) Endpoints() simplejson.Endpoints { - return simplejson.Endpoints{Query: a.annotations} -} - -func (annoHandler) annotations(_ context.Context, _ query.Request) (response query.Response, err error) { - response = &query.TableResponse{Columns: []query.Column{ - {Text: "start", Data: query.TimeColumn([]time.Time{time.Now().Add(-5 * time.Minute)})}, - {Text: "stop", Data: query.TimeColumn([]time.Time{time.Now().Add(-4 * time.Minute)})}, - {Text: "title", Data: query.StringColumn([]string{"bar"})}, - {Text: "name", Data: query.StringColumn([]string{"foo"})}, - {Text: "id", Data: query.NumberColumn([]float64{1.0})}, - {Text: "tags", Data: query.StringColumn([]string{"A"})}, - }} - - return -}