From bf41bec163a95b3f83a7147bfed3c5027581ebda Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 7 Jun 2024 21:46:31 +0000
Subject: [PATCH] feat(api): update via SDK Studio (#4)
---
README.md | 98 ++++++++++++-------------
aliases.go | 2 +-
api.md | 24 +++---
client.go | 2 +-
client_test.go | 110 ++++++++++++++--------------
commit.go | 2 +-
committestresult.go | 2 +-
committestresult_test.go | 18 ++---
field.go | 2 +-
inferencepipeline.go | 2 +-
inferencepipelinedata.go | 2 +-
inferencepipelinedata_test.go | 40 +++++-----
inferencepipelinetestresult.go | 2 +-
inferencepipelinetestresult_test.go | 18 ++---
project.go | 2 +-
project_test.go | 16 ++--
projectcommit.go | 2 +-
projectcommit_test.go | 12 +--
projectinferencepipeline.go | 2 +-
projectinferencepipeline_test.go | 14 ++--
usage_test.go | 20 ++---
21 files changed, 196 insertions(+), 196 deletions(-)
diff --git a/README.md b/README.md
index 57e4109..abdfea7 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ It is generated with [Stainless](https://www.stainlessapi.com/).
```go
import (
- "github.com/openlayer-ai/openlayer-go" // imported as githubcomopenlayeraiopenlayergo
+ "github.com/openlayer-ai/openlayer-go" // imported as openlayer
)
```
@@ -49,21 +49,21 @@ import (
)
func main() {
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithAPIKey("My API Key"), // defaults to os.LookupEnv("OPENLAYER_API_KEY")
)
inferencePipelineDataStreamResponse, err := client.InferencePipelines.Data.Stream(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -94,18 +94,18 @@ To send a null, use `Null[T]()`, and to send a nonconforming value, use `Raw[T](
```go
params := FooParams{
- Name: githubcomopenlayeraiopenlayergo.F("hello"),
+ Name: openlayer.F("hello"),
// Explicitly send `"description": null`
- Description: githubcomopenlayeraiopenlayergo.Null[string](),
+ Description: openlayer.Null[string](),
- Point: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.Point{
- X: githubcomopenlayeraiopenlayergo.Int(0),
- Y: githubcomopenlayeraiopenlayergo.Int(1),
+ Point: openlayer.F(openlayer.Point{
+ X: openlayer.Int(0),
+ Y: openlayer.Int(1),
// In cases where the API specifies a given type,
// but you want to send something else, use `Raw`:
- Z: githubcomopenlayeraiopenlayergo.Raw[int64](0.01), // sends a float
+ Z: openlayer.Raw[int64](0.01), // sends a float
}),
}
```
@@ -159,7 +159,7 @@ This library uses the functional options pattern. Functions defined in the
requests. For example:
```go
-client := githubcomopenlayeraiopenlayergo.NewClient(
+client := openlayer.NewClient(
// Adds a header to every request made by the client
option.WithHeader("X-Some-Header", "custom_header_info"),
)
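(A hedged aside: a minimal, self-contained sketch of passing the same kind of option on an individual call rather than on the client. The trailing-options form mirrors the `option.WithJSONSet` example later in this README, and the `Projects.List` signature comes from the `api.md` changes below; the per-request header name is invented for illustration.)

```go
package main

import (
	"context"

	"github.com/openlayer-ai/openlayer-go" // imported as openlayer
	"github.com/openlayer-ai/openlayer-go/option"
)

func main() {
	// Client-level option, as in the example above.
	client := openlayer.NewClient(
		option.WithHeader("X-Some-Header", "custom_header_info"),
	)

	// The same kind of option can also be supplied on an individual call,
	// mirroring the option.WithJSONSet example later in this README.
	_, err := client.Projects.List(
		context.TODO(),
		openlayer.ProjectListParams{},
		option.WithHeader("X-Per-Request-Header", "request_specific_info"), // hypothetical header
	)
	if err != nil {
		panic(err.Error())
	}
}
```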
@@ -186,7 +186,7 @@ with additional helper methods like `.GetNextPage()`, e.g.:
### Errors
When the API returns a non-success status code, we return an error with type
-`*githubcomopenlayeraiopenlayergo.Error`. This contains the `StatusCode`, `*http.Request`, and
+`*openlayer.Error`. This contains the `StatusCode`, `*http.Request`, and
`*http.Response` values of the request, as well as the JSON of the error body
(much like other response objects in the SDK).
@@ -196,15 +196,15 @@ To handle errors, we recommend that you use the `errors.As` pattern:
_, err := client.InferencePipelines.Data.Stream(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -214,7 +214,7 @@ _, err := client.InferencePipelines.Data.Stream(
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
println(string(apierr.DumpRequest(true))) // Prints the serialized HTTP request
println(string(apierr.DumpResponse(true))) // Prints the serialized HTTP response
@@ -240,15 +240,15 @@ defer cancel()
client.InferencePipelines.Data.Stream(
ctx,
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -271,7 +271,7 @@ The file name and content-type can be customized by implementing `Name() string`
string` on the run-time type of `io.Reader`. Note that `os.File` implements `Name() string`, so a
file returned by `os.Open` will be sent with the file name on disk.
-We also provide a helper `githubcomopenlayeraiopenlayergo.FileParam(reader io.Reader, filename string, contentType string)`
+We also provide a helper `openlayer.FileParam(reader io.Reader, filename string, contentType string)`
which can be used to wrap any `io.Reader` with the appropriate file name and content type.
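(A hedged aside: a minimal usage sketch of this helper, relying only on the `FileParam` signature quoted above; the file name, payload, and surrounding `main` are illustrative assumptions.)

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/openlayer-ai/openlayer-go" // imported as openlayer
)

func main() {
	// A bare bytes.Reader carries no file name or content type of its own,
	// so wrap it explicitly before using it as an upload parameter.
	payload := bytes.NewReader([]byte(`{"rows": []}`))
	file := openlayer.FileParam(payload, "rows.json", "application/json")

	// `file` can then be supplied wherever a request parameter expects a file.
	fmt.Printf("wrapped reader has type %T\n", file)
}
```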
### Retries
@@ -284,7 +284,7 @@ You can use the `WithMaxRetries` option to configure or disable this:
```go
// Configure the default for all requests:
-client := githubcomopenlayeraiopenlayergo.NewClient(
+client := openlayer.NewClient(
option.WithMaxRetries(0), // default is 2
)
@@ -292,15 +292,15 @@ client := githubcomopenlayeraiopenlayergo.NewClient(
client.InferencePipelines.Data.Stream(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -345,9 +345,9 @@ or the `option.WithJSONSet()` methods.
```go
params := FooNewParams{
- ID: githubcomopenlayeraiopenlayergo.F("id_xxxx"),
- Data: githubcomopenlayeraiopenlayergo.F(FooNewParamsData{
- FirstName: githubcomopenlayeraiopenlayergo.F("John"),
+ ID: openlayer.F("id_xxxx"),
+ Data: openlayer.F(FooNewParamsData{
+ FirstName: openlayer.F("John"),
}),
}
client.Foo.New(context.Background(), params, option.WithJSONSet("data.last_name", "Doe"))
@@ -382,7 +382,7 @@ func Logger(req *http.Request, next option.MiddlewareNext) (res *http.Response,
return res, err
}
-client := githubcomopenlayeraiopenlayergo.NewClient(
+client := openlayer.NewClient(
option.WithMiddleware(Logger),
)
```
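(A hedged aside: a minimal middleware sketch built only from the `Logger` signature visible in the hunk header above, i.e. a `func(*http.Request, option.MiddlewareNext) (*http.Response, error)`; the `StampRequestID` helper and its header name are invented for illustration.)

```go
package main

import (
	"net/http"

	"github.com/openlayer-ai/openlayer-go" // imported as openlayer
	"github.com/openlayer-ai/openlayer-go/option"
)

// StampRequestID sets a header on every outgoing request and then hands the
// request to the next middleware (or the underlying transport) via next.
func StampRequestID(req *http.Request, next option.MiddlewareNext) (*http.Response, error) {
	req.Header.Set("X-Request-Id", "example-id") // hypothetical header, for illustration only
	return next(req)
}

func main() {
	client := openlayer.NewClient(
		option.WithMiddleware(StampRequestID),
	)
	_ = client
}
```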
diff --git a/aliases.go b/aliases.go
index 67f3312..7137948 100644
--- a/aliases.go
+++ b/aliases.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"github.com/openlayer-ai/openlayer-go/internal/apierror"
diff --git a/api.md b/api.md
index 91e970d..c42c904 100644
--- a/api.md
+++ b/api.md
@@ -2,31 +2,31 @@
Response Types:
-- githubcomopenlayeraiopenlayergo.ProjectListResponse
+- openlayer.ProjectListResponse
Methods:
-- client.Projects.List(ctx context.Context, query githubcomopenlayeraiopenlayergo.ProjectListParams) (githubcomopenlayeraiopenlayergo.ProjectListResponse, error)
+- client.Projects.List(ctx context.Context, query openlayer.ProjectListParams) (openlayer.ProjectListResponse, error)
## Commits
Response Types:
-- githubcomopenlayeraiopenlayergo.ProjectCommitListResponse
+- openlayer.ProjectCommitListResponse
Methods:
-- client.Projects.Commits.List(ctx context.Context, id string, query githubcomopenlayeraiopenlayergo.ProjectCommitListParams) (githubcomopenlayeraiopenlayergo.ProjectCommitListResponse, error)
+- client.Projects.Commits.List(ctx context.Context, id string, query openlayer.ProjectCommitListParams) (openlayer.ProjectCommitListResponse, error)
## InferencePipelines
Response Types:
-- githubcomopenlayeraiopenlayergo.ProjectInferencePipelineListResponse
+- openlayer.ProjectInferencePipelineListResponse
Methods:
-- client.Projects.InferencePipelines.List(ctx context.Context, id string, query githubcomopenlayeraiopenlayergo.ProjectInferencePipelineListParams) (githubcomopenlayeraiopenlayergo.ProjectInferencePipelineListResponse, error)
+- client.Projects.InferencePipelines.List(ctx context.Context, id string, query openlayer.ProjectInferencePipelineListParams) (openlayer.ProjectInferencePipelineListResponse, error)
# Commits
@@ -34,11 +34,11 @@ Methods:
Response Types:
-- githubcomopenlayeraiopenlayergo.CommitTestResultListResponse
+- openlayer.CommitTestResultListResponse
Methods:
-- client.Commits.TestResults.List(ctx context.Context, id string, query githubcomopenlayeraiopenlayergo.CommitTestResultListParams) (githubcomopenlayeraiopenlayergo.CommitTestResultListResponse, error)
+- client.Commits.TestResults.List(ctx context.Context, id string, query openlayer.CommitTestResultListParams) (openlayer.CommitTestResultListResponse, error)
# InferencePipelines
@@ -46,18 +46,18 @@ Methods:
Response Types:
-- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamResponse
+- openlayer.InferencePipelineDataStreamResponse
Methods:
-- client.InferencePipelines.Data.Stream(ctx context.Context, id string, body githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams) (githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamResponse, error)
+- client.InferencePipelines.Data.Stream(ctx context.Context, id string, body openlayer.InferencePipelineDataStreamParams) (openlayer.InferencePipelineDataStreamResponse, error)
## TestResults
Response Types:
-- githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListResponse
+- openlayer.InferencePipelineTestResultListResponse
Methods:
-- client.InferencePipelines.TestResults.List(ctx context.Context, id string, query githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListParams) (githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListResponse, error)
+- client.InferencePipelines.TestResults.List(ctx context.Context, id string, query openlayer.InferencePipelineTestResultListParams) (openlayer.InferencePipelineTestResultListResponse, error)
diff --git a/client.go b/client.go
index b6b96fd..5c8faab 100644
--- a/client.go
+++ b/client.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/client_test.go b/client_test.go
index 646a4ce..5f46152 100644
--- a/client_test.go
+++ b/client_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -24,7 +24,7 @@ func (t *closureTransport) RoundTrip(req *http.Request) (*http.Response, error)
func TestUserAgentHeader(t *testing.T) {
var userAgent string
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -39,15 +39,15 @@ func TestUserAgentHeader(t *testing.T) {
client.InferencePipelines.Data.Stream(
context.Background(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -63,7 +63,7 @@ func TestUserAgentHeader(t *testing.T) {
func TestRetryAfter(t *testing.T) {
attempts := 0
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -81,15 +81,15 @@ func TestRetryAfter(t *testing.T) {
res, err := client.InferencePipelines.Data.Stream(
context.Background(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -108,7 +108,7 @@ func TestRetryAfter(t *testing.T) {
func TestRetryAfterMs(t *testing.T) {
attempts := 0
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -126,15 +126,15 @@ func TestRetryAfterMs(t *testing.T) {
res, err := client.InferencePipelines.Data.Stream(
context.Background(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -152,7 +152,7 @@ func TestRetryAfterMs(t *testing.T) {
}
func TestContextCancel(t *testing.T) {
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -167,15 +167,15 @@ func TestContextCancel(t *testing.T) {
res, err := client.InferencePipelines.Data.Stream(
cancelCtx,
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -190,7 +190,7 @@ func TestContextCancel(t *testing.T) {
}
func TestContextCancelDelay(t *testing.T) {
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -205,15 +205,15 @@ func TestContextCancelDelay(t *testing.T) {
res, err := client.InferencePipelines.Data.Stream(
cancelCtx,
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
@@ -236,7 +236,7 @@ func TestContextDeadline(t *testing.T) {
defer cancel()
go func() {
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithHTTPClient(&http.Client{
Transport: &closureTransport{
fn: func(req *http.Request) (*http.Response, error) {
@@ -249,15 +249,15 @@ func TestContextDeadline(t *testing.T) {
res, err := client.InferencePipelines.Data.Stream(
deadlineCtx,
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},
diff --git a/commit.go b/commit.go
index fdcf6ce..651371a 100644
--- a/commit.go
+++ b/commit.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"github.com/openlayer-ai/openlayer-go/option"
diff --git a/committestresult.go b/committestresult.go
index 0b366a8..4c78a09 100644
--- a/committestresult.go
+++ b/committestresult.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/committestresult_test.go b/committestresult_test.go
index f37036d..f1e86c0 100644
--- a/committestresult_test.go
+++ b/committestresult_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,23 +21,23 @@ func TestCommitTestResultListWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.Commits.TestResults.List(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.CommitTestResultListParams{
- IncludeArchived: githubcomopenlayeraiopenlayergo.F(true),
- Page: githubcomopenlayeraiopenlayergo.F(int64(1)),
- PerPage: githubcomopenlayeraiopenlayergo.F(int64(1)),
- Status: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.CommitTestResultListParamsStatusPassing),
- Type: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.CommitTestResultListParamsTypeIntegrity),
+ openlayer.CommitTestResultListParams{
+ IncludeArchived: openlayer.F(true),
+ Page: openlayer.F(int64(1)),
+ PerPage: openlayer.F(int64(1)),
+ Status: openlayer.F(openlayer.CommitTestResultListParamsStatusPassing),
+ Type: openlayer.F(openlayer.CommitTestResultListParamsTypeIntegrity),
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/field.go b/field.go
index 893d64c..388bfca 100644
--- a/field.go
+++ b/field.go
@@ -1,4 +1,4 @@
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"github.com/openlayer-ai/openlayer-go/internal/param"
diff --git a/inferencepipeline.go b/inferencepipeline.go
index ce3b2bc..50bbab5 100644
--- a/inferencepipeline.go
+++ b/inferencepipeline.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"github.com/openlayer-ai/openlayer-go/option"
diff --git a/inferencepipelinedata.go b/inferencepipelinedata.go
index 26e1504..b381aa7 100644
--- a/inferencepipelinedata.go
+++ b/inferencepipelinedata.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/inferencepipelinedata_test.go b/inferencepipelinedata_test.go
index 4827543..da50ee7 100644
--- a/inferencepipelinedata_test.go
+++ b/inferencepipelinedata_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,32 +21,32 @@ func TestInferencePipelineDataStreamWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.InferencePipelines.Data.Stream(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- ContextColumnName: githubcomopenlayeraiopenlayergo.F("context"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- GroundTruthColumnName: githubcomopenlayeraiopenlayergo.F("ground_truth"),
- InferenceIDColumnName: githubcomopenlayeraiopenlayergo.F("id"),
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- LatencyColumnName: githubcomopenlayeraiopenlayergo.F("latency"),
- Metadata: githubcomopenlayeraiopenlayergo.F[any](map[string]interface{}{}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- Prompt: githubcomopenlayeraiopenlayergo.F([]githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmDataPrompt{{
- Role: githubcomopenlayeraiopenlayergo.F("user"),
- Content: githubcomopenlayeraiopenlayergo.F("{{ user_query }}"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ ContextColumnName: openlayer.F("context"),
+ CostColumnName: openlayer.F("cost"),
+ GroundTruthColumnName: openlayer.F("ground_truth"),
+ InferenceIDColumnName: openlayer.F("id"),
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ LatencyColumnName: openlayer.F("latency"),
+ Metadata: openlayer.F[any](map[string]interface{}{}),
+ OutputColumnName: openlayer.F("output"),
+ Prompt: openlayer.F([]openlayer.InferencePipelineDataStreamParamsConfigLlmDataPrompt{{
+ Role: openlayer.F("user"),
+ Content: openlayer.F("{{ user_query }}"),
}}),
- QuestionColumnName: githubcomopenlayeraiopenlayergo.F("question"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ QuestionColumnName: openlayer.F("question"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "bar",
"output": "bar",
"tokens": "bar",
@@ -56,7 +56,7 @@ func TestInferencePipelineDataStreamWithOptionalParams(t *testing.T) {
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/inferencepipelinetestresult.go b/inferencepipelinetestresult.go
index c82fd7b..0522fa9 100644
--- a/inferencepipelinetestresult.go
+++ b/inferencepipelinetestresult.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/inferencepipelinetestresult_test.go b/inferencepipelinetestresult_test.go
index 0476e37..fb47f02 100644
--- a/inferencepipelinetestresult_test.go
+++ b/inferencepipelinetestresult_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,23 +21,23 @@ func TestInferencePipelineTestResultListWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.InferencePipelines.TestResults.List(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListParams{
- IncludeArchived: githubcomopenlayeraiopenlayergo.F(true),
- Page: githubcomopenlayeraiopenlayergo.F(int64(1)),
- PerPage: githubcomopenlayeraiopenlayergo.F(int64(1)),
- Status: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListParamsStatusPassing),
- Type: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.InferencePipelineTestResultListParamsTypeIntegrity),
+ openlayer.InferencePipelineTestResultListParams{
+ IncludeArchived: openlayer.F(true),
+ Page: openlayer.F(int64(1)),
+ PerPage: openlayer.F(int64(1)),
+ Status: openlayer.F(openlayer.InferencePipelineTestResultListParamsStatusPassing),
+ Type: openlayer.F(openlayer.InferencePipelineTestResultListParamsTypeIntegrity),
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/project.go b/project.go
index e1cb128..6639834 100644
--- a/project.go
+++ b/project.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/project_test.go b/project_test.go
index 100e803..6008d50 100644
--- a/project_test.go
+++ b/project_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,18 +21,18 @@ func TestProjectListWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
- _, err := client.Projects.List(context.TODO(), githubcomopenlayeraiopenlayergo.ProjectListParams{
- Name: githubcomopenlayeraiopenlayergo.F("string"),
- Page: githubcomopenlayeraiopenlayergo.F(int64(1)),
- PerPage: githubcomopenlayeraiopenlayergo.F(int64(1)),
- TaskType: githubcomopenlayeraiopenlayergo.F(githubcomopenlayeraiopenlayergo.ProjectListParamsTaskTypeLlmBase),
+ _, err := client.Projects.List(context.TODO(), openlayer.ProjectListParams{
+ Name: openlayer.F("string"),
+ Page: openlayer.F(int64(1)),
+ PerPage: openlayer.F(int64(1)),
+ TaskType: openlayer.F(openlayer.ProjectListParamsTaskTypeLlmBase),
})
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/projectcommit.go b/projectcommit.go
index 62a0afe..caefede 100644
--- a/projectcommit.go
+++ b/projectcommit.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/projectcommit_test.go b/projectcommit_test.go
index 56d6f85..3bf4c49 100644
--- a/projectcommit_test.go
+++ b/projectcommit_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,20 +21,20 @@ func TestProjectCommitListWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.Projects.Commits.List(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.ProjectCommitListParams{
- Page: githubcomopenlayeraiopenlayergo.F(int64(1)),
- PerPage: githubcomopenlayeraiopenlayergo.F(int64(1)),
+ openlayer.ProjectCommitListParams{
+ Page: openlayer.F(int64(1)),
+ PerPage: openlayer.F(int64(1)),
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/projectinferencepipeline.go b/projectinferencepipeline.go
index 3057f23..158e23f 100644
--- a/projectinferencepipeline.go
+++ b/projectinferencepipeline.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo
+package openlayer
import (
"context"
diff --git a/projectinferencepipeline_test.go b/projectinferencepipeline_test.go
index 004113d..54b85be 100644
--- a/projectinferencepipeline_test.go
+++ b/projectinferencepipeline_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -21,21 +21,21 @@ func TestProjectInferencePipelineListWithOptionalParams(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.Projects.InferencePipelines.List(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.ProjectInferencePipelineListParams{
- Name: githubcomopenlayeraiopenlayergo.F("string"),
- Page: githubcomopenlayeraiopenlayergo.F(int64(1)),
- PerPage: githubcomopenlayeraiopenlayergo.F(int64(1)),
+ openlayer.ProjectInferencePipelineListParams{
+ Name: openlayer.F("string"),
+ Page: openlayer.F(int64(1)),
+ PerPage: openlayer.F(int64(1)),
},
)
if err != nil {
- var apierr *githubcomopenlayeraiopenlayergo.Error
+ var apierr *openlayer.Error
if errors.As(err, &apierr) {
t.Log(string(apierr.DumpRequest(true)))
}
diff --git a/usage_test.go b/usage_test.go
index bc5d7dd..b7180f6 100644
--- a/usage_test.go
+++ b/usage_test.go
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-package githubcomopenlayeraiopenlayergo_test
+package openlayer_test
import (
"context"
@@ -20,22 +20,22 @@ func TestUsage(t *testing.T) {
if !testutil.CheckTestServer(t, baseURL) {
return
}
- client := githubcomopenlayeraiopenlayergo.NewClient(
+ client := openlayer.NewClient(
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
inferencePipelineDataStreamResponse, err := client.InferencePipelines.Data.Stream(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
- githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParams{
- Config: githubcomopenlayeraiopenlayergo.F[githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigUnion](githubcomopenlayeraiopenlayergo.InferencePipelineDataStreamParamsConfigLlmData{
- InputVariableNames: githubcomopenlayeraiopenlayergo.F([]string{"user_query"}),
- OutputColumnName: githubcomopenlayeraiopenlayergo.F("output"),
- NumOfTokenColumnName: githubcomopenlayeraiopenlayergo.F("tokens"),
- CostColumnName: githubcomopenlayeraiopenlayergo.F("cost"),
- TimestampColumnName: githubcomopenlayeraiopenlayergo.F("timestamp"),
+ openlayer.InferencePipelineDataStreamParams{
+ Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
+ InputVariableNames: openlayer.F([]string{"user_query"}),
+ OutputColumnName: openlayer.F("output"),
+ NumOfTokenColumnName: openlayer.F("tokens"),
+ CostColumnName: openlayer.F("cost"),
+ TimestampColumnName: openlayer.F("timestamp"),
}),
- Rows: githubcomopenlayeraiopenlayergo.F([]map[string]interface{}{{
+ Rows: openlayer.F([]map[string]interface{}{{
"user_query": "what's the meaning of life?",
"output": "42",
"tokens": map[string]interface{}{},