Skip to content

Commit

Permalink
feat(api): update via SDK Studio
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-bot committed Jul 17, 2024
1 parent 373f69d commit d613239
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 26 deletions.
4 changes: 2 additions & 2 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,11 +60,11 @@ Methods:

Response Types:

- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamResponse">InferencePipelineRowStreamResponse</a>
- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowUpdateResponse">InferencePipelineRowUpdateResponse</a>

Methods:

- <code title="put /inference-pipelines/{inferencePipelineId}/rows">client.InferencePipelines.Rows.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowService.Stream">Stream</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, inferencePipelineID <a href="https://pkg.go.dev/builtin#string">string</a>, params <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamParams">InferencePipelineRowStreamParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamResponse">InferencePipelineRowStreamResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>
- <code title="put /inference-pipelines/{inferencePipelineId}/rows">client.InferencePipelines.Rows.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowService.Update">Update</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, inferencePipelineID <a href="https://pkg.go.dev/builtin#string">string</a>, params <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowUpdateParams">InferencePipelineRowUpdateParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowUpdateResponse">InferencePipelineRowUpdateResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

## TestResults

Expand Down
40 changes: 20 additions & 20 deletions inferencepipelinerow.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ func NewInferencePipelineRowService(opts ...option.RequestOption) (r *InferenceP
}

// Update an inference data point in an inference pipeline.
func (r *InferencePipelineRowService) Stream(ctx context.Context, inferencePipelineID string, params InferencePipelineRowStreamParams, opts ...option.RequestOption) (res *InferencePipelineRowStreamResponse, err error) {
func (r *InferencePipelineRowService) Update(ctx context.Context, inferencePipelineID string, params InferencePipelineRowUpdateParams, opts ...option.RequestOption) (res *InferencePipelineRowUpdateResponse, err error) {
opts = append(r.Options[:], opts...)
if inferencePipelineID == "" {
err = errors.New("missing required inferencePipelineId parameter")
Expand All @@ -47,62 +47,62 @@ func (r *InferencePipelineRowService) Stream(ctx context.Context, inferencePipel
return
}

type InferencePipelineRowStreamResponse struct {
Success InferencePipelineRowStreamResponseSuccess `json:"success,required"`
JSON inferencePipelineRowStreamResponseJSON `json:"-"`
type InferencePipelineRowUpdateResponse struct {
Success InferencePipelineRowUpdateResponseSuccess `json:"success,required"`
JSON inferencePipelineRowUpdateResponseJSON `json:"-"`
}

// inferencePipelineRowStreamResponseJSON contains the JSON metadata for the struct
// [InferencePipelineRowStreamResponse]
type inferencePipelineRowStreamResponseJSON struct {
// inferencePipelineRowUpdateResponseJSON contains the JSON metadata for the struct
// [InferencePipelineRowUpdateResponse]
type inferencePipelineRowUpdateResponseJSON struct {
Success apijson.Field
raw string
ExtraFields map[string]apijson.Field
}

func (r *InferencePipelineRowStreamResponse) UnmarshalJSON(data []byte) (err error) {
func (r *InferencePipelineRowUpdateResponse) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}

func (r inferencePipelineRowStreamResponseJSON) RawJSON() string {
func (r inferencePipelineRowUpdateResponseJSON) RawJSON() string {
return r.raw
}

// InferencePipelineRowUpdateResponseSuccess reports whether the update
// succeeded. Its only known value is true.
type InferencePipelineRowUpdateResponseSuccess bool

const (
	InferencePipelineRowUpdateResponseSuccessTrue InferencePipelineRowUpdateResponseSuccess = true
)

// IsKnown reports whether r is one of the declared enum values.
func (r InferencePipelineRowUpdateResponseSuccess) IsKnown() bool {
	switch r {
	case InferencePipelineRowUpdateResponseSuccessTrue:
		return true
	}
	return false
}

type InferencePipelineRowStreamParams struct {
type InferencePipelineRowUpdateParams struct {
// Specify the inference id as a query param.
InferenceID param.Field[string] `query:"inferenceId,required"`
Row param.Field[interface{}] `json:"row,required"`
Config param.Field[InferencePipelineRowStreamParamsConfig] `json:"config"`
Config param.Field[InferencePipelineRowUpdateParamsConfig] `json:"config"`
}

func (r InferencePipelineRowStreamParams) MarshalJSON() (data []byte, err error) {
func (r InferencePipelineRowUpdateParams) MarshalJSON() (data []byte, err error) {
return apijson.MarshalRoot(r)
}

// URLQuery serializes [InferencePipelineRowStreamParams]'s query parameters as
// URLQuery serializes [InferencePipelineRowUpdateParams]'s query parameters as
// `url.Values`.
func (r InferencePipelineRowStreamParams) URLQuery() (v url.Values) {
func (r InferencePipelineRowUpdateParams) URLQuery() (v url.Values) {
return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
ArrayFormat: apiquery.ArrayQueryFormatComma,
NestedFormat: apiquery.NestedQueryFormatBrackets,
})
}

type InferencePipelineRowStreamParamsConfig struct {
type InferencePipelineRowUpdateParamsConfig struct {
// Name of the column with the ground truths.
GroundTruthColumnName param.Field[string] `json:"groundTruthColumnName"`
// Name of the column with human feedback.
Expand All @@ -118,6 +118,6 @@ type InferencePipelineRowStreamParamsConfig struct {
TimestampColumnName param.Field[string] `json:"timestampColumnName"`
}

func (r InferencePipelineRowStreamParamsConfig) MarshalJSON() (data []byte, err error) {
func (r InferencePipelineRowUpdateParamsConfig) MarshalJSON() (data []byte, err error) {
return apijson.MarshalRoot(r)
}
8 changes: 4 additions & 4 deletions inferencepipelinerow_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import (
"github.com/openlayer-ai/openlayer-go/option"
)

func TestInferencePipelineRowStreamWithOptionalParams(t *testing.T) {
func TestInferencePipelineRowUpdateWithOptionalParams(t *testing.T) {
baseURL := "http://localhost:4010"
if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
baseURL = envURL
Expand All @@ -25,13 +25,13 @@ func TestInferencePipelineRowStreamWithOptionalParams(t *testing.T) {
option.WithBaseURL(baseURL),
option.WithAPIKey("My API Key"),
)
_, err := client.InferencePipelines.Rows.Stream(
_, err := client.InferencePipelines.Rows.Update(
context.TODO(),
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
openlayer.InferencePipelineRowStreamParams{
openlayer.InferencePipelineRowUpdateParams{
InferenceID: openlayer.F("inferenceId"),
Row: openlayer.F[any](map[string]interface{}{}),
Config: openlayer.F(openlayer.InferencePipelineRowStreamParamsConfig{
Config: openlayer.F(openlayer.InferencePipelineRowUpdateParamsConfig{
InferenceIDColumnName: openlayer.F("id"),
LatencyColumnName: openlayer.F("latency"),
TimestampColumnName: openlayer.F("timestamp"),
Expand Down

0 comments on commit d613239

Please sign in to comment.