Skip to content

Commit

Permalink
Add tags and description to logs pipelines (#2868)
Browse files Browse the repository at this point in the history
Co-authored-by: ci.datadog-api-spec <packages@datadoghq.com>
  • Loading branch information
api-clients-generation-pipeline[bot] and ci.datadog-api-spec authored Jan 13, 2025
1 parent 987ec86 commit 4915f70
Show file tree
Hide file tree
Showing 6 changed files with 97 additions and 16 deletions.
8 changes: 4 additions & 4 deletions .apigentools-info
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
"spec_versions": {
"v1": {
"apigentools_version": "1.6.6",
"regenerated": "2025-01-09 22:06:11.503685",
"spec_repo_commit": "8d40e082"
"regenerated": "2025-01-10 22:21:27.561108",
"spec_repo_commit": "2f8c42a8"
},
"v2": {
"apigentools_version": "1.6.6",
"regenerated": "2025-01-09 22:06:11.519399",
"spec_repo_commit": "8d40e082"
"regenerated": "2025-01-10 22:21:27.576496",
"spec_repo_commit": "2f8c42a8"
}
}
}
9 changes: 9 additions & 0 deletions .generator/schemas/v1/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5926,6 +5926,9 @@ components:

Make sure to use an application key created by an admin.'
properties:
description:
description: A description of the pipeline.
type: string
filter:
$ref: '#/components/schemas/LogsFilter'
id:
Expand All @@ -5948,6 +5951,12 @@ components:
items:
$ref: '#/components/schemas/LogsProcessor'
type: array
tags:
description: A list of tags associated with the pipeline.
items:
description: A single tag using the format `key:value`.
type: string
type: array
type:
description: Type of pipeline.
example: pipeline
Expand Down
86 changes: 78 additions & 8 deletions api/datadogV1/model_logs_pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ import (
// **Note**: These endpoints are only available for admin users.
// Make sure to use an application key created by an admin.
type LogsPipeline struct {
// A description of the pipeline.
Description *string `json:"description,omitempty"`
// Filter for logs.
Filter *LogsFilter `json:"filter,omitempty"`
// ID of the pipeline.
Expand All @@ -28,6 +30,8 @@ type LogsPipeline struct {
Name string `json:"name"`
// Ordered list of processors in this pipeline.
Processors []LogsProcessor `json:"processors,omitempty"`
// A list of tags associated with the pipeline.
Tags []string `json:"tags,omitempty"`
// Type of pipeline.
Type *string `json:"type,omitempty"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
Expand All @@ -53,6 +57,34 @@ func NewLogsPipelineWithDefaults() *LogsPipeline {
return &this
}

// GetDescription returns the Description field value if set, zero value otherwise.
func (o *LogsPipeline) GetDescription() string {
	if o != nil && o.Description != nil {
		return *o.Description
	}
	// Unset (or nil receiver): fall back to the string zero value.
	return ""
}

// GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *LogsPipeline) GetDescriptionOk() (*string, bool) {
	if o != nil && o.Description != nil {
		return o.Description, true
	}
	return nil, false
}

// HasDescription returns a boolean if a field has been set.
func (o *LogsPipeline) HasDescription() bool {
	if o == nil {
		return false
	}
	return o.Description != nil
}

// SetDescription gets a reference to the given string and assigns it to the Description field.
func (o *LogsPipeline) SetDescription(v string) {
	// Copy the argument so the stored pointer does not alias caller state.
	value := v
	o.Description = &value
}

// GetFilter returns the Filter field value if set, zero value otherwise.
func (o *LogsPipeline) GetFilter() LogsFilter {
if o == nil || o.Filter == nil {
Expand Down Expand Up @@ -216,6 +248,34 @@ func (o *LogsPipeline) SetProcessors(v []LogsProcessor) {
o.Processors = v
}

// GetTags returns the Tags field value if set, zero value otherwise.
func (o *LogsPipeline) GetTags() []string {
	if o != nil && o.Tags != nil {
		return o.Tags
	}
	// Unset (or nil receiver): return the nil slice zero value.
	return nil
}

// GetTagsOk returns a tuple with the Tags field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *LogsPipeline) GetTagsOk() (*[]string, bool) {
	if o != nil && o.Tags != nil {
		return &o.Tags, true
	}
	return nil, false
}

// HasTags returns a boolean if a field has been set.
func (o *LogsPipeline) HasTags() bool {
	if o == nil {
		return false
	}
	return o.Tags != nil
}

// SetTags gets a reference to the given []string and assigns it to the Tags field.
func (o *LogsPipeline) SetTags(v []string) {
	// Assign the slice header directly; callers own the backing array.
	o.Tags = v
}

// GetType returns the Type field value if set, zero value otherwise.
func (o *LogsPipeline) GetType() string {
if o == nil || o.Type == nil {
Expand Down Expand Up @@ -250,6 +310,9 @@ func (o LogsPipeline) MarshalJSON() ([]byte, error) {
if o.UnparsedObject != nil {
return datadog.Marshal(o.UnparsedObject)
}
if o.Description != nil {
toSerialize["description"] = o.Description
}
if o.Filter != nil {
toSerialize["filter"] = o.Filter
}
Expand All @@ -266,6 +329,9 @@ func (o LogsPipeline) MarshalJSON() ([]byte, error) {
if o.Processors != nil {
toSerialize["processors"] = o.Processors
}
if o.Tags != nil {
toSerialize["tags"] = o.Tags
}
if o.Type != nil {
toSerialize["type"] = o.Type
}
Expand All @@ -279,13 +345,15 @@ func (o LogsPipeline) MarshalJSON() ([]byte, error) {
// UnmarshalJSON deserializes the given payload.
func (o *LogsPipeline) UnmarshalJSON(bytes []byte) (err error) {
all := struct {
Filter *LogsFilter `json:"filter,omitempty"`
Id *string `json:"id,omitempty"`
IsEnabled *bool `json:"is_enabled,omitempty"`
IsReadOnly *bool `json:"is_read_only,omitempty"`
Name *string `json:"name"`
Processors []LogsProcessor `json:"processors,omitempty"`
Type *string `json:"type,omitempty"`
Description *string `json:"description,omitempty"`
Filter *LogsFilter `json:"filter,omitempty"`
Id *string `json:"id,omitempty"`
IsEnabled *bool `json:"is_enabled,omitempty"`
IsReadOnly *bool `json:"is_read_only,omitempty"`
Name *string `json:"name"`
Processors []LogsProcessor `json:"processors,omitempty"`
Tags []string `json:"tags,omitempty"`
Type *string `json:"type,omitempty"`
}{}
if err = datadog.Unmarshal(bytes, &all); err != nil {
return datadog.Unmarshal(bytes, &o.UnparsedObject)
Expand All @@ -295,12 +363,13 @@ func (o *LogsPipeline) UnmarshalJSON(bytes []byte) (err error) {
}
additionalProperties := make(map[string]interface{})
if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
datadog.DeleteKeys(additionalProperties, &[]string{"filter", "id", "is_enabled", "is_read_only", "name", "processors", "type"})
datadog.DeleteKeys(additionalProperties, &[]string{"description", "filter", "id", "is_enabled", "is_read_only", "name", "processors", "tags", "type"})
} else {
return err
}

hasInvalidField := false
o.Description = all.Description
if all.Filter != nil && all.Filter.UnparsedObject != nil && o.UnparsedObject == nil {
hasInvalidField = true
}
Expand All @@ -310,6 +379,7 @@ func (o *LogsPipeline) UnmarshalJSON(bytes []byte) (err error) {
o.IsReadOnly = all.IsReadOnly
o.Name = *all.Name
o.Processors = all.Processors
o.Tags = all.Tags
o.Type = all.Type

if len(additionalProperties) > 0 {
Expand Down
1 change: 1 addition & 0 deletions examples/v1/logs-pipelines/CreateLogsPipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ rule_name_2 bar
Type: datadogV1.LOGSGROKPARSERTYPE_GROK_PARSER,
}},
},
Tags: []string{},
}
ctx := datadog.NewDefaultContext(context.Background())
configuration := datadog.NewConfiguration()
Expand Down
1 change: 1 addition & 0 deletions examples/v1/logs-pipelines/UpdateLogsPipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ rule_name_2 bar
Type: datadogV1.LOGSGROKPARSERTYPE_GROK_PARSER,
}},
},
Tags: []string{},
}
ctx := datadog.NewDefaultContext(context.Background())
configuration := datadog.NewConfiguration()
Expand Down
8 changes: 4 additions & 4 deletions tests/scenarios/features/v1/logs_pipelines.feature
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,14 @@ Feature: Logs Pipelines
@generated @skip @team:DataDog/event-platform-experience
Scenario: Create a pipeline returns "Bad Request" response
Given new "CreateLogsPipeline" request
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/event-platform-experience
Scenario: Create a pipeline returns "OK" response
Given new "CreateLogsPipeline" request
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 200 OK

Expand Down Expand Up @@ -81,15 +81,15 @@ Feature: Logs Pipelines
Scenario: Update a pipeline returns "Bad Request" response
Given new "UpdateLogsPipeline" request
And request contains "pipeline_id" parameter from "REPLACE.ME"
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/event-platform-experience
Scenario: Update a pipeline returns "OK" response
Given new "UpdateLogsPipeline" request
And request contains "pipeline_id" parameter from "REPLACE.ME"
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 200 OK

Expand Down

0 comments on commit 4915f70

Please sign in to comment.