8 changes: 4 additions & 4 deletions .apigentools-info
@@ -4,13 +4,13 @@
"spec_versions": {
"v1": {
"apigentools_version": "1.6.6",
"regenerated": "2025-01-09 22:06:12.839933",
"spec_repo_commit": "8d40e082"
"regenerated": "2025-01-10 22:21:35.663663",
"spec_repo_commit": "2f8c42a8"
},
"v2": {
"apigentools_version": "1.6.6",
"regenerated": "2025-01-09 22:06:12.854479",
"spec_repo_commit": "8d40e082"
"regenerated": "2025-01-10 22:21:35.678588",
"spec_repo_commit": "2f8c42a8"
}
}
}
9 changes: 9 additions & 0 deletions .generator/schemas/v1/openapi.yaml
@@ -5926,6 +5926,9 @@ components:

Make sure to use an application key created by an admin.'
properties:
description:
description: A description of the pipeline.
type: string
filter:
$ref: '#/components/schemas/LogsFilter'
id:
@@ -5948,6 +5951,12 @@
items:
$ref: '#/components/schemas/LogsProcessor'
type: array
tags:
description: A list of tags associated with the pipeline.
items:
description: A single tag using the format `key:value`.
type: string
type: array
type:
description: Type of pipeline.
example: pipeline
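The schema addition above documents `tags` entries as plain `key:value` strings rather than structured objects. As a small illustration (a hypothetical helper, not part of the spec or the generated client), such tags could be composed from a key/value map before being attached to a pipeline:

```java
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PipelineTagFormat {
  // Hypothetical helper: renders a key/value map into the `key:value` strings
  // that the new LogsPipeline `tags` array documents.
  static List<String> toPipelineTags(Map<String, String> tags) {
    return tags.entrySet().stream()
        .map(e -> e.getKey() + ":" + e.getValue())
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    Map<String, String> raw = new LinkedHashMap<>();
    raw.put("team", "event-platform");
    raw.put("env", "prod");
    System.out.println(toPipelineTags(raw)); // [team:event-platform, env:prod]
  }
}
```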
75 changes: 73 additions & 2 deletions src/main/java/com/datadog/api/client/v1/model/LogsPipeline.java
@@ -27,18 +27,23 @@
* application key created by an admin.
*/
@JsonPropertyOrder({
LogsPipeline.JSON_PROPERTY_DESCRIPTION,
LogsPipeline.JSON_PROPERTY_FILTER,
LogsPipeline.JSON_PROPERTY_ID,
LogsPipeline.JSON_PROPERTY_IS_ENABLED,
LogsPipeline.JSON_PROPERTY_IS_READ_ONLY,
LogsPipeline.JSON_PROPERTY_NAME,
LogsPipeline.JSON_PROPERTY_PROCESSORS,
LogsPipeline.JSON_PROPERTY_TAGS,
LogsPipeline.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class LogsPipeline {
@JsonIgnore public boolean unparsed = false;
public static final String JSON_PROPERTY_DESCRIPTION = "description";
private String description;

public static final String JSON_PROPERTY_FILTER = "filter";
private LogsFilter filter;

@@ -57,6 +62,9 @@ public class LogsPipeline {
public static final String JSON_PROPERTY_PROCESSORS = "processors";
private List<LogsProcessor> processors = null;

public static final String JSON_PROPERTY_TAGS = "tags";
private List<String> tags = null;

public static final String JSON_PROPERTY_TYPE = "type";
private String type;

@@ -67,6 +75,27 @@ public LogsPipeline(@JsonProperty(required = true, value = JSON_PROPERTY_NAME) S
this.name = name;
}

public LogsPipeline description(String description) {
this.description = description;
return this;
}

/**
* A description of the pipeline.
*
* @return description
*/
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_DESCRIPTION)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public String getDescription() {
return description;
}

public void setDescription(String description) {
this.description = description;
}

public LogsPipeline filter(LogsFilter filter) {
this.filter = filter;
this.unparsed |= filter.unparsed;
@@ -187,6 +216,35 @@ public void setProcessors(List<LogsProcessor> processors) {
this.processors = processors;
}

public LogsPipeline tags(List<String> tags) {
this.tags = tags;
return this;
}

public LogsPipeline addTagsItem(String tagsItem) {
if (this.tags == null) {
this.tags = new ArrayList<>();
}
this.tags.add(tagsItem);
return this;
}

/**
* A list of tags associated with the pipeline.
*
* @return tags
*/
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_TAGS)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public List<String> getTags() {
return tags;
}

public void setTags(List<String> tags) {
this.tags = tags;
}

/**
* Type of pipeline.
*
@@ -255,32 +313,45 @@ public boolean equals(Object o) {
return false;
}
LogsPipeline logsPipeline = (LogsPipeline) o;
-    return Objects.equals(this.filter, logsPipeline.filter)
+    return Objects.equals(this.description, logsPipeline.description)
+        && Objects.equals(this.filter, logsPipeline.filter)
&& Objects.equals(this.id, logsPipeline.id)
&& Objects.equals(this.isEnabled, logsPipeline.isEnabled)
&& Objects.equals(this.isReadOnly, logsPipeline.isReadOnly)
&& Objects.equals(this.name, logsPipeline.name)
&& Objects.equals(this.processors, logsPipeline.processors)
&& Objects.equals(this.tags, logsPipeline.tags)
&& Objects.equals(this.type, logsPipeline.type)
&& Objects.equals(this.additionalProperties, logsPipeline.additionalProperties);
}

@Override
public int hashCode() {
return Objects.hash(
-        filter, id, isEnabled, isReadOnly, name, processors, type, additionalProperties);
+        description,
+        filter,
+        id,
+        isEnabled,
+        isReadOnly,
+        name,
+        processors,
+        tags,
+        type,
+        additionalProperties);
}

@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class LogsPipeline {\n");
sb.append(" description: ").append(toIndentedString(description)).append("\n");
sb.append(" filter: ").append(toIndentedString(filter)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" isEnabled: ").append(toIndentedString(isEnabled)).append("\n");
sb.append(" isReadOnly: ").append(toIndentedString(isReadOnly)).append("\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" processors: ").append(toIndentedString(processors)).append("\n");
sb.append(" tags: ").append(toIndentedString(tags)).append("\n");
sb.append(" type: ").append(toIndentedString(type)).append("\n");
sb.append(" additionalProperties: ")
.append(toIndentedString(additionalProperties))
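Taken together, the spec and model changes above expose two new optional fields, `description` and `tags`, through the usual fluent setters on `LogsPipeline`. The sketch below shows one way a caller might set them when creating a pipeline via the existing `LogsPipelinesApi`; the pipeline name, filter query, and tag values are illustrative only, and credentials are assumed to be picked up from the environment (`DD_API_KEY` / `DD_APP_KEY`).

```java
import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsFilter;
import com.datadog.api.client.v1.model.LogsPipeline;
import java.util.Arrays;

public class CreatePipelineWithDescriptionAndTags {
  public static void main(String[] args) {
    // Assumes DD_API_KEY and DD_APP_KEY are set in the environment.
    LogsPipelinesApi api = new LogsPipelinesApi(ApiClient.getDefaultApiClient());

    // Illustrative values; "name" remains the only required field.
    LogsPipeline body =
        new LogsPipeline("python service logs")
            .description("Parses logs emitted by the Python services.")
            .filter(new LogsFilter().query("source:python"))
            .tags(Arrays.asList("team:event-platform", "env:prod"));

    try {
      LogsPipeline created = api.createLogsPipeline(body);
      System.out.println("Created pipeline " + created.getId() + " with tags " + created.getTags());
    } catch (ApiException e) {
      System.err.println("Create failed: " + e.getMessage());
    }
  }
}
```

Note that `tags(List)` replaces any existing list, while the generated `addTagsItem` appends one entry at a time, matching the pattern used for the other list-valued fields on the model.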
@@ -26,14 +26,14 @@ Feature: Logs Pipelines
@generated @skip @team:DataDog/event-platform-experience
Scenario: Create a pipeline returns "Bad Request" response
Given new "CreateLogsPipeline" request
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/event-platform-experience
Scenario: Create a pipeline returns "OK" response
Given new "CreateLogsPipeline" request
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 200 OK

@@ -81,15 +81,15 @@ Feature: Logs Pipelines
Scenario: Update a pipeline returns "Bad Request" response
Given new "UpdateLogsPipeline" request
And request contains "pipeline_id" parameter from "REPLACE.ME"
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/event-platform-experience
Scenario: Update a pipeline returns "OK" response
Given new "UpdateLogsPipeline" request
And request contains "pipeline_id" parameter from "REPLACE.ME"
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
When the request is sent
Then the response status is 200 OK

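The regenerated scenarios above only add an empty `tags` array to the canned request bodies. A more realistic use of the new fields is to describe and tag an existing pipeline; the minimal sketch below assumes a valid pipeline ID and environment-provided credentials, and a fetched pipeline echoed back to the update endpoint may still carry server-managed fields such as `id` and `is_read_only`.

```java
import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsPipeline;

public class TagExistingPipeline {
  public static void main(String[] args) throws ApiException {
    // Assumes DD_API_KEY and DD_APP_KEY are set in the environment.
    LogsPipelinesApi api = new LogsPipelinesApi(ApiClient.getDefaultApiClient());

    String pipelineId = "<pipeline-id>"; // placeholder: substitute a real pipeline ID

    // Fetch the current definition, then use the setters added in this change.
    LogsPipeline pipeline = api.getLogsPipeline(pipelineId);
    pipeline.setDescription("Owned by the event platform team.");
    pipeline.addTagsItem("team:event-platform");

    LogsPipeline updated = api.updateLogsPipeline(pipelineId, pipeline);
    System.out.println("Updated tags: " + updated.getTags());
  }
}
```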