diff --git a/.generated-info b/.generated-info index 59f91397423..f029dbd2d2c 100644 --- a/.generated-info +++ b/.generated-info @@ -1,4 +1,4 @@ { - "spec_repo_commit": "04d09cb", - "generated": "2025-07-23 09:23:45.770" + "spec_repo_commit": "4727afe", + "generated": "2025-07-23 15:38:09.593" } diff --git a/.generator/schemas/v1/openapi.yaml b/.generator/schemas/v1/openapi.yaml index 5ce0fd12fcd..91a0e38e097 100644 --- a/.generator/schemas/v1/openapi.yaml +++ b/.generator/schemas/v1/openapi.yaml @@ -5547,6 +5547,72 @@ components: type: string x-enum-varnames: - DATE_REMAPPER + LogsDecoderProcessor: + description: 'The decoder processor decodes any source attribute containing + a + + base64/base16-encoded UTF-8/ASCII string back to its original value, storing + the + + result in a target attribute.' + properties: + binary_to_text_encoding: + $ref: '#/components/schemas/LogsDecoderProcessorBinaryToTextEncoding' + input_representation: + $ref: '#/components/schemas/LogsDecoderProcessorInputRepresentation' + is_enabled: + default: false + description: Whether the processor is enabled. + type: boolean + name: + description: Name of the processor. + type: string + source: + description: Name of the log attribute with the encoded data. + example: encoded.field + type: string + target: + description: Name of the log attribute that contains the decoded data. + example: decoded.field + type: string + type: + $ref: '#/components/schemas/LogsDecoderProcessorType' + required: + - source + - target + - binary_to_text_encoding + - input_representation + - type + type: object + LogsDecoderProcessorBinaryToTextEncoding: + description: The encoding used to represent the binary data. + enum: + - base64 + - base16 + example: base64 + type: string + x-enum-varnames: + - BASE64 + - BASE16 + LogsDecoderProcessorInputRepresentation: + description: The original representation of input string. + enum: + - utf_8 + - integer + example: utf_8 + type: string + x-enum-varnames: + - UTF_8 + - INTEGER + LogsDecoderProcessorType: + default: decoder-processor + description: Type of logs decoder processor. + enum: + - decoder-processor + example: decoder-processor + type: string + x-enum-varnames: + - DECODER_PROCESSOR LogsExclusion: description: Represents the index exclusion filter object from configuration API. @@ -6215,6 +6281,7 @@ components: - $ref: '#/components/schemas/LogsTraceRemapper' - $ref: '#/components/schemas/LogsSpanRemapper' - $ref: '#/components/schemas/LogsArrayProcessor' + - $ref: '#/components/schemas/LogsDecoderProcessor' LogsQueryCompute: description: Define computation for a log query. 
properties: diff --git a/examples/v1/logs-pipelines/CreateLogsPipeline_3336967838.java b/examples/v1/logs-pipelines/CreateLogsPipeline_3336967838.java new file mode 100644 index 00000000000..887eac75695 --- /dev/null +++ b/examples/v1/logs-pipelines/CreateLogsPipeline_3336967838.java @@ -0,0 +1,47 @@ +// Create a pipeline with Decoder Processor returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v1.api.LogsPipelinesApi; +import com.datadog.api.client.v1.model.LogsDecoderProcessor; +import com.datadog.api.client.v1.model.LogsDecoderProcessorBinaryToTextEncoding; +import com.datadog.api.client.v1.model.LogsDecoderProcessorInputRepresentation; +import com.datadog.api.client.v1.model.LogsDecoderProcessorType; +import com.datadog.api.client.v1.model.LogsFilter; +import com.datadog.api.client.v1.model.LogsPipeline; +import com.datadog.api.client.v1.model.LogsProcessor; +import java.util.Collections; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + LogsPipelinesApi apiInstance = new LogsPipelinesApi(defaultClient); + + LogsPipeline body = + new LogsPipeline() + .filter(new LogsFilter().query("source:python")) + .name("testDecoderProcessor") + .processors( + Collections.singletonList( + new LogsProcessor( + new LogsDecoderProcessor() + .type(LogsDecoderProcessorType.DECODER_PROCESSOR) + .isEnabled(true) + .name("test_decoder") + .source("encoded.field") + .target("decoded.field") + .binaryToTextEncoding(LogsDecoderProcessorBinaryToTextEncoding.BASE16) + .inputRepresentation(LogsDecoderProcessorInputRepresentation.UTF_8)))); + + try { + LogsPipeline result = apiInstance.createLogsPipeline(body); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling LogsPipelinesApi#createLogsPipeline"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessor.java b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessor.java new file mode 100644 index 00000000000..d7b18631ee0 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessor.java @@ -0,0 +1,346 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v1.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The decoder processor decodes any source attribute containing a base64/base16-encoded UTF-8/ASCII + * string back to its original value, storing the result in a target attribute. 
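+ * <p>For example, with binary_to_text_encoding base16 and input_representation utf_8, a source value of 48656c6c6f is decoded to the UTF-8 string Hello and stored in the target attribute.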
+ */ +@JsonPropertyOrder({ + LogsDecoderProcessor.JSON_PROPERTY_BINARY_TO_TEXT_ENCODING, + LogsDecoderProcessor.JSON_PROPERTY_INPUT_REPRESENTATION, + LogsDecoderProcessor.JSON_PROPERTY_IS_ENABLED, + LogsDecoderProcessor.JSON_PROPERTY_NAME, + LogsDecoderProcessor.JSON_PROPERTY_SOURCE, + LogsDecoderProcessor.JSON_PROPERTY_TARGET, + LogsDecoderProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class LogsDecoderProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_BINARY_TO_TEXT_ENCODING = "binary_to_text_encoding"; + private LogsDecoderProcessorBinaryToTextEncoding binaryToTextEncoding; + + public static final String JSON_PROPERTY_INPUT_REPRESENTATION = "input_representation"; + private LogsDecoderProcessorInputRepresentation inputRepresentation; + + public static final String JSON_PROPERTY_IS_ENABLED = "is_enabled"; + private Boolean isEnabled = false; + + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_SOURCE = "source"; + private String source; + + public static final String JSON_PROPERTY_TARGET = "target"; + private String target; + + public static final String JSON_PROPERTY_TYPE = "type"; + private LogsDecoderProcessorType type = LogsDecoderProcessorType.DECODER_PROCESSOR; + + public LogsDecoderProcessor() {} + + @JsonCreator + public LogsDecoderProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_BINARY_TO_TEXT_ENCODING) + LogsDecoderProcessorBinaryToTextEncoding binaryToTextEncoding, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUT_REPRESENTATION) + LogsDecoderProcessorInputRepresentation inputRepresentation, + @JsonProperty(required = true, value = JSON_PROPERTY_SOURCE) String source, + @JsonProperty(required = true, value = JSON_PROPERTY_TARGET) String target, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) LogsDecoderProcessorType type) { + this.binaryToTextEncoding = binaryToTextEncoding; + this.unparsed |= !binaryToTextEncoding.isValid(); + this.inputRepresentation = inputRepresentation; + this.unparsed |= !inputRepresentation.isValid(); + this.source = source; + this.target = target; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public LogsDecoderProcessor binaryToTextEncoding( + LogsDecoderProcessorBinaryToTextEncoding binaryToTextEncoding) { + this.binaryToTextEncoding = binaryToTextEncoding; + this.unparsed |= !binaryToTextEncoding.isValid(); + return this; + } + + /** + * The encoding used to represent the binary data. + * + * @return binaryToTextEncoding + */ + @JsonProperty(JSON_PROPERTY_BINARY_TO_TEXT_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public LogsDecoderProcessorBinaryToTextEncoding getBinaryToTextEncoding() { + return binaryToTextEncoding; + } + + public void setBinaryToTextEncoding( + LogsDecoderProcessorBinaryToTextEncoding binaryToTextEncoding) { + if (!binaryToTextEncoding.isValid()) { + this.unparsed = true; + } + this.binaryToTextEncoding = binaryToTextEncoding; + } + + public LogsDecoderProcessor inputRepresentation( + LogsDecoderProcessorInputRepresentation inputRepresentation) { + this.inputRepresentation = inputRepresentation; + this.unparsed |= !inputRepresentation.isValid(); + return this; + } + + /** + * The original representation of input string. 
+ * + * @return inputRepresentation + */ + @JsonProperty(JSON_PROPERTY_INPUT_REPRESENTATION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public LogsDecoderProcessorInputRepresentation getInputRepresentation() { + return inputRepresentation; + } + + public void setInputRepresentation(LogsDecoderProcessorInputRepresentation inputRepresentation) { + if (!inputRepresentation.isValid()) { + this.unparsed = true; + } + this.inputRepresentation = inputRepresentation; + } + + public LogsDecoderProcessor isEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled; + return this; + } + + /** + * Whether the processor is enabled. + * + * @return isEnabled + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_IS_ENABLED) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getIsEnabled() { + return isEnabled; + } + + public void setIsEnabled(Boolean isEnabled) { + this.isEnabled = isEnabled; + } + + public LogsDecoderProcessor name(String name) { + this.name = name; + return this; + } + + /** + * Name of the processor. + * + * @return name + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public LogsDecoderProcessor source(String source) { + this.source = source; + return this; + } + + /** + * Name of the log attribute with the encoded data. + * + * @return source + */ + @JsonProperty(JSON_PROPERTY_SOURCE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public LogsDecoderProcessor target(String target) { + this.target = target; + return this; + } + + /** + * Name of the log attribute that contains the decoded data. + * + * @return target + */ + @JsonProperty(JSON_PROPERTY_TARGET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + public LogsDecoderProcessor type(LogsDecoderProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * Type of logs decoder processor. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public LogsDecoderProcessorType getType() { + return type; + } + + public void setType(LogsDecoderProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return LogsDecoderProcessor + */ + @JsonAnySetter + public LogsDecoderProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this LogsDecoderProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + LogsDecoderProcessor logsDecoderProcessor = (LogsDecoderProcessor) o; + return Objects.equals(this.binaryToTextEncoding, logsDecoderProcessor.binaryToTextEncoding) + && Objects.equals(this.inputRepresentation, logsDecoderProcessor.inputRepresentation) + && Objects.equals(this.isEnabled, logsDecoderProcessor.isEnabled) + && Objects.equals(this.name, logsDecoderProcessor.name) + && Objects.equals(this.source, logsDecoderProcessor.source) + && Objects.equals(this.target, logsDecoderProcessor.target) + && Objects.equals(this.type, logsDecoderProcessor.type) + && Objects.equals(this.additionalProperties, logsDecoderProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + binaryToTextEncoding, + inputRepresentation, + isEnabled, + name, + source, + target, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class LogsDecoderProcessor {\n"); + sb.append(" binaryToTextEncoding: ") + .append(toIndentedString(binaryToTextEncoding)) + .append("\n"); + sb.append(" inputRepresentation: ") + .append(toIndentedString(inputRepresentation)) + .append("\n"); + sb.append(" isEnabled: ").append(toIndentedString(isEnabled)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" source: ").append(toIndentedString(source)).append("\n"); + sb.append(" target: ").append(toIndentedString(target)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorBinaryToTextEncoding.java b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorBinaryToTextEncoding.java new file mode 100644 index 00000000000..564b24c0f68 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorBinaryToTextEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v1.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The encoding used to represent the binary data. */ +@JsonSerialize( + using = + LogsDecoderProcessorBinaryToTextEncoding.LogsDecoderProcessorBinaryToTextEncodingSerializer + .class) +public class LogsDecoderProcessorBinaryToTextEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("base64", "base16")); + + public static final LogsDecoderProcessorBinaryToTextEncoding BASE64 = + new LogsDecoderProcessorBinaryToTextEncoding("base64"); + public static final LogsDecoderProcessorBinaryToTextEncoding BASE16 = + new LogsDecoderProcessorBinaryToTextEncoding("base16"); + + LogsDecoderProcessorBinaryToTextEncoding(String value) { + super(value, allowedValues); + } + + public static class LogsDecoderProcessorBinaryToTextEncodingSerializer + extends StdSerializer { + public LogsDecoderProcessorBinaryToTextEncodingSerializer( + Class t) { + super(t); + } + + public LogsDecoderProcessorBinaryToTextEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + LogsDecoderProcessorBinaryToTextEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static LogsDecoderProcessorBinaryToTextEncoding fromValue(String value) { + return new LogsDecoderProcessorBinaryToTextEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorInputRepresentation.java b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorInputRepresentation.java new file mode 100644 index 00000000000..73688a6f815 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorInputRepresentation.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v1.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The original representation of input string. 
*/ +@JsonSerialize( + using = + LogsDecoderProcessorInputRepresentation.LogsDecoderProcessorInputRepresentationSerializer + .class) +public class LogsDecoderProcessorInputRepresentation extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("utf_8", "integer")); + + public static final LogsDecoderProcessorInputRepresentation UTF_8 = + new LogsDecoderProcessorInputRepresentation("utf_8"); + public static final LogsDecoderProcessorInputRepresentation INTEGER = + new LogsDecoderProcessorInputRepresentation("integer"); + + LogsDecoderProcessorInputRepresentation(String value) { + super(value, allowedValues); + } + + public static class LogsDecoderProcessorInputRepresentationSerializer + extends StdSerializer { + public LogsDecoderProcessorInputRepresentationSerializer( + Class t) { + super(t); + } + + public LogsDecoderProcessorInputRepresentationSerializer() { + this(null); + } + + @Override + public void serialize( + LogsDecoderProcessorInputRepresentation value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static LogsDecoderProcessorInputRepresentation fromValue(String value) { + return new LogsDecoderProcessorInputRepresentation(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorType.java b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorType.java new file mode 100644 index 00000000000..f3a19abb745 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v1/model/LogsDecoderProcessorType.java @@ -0,0 +1,57 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v1.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Type of logs decoder processor. 
*/ +@JsonSerialize(using = LogsDecoderProcessorType.LogsDecoderProcessorTypeSerializer.class) +public class LogsDecoderProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("decoder-processor")); + + public static final LogsDecoderProcessorType DECODER_PROCESSOR = + new LogsDecoderProcessorType("decoder-processor"); + + LogsDecoderProcessorType(String value) { + super(value, allowedValues); + } + + public static class LogsDecoderProcessorTypeSerializer + extends StdSerializer { + public LogsDecoderProcessorTypeSerializer(Class t) { + super(t); + } + + public LogsDecoderProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + LogsDecoderProcessorType value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static LogsDecoderProcessorType fromValue(String value) { + return new LogsDecoderProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsProcessor.java b/src/main/java/com/datadog/api/client/v1/model/LogsProcessor.java index 601cc8d78cc..00bf36e8d2e 100644 --- a/src/main/java/com/datadog/api/client/v1/model/LogsProcessor.java +++ b/src/main/java/com/datadog/api/client/v1/model/LogsProcessor.java @@ -876,6 +876,51 @@ public LogsProcessor deserialize(JsonParser jp, DeserializationContext ctxt) log.log(Level.FINER, "Input data does not match schema 'LogsArrayProcessor'", e); } + // deserialize LogsDecoderProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (LogsDecoderProcessor.class.equals(Integer.class) + || LogsDecoderProcessor.class.equals(Long.class) + || LogsDecoderProcessor.class.equals(Float.class) + || LogsDecoderProcessor.class.equals(Double.class) + || LogsDecoderProcessor.class.equals(Boolean.class) + || LogsDecoderProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((LogsDecoderProcessor.class.equals(Integer.class) + || LogsDecoderProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((LogsDecoderProcessor.class.equals(Float.class) + || LogsDecoderProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (LogsDecoderProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (LogsDecoderProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(LogsDecoderProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((LogsDecoderProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'LogsDecoderProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log(Level.FINER, "Input data does not match schema 'LogsDecoderProcessor'", e); + } + LogsProcessor ret = new LogsProcessor(); if (match == 1) { ret.setActualInstance(deserialized); @@ -994,6 +1039,11 @@ public LogsProcessor(LogsArrayProcessor o) { setActualInstance(o); } + public LogsProcessor(LogsDecoderProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put("LogsGrokParser", new GenericType() {}); schemas.put("LogsDateRemapper", new GenericType() {}); @@ -1015,6 +1065,7 @@ public LogsProcessor(LogsArrayProcessor o) { schemas.put("LogsTraceRemapper", new GenericType() {}); schemas.put("LogsSpanRemapper", new GenericType() {}); schemas.put("LogsArrayProcessor", new GenericType() {}); + schemas.put("LogsDecoderProcessor", new GenericType() {}); JSON.registerDescendants(LogsProcessor.class, Collections.unmodifiableMap(schemas)); } @@ -1029,7 +1080,8 @@ public Map getSchemas() { * LogsServiceRemapper, LogsMessageRemapper, LogsAttributeRemapper, LogsURLParser, * LogsUserAgentParser, LogsCategoryProcessor, LogsArithmeticProcessor, * LogsStringBuilderProcessor, LogsPipelineProcessor, LogsGeoIPParser, LogsLookupProcessor, - * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor + * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor, + * LogsDecoderProcessor * *
<p>
It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1109,6 +1161,10 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf(LogsDecoderProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -1120,7 +1176,7 @@ public void setActualInstance(Object instance) { + " LogsUserAgentParser, LogsCategoryProcessor, LogsArithmeticProcessor," + " LogsStringBuilderProcessor, LogsPipelineProcessor, LogsGeoIPParser," + " LogsLookupProcessor, ReferenceTableLogsLookupProcessor, LogsTraceRemapper," - + " LogsSpanRemapper, LogsArrayProcessor"); + + " LogsSpanRemapper, LogsArrayProcessor, LogsDecoderProcessor"); } /** @@ -1128,13 +1184,15 @@ public void setActualInstance(Object instance) { * LogsStatusRemapper, LogsServiceRemapper, LogsMessageRemapper, LogsAttributeRemapper, * LogsURLParser, LogsUserAgentParser, LogsCategoryProcessor, LogsArithmeticProcessor, * LogsStringBuilderProcessor, LogsPipelineProcessor, LogsGeoIPParser, LogsLookupProcessor, - * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor + * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor, + * LogsDecoderProcessor * * @return The actual instance (LogsGrokParser, LogsDateRemapper, LogsStatusRemapper, * LogsServiceRemapper, LogsMessageRemapper, LogsAttributeRemapper, LogsURLParser, * LogsUserAgentParser, LogsCategoryProcessor, LogsArithmeticProcessor, * LogsStringBuilderProcessor, LogsPipelineProcessor, LogsGeoIPParser, LogsLookupProcessor, - * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor) + * ReferenceTableLogsLookupProcessor, LogsTraceRemapper, LogsSpanRemapper, LogsArrayProcessor, + * LogsDecoderProcessor) */ @Override public Object getActualInstance() { @@ -1339,4 +1397,15 @@ public LogsSpanRemapper getLogsSpanRemapper() throws ClassCastException { public LogsArrayProcessor getLogsArrayProcessor() throws ClassCastException { return (LogsArrayProcessor) super.getActualInstance(); } + + /** + * Get the actual instance of `LogsDecoderProcessor`. If the actual instance is not + * `LogsDecoderProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `LogsDecoderProcessor` + * @throws ClassCastException if the instance is not `LogsDecoderProcessor` + */ + public LogsDecoderProcessor getLogsDecoderProcessor() throws ClassCastException { + return (LogsDecoderProcessor) super.getActualInstance(); + } } diff --git a/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.freeze b/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.freeze new file mode 100644 index 00000000000..0e0841ea4b8 --- /dev/null +++ b/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-07-22T13:27:59.975Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.json b/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.json new file mode 100644 index 00000000000..4e33482adff --- /dev/null +++ b/src/test/resources/cassettes/features/v1/Create_a_pipeline_with_Decoder_Processor_returns_OK_response.json @@ -0,0 +1,58 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"filter\":{\"query\":\"source:python\"},\"name\":\"testDecoderProcessor\",\"processors\":[{\"binary_to_text_encoding\":\"base16\",\"input_representation\":\"utf_8\",\"is_enabled\":true,\"name\":\"test_decoder\",\"source\":\"encoded.field\",\"target\":\"decoded.field\",\"type\":\"decoder-processor\"}],\"tags\":[]}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v1/logs/config/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"id\":\"BEg5CcvmSfyIGoMi9PWyTQ\",\"type\":\"pipeline\",\"name\":\"testDecoderProcessor\",\"is_enabled\":false,\"is_read_only\":false,\"filter\":{\"query\":\"source:python\"},\"processors\":[{\"name\":\"test_decoder\",\"is_enabled\":true,\"source\":\"encoded.field\",\"target\":\"decoded.field\",\"binary_to_text_encoding\":\"base16\",\"input_representation\":\"utf_8\",\"type\":\"decoder-processor\"}],\"tags\":[]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "728a82ed-3e81-2a07-5bff-989367bf7d5f" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v1/logs/config/pipelines/BEg5CcvmSfyIGoMi9PWyTQ", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "eb454a5f-3982-ee2a-befb-92c5980ba3f3" + } +] \ No newline at end of file diff --git a/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature b/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature index bf817fda07c..66305bbbaf2 100644 --- a/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature +++ b/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature @@ -70,6 +70,13 @@ Feature: Logs Pipelines When the request is sent Then the response status is 200 OK + @team:DataDog/event-platform-experience + Scenario: Create a pipeline with Decoder Processor returns "OK" response + Given new "CreateLogsPipeline" request + And body with value {"filter": 
{"query": "source:python"}, "name": "testDecoderProcessor", "processors": [{"type": "decoder-processor", "is_enabled": true, "name": "test_decoder", "source": "encoded.field", "target": "decoded.field", "binary_to_text_encoding": "base16", "input_representation": "utf_8"}], "tags": []} + When the request is sent + Then the response status is 200 OK + @team:DataDog/event-platform-experience Scenario: Create a pipeline with Span Id Remapper returns "OK" response Given new "CreateLogsPipeline" request