diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 52c13bf6e5..072441e139 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6677,8 +6677,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6709,6 +6712,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33628,8 +33633,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. @@ -33670,6 +33678,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35374,13 +35384,16 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -35407,6 +35420,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35432,12 +35447,15 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -35447,8 +35465,8 @@ components: $ref: '#/components/schemas/ObservabilityPipelineFieldValue' type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: add-fields-processor type: string @@ -35466,6 +35484,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. 
The value should always be `add_fields`. @@ -35475,15 +35495,60 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: 'The `add_hostname` processor adds the hostname to log events. + + + **Supported pipeline types:** logs' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: amazon-firehose-source type: string tls: @@ -35494,6 +35559,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35504,7 +35571,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35532,6 +35602,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35575,8 +35647,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35617,6 +35692,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. enum: @@ -35653,14 +35730,17 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: aws-s3-source type: string region: @@ -35676,6 +35756,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35686,8 +35768,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35727,6 +35812,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35752,6 +35839,42 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem. + + + **Supported pipeline types:** logs' + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - logs + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35765,12 +35888,14 @@ components: example: - id: datadog-logs-destination inputs: - - filter-processor + - my-processor-group type: datadog_logs items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array - processors: + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' + processor_groups: description: A list of processor groups that transform or enrich log data. 
example: - enabled: true @@ -35791,6 +35916,17 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorGroup' type: array + processors: + deprecated: true + description: 'A list of processor groups that transform or enrich log data. + + + **Deprecated:** This field is deprecated, you should now use the processor_groups + field.' + example: [] + items: + $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorGroup' + type: array sources: description: A list of configured data sources for the pipeline. example: @@ -35806,25 +35942,40 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. 
Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35898,45 +36049,53 @@ components: description: A processor for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35964,6 +36123,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -36007,14 +36168,17 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -36043,6 +36207,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. 
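Taken together, the additions in the hunks above admit a pipeline configuration like the following minimal sketch, which exercises the new `pipeline_type` field, the `processor_groups` field that supersedes the deprecated `processors` list, and the new `add_hostname` processor. Component IDs and the wiring between them are illustrative, not taken from the spec:

    config:
      pipeline_type: logs              # new field; defaults to logs when omitted
      sources:
        - id: datadog-agent-source
          type: datadog_agent
      processor_groups:                # supersedes the deprecated top-level processors field
        - id: processor-group-1
          enabled: true
          filters: []
          processors:
            - id: add-hostname-processor   # new add_hostname processor
              type: add_hostname
              include: service:my-service
              enabled: true
              inputs:
                - datadog-agent-source
      destinations:
        - id: datadog-logs-destination
          type: datadog_logs
          inputs:
            - processor-group-1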
@@ -36118,12 +36284,16 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. + + + **Supported pipeline types:** logs, metrics' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: datadog-agent-source type: string tls: @@ -36134,6 +36304,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -36144,7 +36317,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -36165,6 +36341,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -36174,21 +36352,60 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. + enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. 
Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: datadog-tags-processor type: string @@ -36219,6 +36436,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -36261,12 +36480,15 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -36298,6 +36520,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36318,8 +36542,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. + + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36327,6 +36554,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36346,6 +36575,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36360,6 +36591,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36499,13 +36747,17 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean file: @@ -36521,6 +36773,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36534,6 +36788,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36543,6 +36799,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -36559,26 +36837,29 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. + + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36589,6 +36870,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. 
@@ -36599,11 +36883,14 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: fluent-source type: string @@ -36615,6 +36902,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36625,11 +36914,14 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: fluent-source type: string @@ -36641,6 +36933,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36665,12 +36959,15 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -36695,6 +36992,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36789,7 +37088,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36823,6 +37125,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36846,7 +37150,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. 
+ + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36888,6 +37195,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36930,8 +37239,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36969,6 +37281,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36989,17 +37303,20 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' decoding: $ref: '#/components/schemas/ObservabilityPipelineDecoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: google-pubsub-source type: string project: @@ -37021,6 +37338,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. @@ -37030,18 +37349,103 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. + + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. 
+ enum: + - none + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - NONE + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' decoding: $ref: '#/components/schemas/ObservabilityPipelineDecoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: http-client-source type: string scrape_interval_secs: @@ -37063,14 +37467,18 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: + - none - basic - bearer example: basic type: string x-enum-varnames: + - NONE - BASIC - BEARER ObservabilityPipelineHttpClientSourceType: @@ -37083,8 +37491,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -37104,6 +37515,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. enum: @@ -37123,50 +37536,125 @@ components: type: string x-enum-varnames: - HTTP_SERVER - ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + ObservabilityPipelineKafkaDestination: + description: 'The `kafka` destination sends logs to Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: - group_id: - description: Consumer group ID used by the Kafka client. 
- example: consumer-group-0 + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers type: string id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). - example: kafka-source + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id type: string librdkafka_options: - description: Optional list of advanced Kafka client configuration options, + description: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer tls: $ref: '#/components/schemas/ObservabilityPipelineTls' - topics: - description: A list of Kafka topic names to subscribe to. The source ingests - messages from each topic specified. - example: - - topic1 - - topic2 - items: - type: string - type: array + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string type: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' required: - id - type - - group_id - - topics + - inputs + - topic + - encoding type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: + x-pipeline-types: + - logs + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. 
+ enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. properties: name: description: The name of the `librdkafka` configuration option to set. @@ -37181,12 +37669,68 @@ components: - name - value type: object - ObservabilityPipelineKafkaSourceSasl: + ObservabilityPipelineKafkaSasl: description: Specifies the SASL mechanism for authenticating with a Kafka cluster. properties: mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 + ObservabilityPipelineKafkaSource: + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' + properties: + group_id: + description: Consumer group ID used by the Kafka client. + example: consumer-group-0 + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: kafka-source + type: string + librdkafka_options: + description: Optional list of advanced Kafka client configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topics: + description: A list of Kafka topic names to subscribe to. The source ingests + messages from each topic specified. + example: + - topic1 + - topic2 + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + required: + - id + - type + - group_id + - topics + type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37197,12 +37741,15 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: logstash-source type: string tls: @@ -37213,6 +37760,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. 
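The new `kafka` destination composes with the shared `ObservabilityPipelineKafkaSasl` and `ObservabilityPipelineKafkaLibrdkafkaOption` schemas introduced above, as in this sketch; the topic name, rate limits, and the `librdkafka` option shown are illustrative values, not defaults from the spec:

    destinations:
      - id: kafka-destination
        type: kafka
        inputs:
          - filter-processor
        topic: logs-topic
        encoding: json                 # json or raw_message
        compression: gzip              # none, gzip, snappy, lz4, or zstd
        key_field: message_id          # optional field used as the Kafka message key
        message_timeout_ms: 300000
        rate_limit_num: 1000
        rate_limit_duration_secs: 1
        sasl:
          mechanism: SCRAM-SHA-256     # PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512
        librdkafka_options:
          - name: queue.buffering.max.messages   # illustrative producer option
            value: "100000"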
@@ -37237,13 +37786,112 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query that determines which metrics the processor + targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + include: + description: A Datadog search query used to determine which metrics this + rule targets. + example: '*' + type: string + keys: + description: A list of tag keys to include or exclude. + example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - include + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. + enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37267,6 +37915,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. 
enum: @@ -37287,13 +37937,16 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -37320,6 +37973,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37379,7 +38034,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37404,6 +38062,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. @@ -37413,9 +38073,56 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: 'The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + + + **Supported pipeline types:** logs' + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + x-pipeline-types: + - logs + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. 
+ + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37426,7 +38133,7 @@ components: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -37454,6 +38161,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37533,14 +38242,17 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean field: @@ -37567,6 +38279,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. @@ -37576,37 +38290,101 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: 'The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + + + **Supported pipeline types:** logs' + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. 
+ minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: quota-processor type: string @@ -37641,6 +38419,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37651,6 +38431,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37678,7 +38460,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37722,13 +38505,16 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. 
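The reworked `quota` processor above now documents the `drop_events`/`overflow_action` exclusivity and adds `too_many_buckets_action`, which reuses the existing overflow-action enum. A hedged sketch of how this could look with the Python models; the `name` value and the `DROP` enum member are assumptions, since neither appears verbatim in this hunk.

from datadog_api_client.v2.model.observability_pipeline_quota_processor import (
    ObservabilityPipelineQuotaProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import (
    ObservabilityPipelineQuotaProcessorLimit,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit_enforce_type import (
    ObservabilityPipelineQuotaProcessorLimitEnforceType,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action import (
    ObservabilityPipelineQuotaProcessorOverflowAction,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_type import (
    ObservabilityPipelineQuotaProcessorType,
)

processor = ObservabilityPipelineQuotaProcessor(
    id="quota-processor",
    type=ObservabilityPipelineQuotaProcessorType.QUOTA,
    include="service:my-service",  # hypothetical filter
    name="my-quota",  # assumed quota name (pre-existing required field)
    enabled=True,
    limit=ObservabilityPipelineQuotaProcessorLimit(
        enforce_type=ObservabilityPipelineQuotaProcessorLimitEnforceType.EVENTS,
        limit=1000,  # enforce the quota after 1000 matching events per day
    ),
    # Set either `drop_events` or `overflow_action`, not both (see the note above).
    overflow_action=ObservabilityPipelineQuotaProcessorOverflowAction.DROP,
    # New in this change: action taken when the bucket limit is exceeded.
    too_many_buckets_action=ObservabilityPipelineQuotaProcessorOverflowAction.DROP,
)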
example: true type: boolean group_by: @@ -37764,6 +38550,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37817,12 +38605,15 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -37834,9 +38625,9 @@ components: type: string type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: remove-fields-processor type: string include: @@ -37853,6 +38644,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37863,12 +38656,15 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -37898,6 +38694,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37929,8 +38727,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37959,6 +38760,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37969,13 +38772,16 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. 
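For the `remove_fields` processor above, whose schema is fully visible in this hunk, a minimal sketch with the generated models; the field names in the list are hypothetical.

from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import (
    ObservabilityPipelineRemoveFieldsProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor_type import (
    ObservabilityPipelineRemoveFieldsProcessorType,
)

processor = ObservabilityPipelineRemoveFieldsProcessor(
    id="remove-fields-processor",
    type=ObservabilityPipelineRemoveFieldsProcessorType.REMOVE_FIELDS,
    include="service:my-service",  # hypothetical filter
    fields=["debug_payload", "internal_trace_id"],  # hypothetical fields to delete
    enabled=True,
)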
Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: rsyslog-source type: string mode: @@ -37989,6 +38795,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. @@ -37999,18 +38807,31 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: sample-processor type: string @@ -38024,20 +38845,17 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -38048,19 +38866,22 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. + description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: sensitive-scanner type: string include: @@ -38083,6 +38904,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. 
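The `sample` processor schema above replaces the integer `rate` (1 in N) with a now-required `percentage` and adds an optional `group_by` list, where each group is sampled independently. A minimal sketch reflecting the new shape, using the schema's own example values:

from datadog_api_client.v2.model.observability_pipeline_sample_processor import (
    ObservabilityPipelineSampleProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import (
    ObservabilityPipelineSampleProcessorType,
)

processor = ObservabilityPipelineSampleProcessor(
    id="sample-processor",
    type=ObservabilityPipelineSampleProcessorType.SAMPLE,
    include="service:my-service",
    percentage=10.0,  # keep 10% of matching events; required after this change
    group_by=["service", "host"],  # optional: sample each group independently
    enabled=True,
)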
oneOf: @@ -38204,6 +39027,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -38259,6 +39087,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38400,7 +39233,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38424,6 +39260,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. enum: @@ -38448,8 +39286,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38482,6 +39323,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. enum: @@ -38576,14 +39419,17 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: socket-source type: string mode: @@ -38599,6 +39445,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38739,9 +39587,79 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: 'The `split_array` processor splits array fields into separate + events based on configured rules. + + + **Supported pipeline types:** logs' + properties: + arrays: + description: A list of array split configurations. 
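Both sensitive-data-scanner pattern option schemas above gain an optional `description`. A hedged sketch of setting it on a library pattern; the module path follows the client's usual generated naming, and the `id` value is a hypothetical library pattern identifier, as the hunk does not show one.

from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_options import (
    ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions,
)

options = ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions(
    id="credit_card",  # hypothetical predefined pattern id from the library
    description="Credit card pattern",  # new optional field from this change
)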
+ items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38753,9 +39671,9 @@ components: encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: splunk-hec-destination type: string index: @@ -38781,6 +39699,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. enum: @@ -38801,13 +39721,16 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). 
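The new `split_array` processor defined above takes between 1 and 15 array configurations, each naming the array field to split and its own filter. A minimal sketch with the generated models, using the schema's example values:

from datadog_api_client.v2.model.observability_pipeline_split_array_processor import (
    ObservabilityPipelineSplitArrayProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import (
    ObservabilityPipelineSplitArrayProcessorArrayConfig,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import (
    ObservabilityPipelineSplitArrayProcessorType,
)

processor = ObservabilityPipelineSplitArrayProcessor(
    id="split-array-processor",
    type=ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY,
    include="*",  # the schema recommends `*` for split_array
    enabled=True,
    arrays=[  # 1 to 15 configurations allowed
        ObservabilityPipelineSplitArrayProcessorArrayConfig(
            field="tags",  # path to the array field to split
            include="*",
        ),
    ],
)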
example: splunk-hec-source type: string tls: @@ -38818,6 +39741,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38831,12 +39756,15 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: splunk-tcp-source type: string tls: @@ -38847,6 +39775,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38857,7 +39787,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38898,6 +39831,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38935,12 +39870,15 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. + description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: sumo-logic-source type: string type: @@ -38949,6 +39887,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38959,8 +39899,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38989,6 +39932,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. 
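For the TCP-based Splunk source touched above, a minimal sketch; this model predates the change, so only the required `id` and `type` are shown, with TLS left optional as the schema allows.

from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import (
    ObservabilityPipelineSplunkTcpSource,
)
from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source_type import (
    ObservabilityPipelineSplunkTcpSourceType,
)

source = ObservabilityPipelineSplunkTcpSource(
    id="splunk-tcp-source",
    type=ObservabilityPipelineSplunkTcpSourceType.SPLUNK_TCP,
    # tls=ObservabilityPipelineTls(...) can be added for secure transmission
)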
@@ -38999,13 +39944,16 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: syslog-ng-source type: string mode: @@ -39019,6 +39967,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. @@ -39039,13 +39989,16 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean group_by: @@ -39086,6 +40039,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. @@ -76499,6 +77454,222 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. 
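The pipelines CRUD endpoints registered below live at `/api/v2/obs-pipelines/pipelines`. Because each operation is flagged `x-unstable`, callers must opt in before use; a minimal sketch of listing pipelines with the Python client, using the pagination parameters declared on the endpoint:

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi

configuration = Configuration()
# Preview endpoint: enable the unstable operation first (see the
# "v2.list_pipelines" flag added to configuration.py further down).
configuration.unstable_operations["list_pipelines"] = True

with ApiClient(configuration) as api_client:
    api = ObservabilityPipelinesApi(api_client)
    response = api.list_pipelines(page_size=10, page_number=0)
    print(response)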
+ operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/{pipeline_id}: + delete: + description: Delete a pipeline. + operationId: DeletePipeline + parameters: + - description: The ID of the pipeline to delete. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '204': + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Delete a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_delete + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + get: + description: Get a specific pipeline by its ID. + operationId: GetPipeline + parameters: + - description: The ID of the pipeline to retrieve. 
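A matching sketch for the delete endpoint defined above; the pipeline ID is a placeholder, and the operation returns no body on success.

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi

configuration = Configuration()
configuration.unstable_operations["delete_pipeline"] = True

with ApiClient(configuration) as api_client:
    api = ObservabilityPipelinesApi(api_client)
    # A 204 response indicates successful deletion; no body is returned.
    api.delete_pipeline(pipeline_id="11111111-2222-3333-4444-555555555555")  # placeholder id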
+ in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Get a specific pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + put: + description: Update a pipeline. + operationId: UpdatePipeline + parameters: + - description: The ID of the pipeline to update. + in: path + name: pipeline_id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Update a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -80157,222 +81328,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. 
- operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' - operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: - delete: - description: Delete a pipeline. - operationId: DeletePipeline - parameters: - - description: The ID of the pipeline to delete. - in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '204': - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Delete a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_delete - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - get: - description: Get a specific pipeline by its ID. - operationId: GetPipeline - parameters: - - description: The ID of the pipeline to retrieve. 
- in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Get a specific pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - put: - description: Update a pipeline. - operationId: UpdatePipeline - parameters: - - description: The ID of the pipeline to update. - in: path - name: pipeline_id - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '404': - $ref: '#/components/responses/NotFoundResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Update a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/restriction_policy/{resource_id}: delete: description: Deletes the restriction policy associated with a specified resource. diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index d697650e88..55303f00af 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -15481,6 +15481,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source module -------------------------------------------------------------------------------------------- @@ -15579,6 +15593,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination module +-------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -15593,6 +15621,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_config\_destination\_item :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_config\_pipeline\_type module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_config_pipeline_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config\_processor\_group module -------------------------------------------------------------------------------------- @@ -15712,6 +15747,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_logs\_destinatio :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_metrics\_destination module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_metrics\_destination\_type module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_tags\_processor module -------------------------------------------------------------------------------------- @@ -15775,6 +15824,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destinatio :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_data\_stream module +------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_type module ---------------------------------------------------------------------------------------------- @@ -15852,6 +15908,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_proces :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_reference\_table module +------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_field\_value module -------------------------------------------------------------------------- @@ -16048,6 +16111,48 @@ datadog\_api\_client.v2.model.observability\_pipeline\_google\_pub\_sub\_source\ :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_auth\_strategy module +------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_compression module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_compression\_algorithm module +--------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_encoding module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_source module ---------------------------------------------------------------------------------- @@ -16090,24 +16195,59 @@ datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source\_typ :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module ---------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination module +-------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_librdkafka\_option module ------------------------------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_compression module +--------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_sasl module ---------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_encoding module +------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_type module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_librdkafka\_option module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl module +------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl\_mechanism module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source :members: :show-inheritance: @@ -16139,6 +16279,41 @@ datadog\_api\_client.v2.model.observability\_pipeline\_metadata\_entry module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule\_action module +--------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule\_mode module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_metric\_value module --------------------------------------------------------------------------- @@ -16216,6 +16391,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_open\_search\_destination :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor module ------------------------------------------------------------------------------------ @@ -16265,10 +16454,17 @@ datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_t :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module ------------------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor module +----------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type :members: :show-inheritance: @@ -16832,6 +17028,27 @@ datadog\_api\_client.v2.model.observability\_pipeline\_spec\_data module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor module +------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_array\_config module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination module -------------------------------------------------------------------------------------- diff --git a/examples/v2/observability-pipelines/CreatePipeline.py b/examples/v2/observability-pipelines/CreatePipeline.py index 87b50b26c0..54e6232db7 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.py +++ b/examples/v2/observability-pipelines/CreatePipeline.py @@ -41,7 +41,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/examples/v2/observability-pipelines/UpdatePipeline.py b/examples/v2/observability-pipelines/UpdatePipeline.py index c8e05195fa..e1c8cf4faa 100644 --- a/examples/v2/observability-pipelines/UpdatePipeline.py +++ b/examples/v2/observability-pipelines/UpdatePipeline.py @@ -45,7 +45,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/examples/v2/observability-pipelines/ValidatePipeline.py b/examples/v2/observability-pipelines/ValidatePipeline.py index a55d4a6368..e950443ff5 100644 --- a/examples/v2/observability-pipelines/ValidatePipeline.py +++ b/examples/v2/observability-pipelines/ValidatePipeline.py @@ -41,7 +41,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/src/datadog_api_client/configuration.py b/src/datadog_api_client/configuration.py index 5f29613298..cb031ef99a 100644 --- a/src/datadog_api_client/configuration.py +++ b/src/datadog_api_client/configuration.py @@ -359,6 +359,12 @@ def __init__( "v2.update_monitor_user_template": False, "v2.validate_existing_monitor_user_template": False, "v2.validate_monitor_user_template": False, + "v2.create_pipeline": False, + "v2.delete_pipeline": False, + "v2.get_pipeline": False, + "v2.list_pipelines": False, + "v2.update_pipeline": False, + "v2.validate_pipeline": False, "v2.list_role_templates": False, "v2.create_connection": False, "v2.delete_connection": False, @@ -370,12 +376,6 @@ def __init__( "v2.query_event_filtered_users": False, "v2.query_users": False, "v2.update_connection": False, - "v2.create_pipeline": False, - "v2.delete_pipeline": False, - "v2.get_pipeline": False, - "v2.list_pipelines": False, - "v2.update_pipeline": False, - "v2.validate_pipeline": False, "v2.create_scorecard_outcomes_batch": False, "v2.create_scorecard_rule": False, "v2.delete_scorecard_rule": False, diff --git 
a/src/datadog_api_client/v2/api/observability_pipelines_api.py b/src/datadog_api_client/v2/api/observability_pipelines_api.py index 2c14f8497a..7422aedcd5 100644 --- a/src/datadog_api_client/v2/api/observability_pipelines_api.py +++ b/src/datadog_api_client/v2/api/observability_pipelines_api.py @@ -31,7 +31,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ObservabilityPipeline,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "create_pipeline", "http_method": "POST", "version": "v2", @@ -51,7 +51,7 @@ def __init__(self, api_client=None): settings={ "response_type": None, "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/{pipeline_id}", "operation_id": "delete_pipeline", "http_method": "DELETE", "version": "v2", @@ -74,7 +74,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ObservabilityPipeline,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/{pipeline_id}", "operation_id": "get_pipeline", "http_method": "GET", "version": "v2", @@ -97,7 +97,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ListPipelinesResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "list_pipelines", "http_method": "GET", "version": "v2", @@ -124,7 +124,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ObservabilityPipeline,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/{pipeline_id}", "operation_id": "update_pipeline", "http_method": "PUT", "version": "v2", @@ -150,7 +150,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ValidationResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/validate", "operation_id": "validate_pipeline", "http_method": "POST", "version": "v2", diff --git a/src/datadog_api_client/v2/model/azure_storage_destination.py b/src/datadog_api_client/v2/model/azure_storage_destination.py index 283e2df4aa..09c1195596 100644 --- a/src/datadog_api_client/v2/model/azure_storage_destination.py +++ b/src/datadog_api_client/v2/model/azure_storage_destination.py @@ -50,6 +50,8 @@ def __init__( """ The ``azure_storage`` destination forwards logs to an Azure Blob Storage container. + **Supported pipeline types:** logs + :param blob_prefix: Optional prefix for blobs written to the container. :type blob_prefix: str, optional diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py index 44868adb77..9b70faf693 100644 --- a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py @@ -54,6 +54,8 @@ def __init__( """ The ``microsoft_sentinel`` destination forwards logs to Microsoft Sentinel. 
+ **Supported pipeline types:** logs + :param client_id: Azure AD client ID used for authentication. :type client_id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py index afef01d8e9..44801c3dd5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py @@ -63,10 +63,12 @@ def __init__( """ The ``add_env_vars`` processor adds environment variable values to log events. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param id: The unique identifier for this component. Used to reference this processor in the pipeline. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py index 47e7dd254c..795014bc80 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py @@ -59,16 +59,18 @@ def __init__( """ The ``add_fields`` processor adds static key-value fields to logs. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. :type fields: [ObservabilityPipelineFieldValue] - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param include: A Datadog search query used to determine which logs this processor targets. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py new file mode 100644 index 0000000000..b28869555f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py @@ -0,0 +1,81 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + +class ObservabilityPipelineAddHostnameProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineAddHostnameProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineAddHostnameProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``add_hostname`` processor adds the hostname to log events. + + **Supported pipeline types:** logs + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Indicates whether the processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param type: The processor type. The value should always be ``add_hostname``. + :type type: ObservabilityPipelineAddHostnameProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py new file mode 100644 index 0000000000..3f0c2dd4ff --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAddHostnameProcessorType(ModelSimple): + """ + The processor type. The value should always be `add_hostname`. + + :param value: If omitted defaults to "add_hostname". Must be one of ["add_hostname"]. 
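The new model above pairs with the `ObservabilityPipelineAddHostnameProcessorType` enum defined in the companion file below; a minimal construction sketch using the required fields from the definition above (the `id` and `include` values are illustrative):

from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import (
    ObservabilityPipelineAddHostnameProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import (
    ObservabilityPipelineAddHostnameProcessorType,
)

# All four required fields come from the model definition above;
# display_name stays unset unless explicitly provided.
processor = ObservabilityPipelineAddHostnameProcessor(
    id="add-hostname-processor",
    include="service:my-service",
    enabled=True,
    type=ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME,
)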
+ :type value: str + """ + + allowed_values = { + "add_hostname", + } + ADD_HOSTNAME: ClassVar["ObservabilityPipelineAddHostnameProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME = ObservabilityPipelineAddHostnameProcessorType( + "add_hostname" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py index cdb35acf00..8bbddd8850 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py @@ -55,11 +55,13 @@ def __init__( """ The ``amazon_data_firehose`` source ingests logs from AWS Data Firehose. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py index 639d156f8b..003a449c9c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py @@ -60,6 +60,8 @@ def __init__( """ The ``amazon_opensearch`` destination writes logs to Amazon OpenSearch. + **Supported pipeline types:** logs + :param auth: Authentication settings for the Amazon OpenSearch destination. The ``strategy`` field determines whether basic or AWS-based authentication is used. :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py index 1a7af99838..0018d410eb 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py @@ -76,6 +76,8 @@ def __init__( """ The ``amazon_s3`` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). 
:type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py index fe4730a921..54806147bd 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py @@ -59,11 +59,13 @@ def __init__( The ``amazon_s3`` source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param region: AWS region where the S3 bucket resides. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py index 6757c5f25c..2f286a9755 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py @@ -67,6 +67,8 @@ def __init__( """ The ``amazon_security_lake`` destination sends your logs to Amazon Security Lake. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py new file mode 100644 index 0000000000..90a08c3a2d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + +class ObservabilityPipelineCloudPremDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineCloudPremDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineCloudPremDestinationType, **kwargs): + """ + The ``cloud_prem`` destination sends logs to Datadog CloudPrem. + + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``cloud_prem``. + :type type: ObservabilityPipelineCloudPremDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py new file mode 100644 index 0000000000..88110c24ef --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineCloudPremDestinationType(ModelSimple): + """ + The destination type. The value should always be `cloud_prem`. + + :param value: If omitted defaults to "cloud_prem". Must be one of ["cloud_prem"]. 
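Like the other destination models in this diff, the new class takes only an `id`, its `inputs`, and the type enum; a minimal sketch with illustrative component IDs:

from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import (
    ObservabilityPipelineCloudPremDestination,
)
from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import (
    ObservabilityPipelineCloudPremDestinationType,
)

# Routes the output of an upstream component (referenced by ID) to Datadog CloudPrem.
destination = ObservabilityPipelineCloudPremDestination(
    id="cloud-prem-destination",
    inputs=["add-hostname-processor"],
    type=ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM,
)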
+ :type value: str + """ + + allowed_values = { + "cloud_prem", + } + CLOUD_PREM: ClassVar["ObservabilityPipelineCloudPremDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM = ObservabilityPipelineCloudPremDestinationType("cloud_prem") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config.py b/src/datadog_api_client/v2/model/observability_pipeline_config.py index 48cf842a5d..530b96b8c6 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config.py @@ -17,99 +17,117 @@ from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) + from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, + ) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) from datadog_api_client.v2.model.observability_pipeline_config_source_item import ( ObservabilityPipelineConfigSourceItem, ) - from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( - ObservabilityPipelineDatadogLogsDestination, + from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) - from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( - ObservabilityPipelineGoogleCloudStorageDestination, + from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( + ObservabilityPipelineAmazonSecurityLakeDestination, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( - ObservabilityPipelineSplunkHecDestination, + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( - ObservabilityPipelineSumoLogicDestination, + from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( + ObservabilityPipelineDatadogLogsDestination, ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( - ObservabilityPipelineRsyslogDestination, - ) - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( - ObservabilityPipelineSyslogNgDestination, - ) - from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination - from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from 
datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( ObservabilityPipelineGoogleChronicleDestination, ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( + ObservabilityPipelineGooglePubSubDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) + from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( - ObservabilityPipelineSentinelOneDestination, - ) from datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( ObservabilityPipelineOpenSearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( - ObservabilityPipelineAmazonOpenSearchDestination, + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, ) from datadog_api_client.v2.model.observability_pipeline_socket_destination import ( ObservabilityPipelineSocketDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( - ObservabilityPipelineAmazonSecurityLakeDestination, + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, ) - from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( - ObservabilityPipelineGooglePubSubDestination, + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( - ObservabilityPipelineSplunkTcpSource, - ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( - ObservabilityPipelineSplunkHecSource, + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ObservabilityPipelineAmazonS3Source - from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from 
datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( ObservabilityPipelineFluentBitSource, ) - from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( - ObservabilityPipelineHttpServerSource, - ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( - ObservabilityPipelineSumoLogicSource, - ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource - from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( - ObservabilityPipelineAmazonDataFirehoseSource, - ) + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( ObservabilityPipelineGooglePubSubSource, ) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( ObservabilityPipelineHttpClientSource, ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) class ObservabilityPipelineConfig(ModelNormal): @@ -118,6 +136,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) + from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, + ) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) @@ -127,12 +148,16 @@ def openapi_types(_): return { "destinations": ([ObservabilityPipelineConfigDestinationItem],), + "pipeline_type": (ObservabilityPipelineConfigPipelineType,), + "processor_groups": ([ObservabilityPipelineConfigProcessorGroup],), "processors": ([ObservabilityPipelineConfigProcessorGroup],), "sources": ([ObservabilityPipelineConfigSourceItem],), } attribute_map = { "destinations": "destinations", + "pipeline_type": "pipeline_type", + "processor_groups": "processor_groups", "processors": "processors", "sources": "sources", } @@ -142,48 +167,55 @@ def __init__( destinations: List[ Union[ ObservabilityPipelineConfigDestinationItem, - ObservabilityPipelineDatadogLogsDestination, + 
ObservabilityPipelineHttpClientDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination, - ObservabilityPipelineGoogleCloudStorageDestination, - ObservabilityPipelineSplunkHecDestination, - ObservabilityPipelineSumoLogicDestination, - ObservabilityPipelineElasticsearchDestination, - ObservabilityPipelineRsyslogDestination, - ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination, - MicrosoftSentinelDestination, + ObservabilityPipelineCloudPremDestination, + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, + MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination, - ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, - ObservabilityPipelineAmazonOpenSearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination, - ObservabilityPipelineAmazonSecurityLakeDestination, - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineDatadogMetricsDestination, ] ], sources: List[ Union[ ObservabilityPipelineConfigSourceItem, - ObservabilityPipelineKafkaSource, ObservabilityPipelineDatadogAgentSource, - ObservabilityPipelineSplunkTcpSource, - ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, - ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - ObservabilityPipelineHttpServerSource, - ObservabilityPipelineSumoLogicSource, - ObservabilityPipelineRsyslogSource, - ObservabilityPipelineSyslogNgSource, - ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineFluentdSource, ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineKafkaSource, ObservabilityPipelineLogstashSource, + ObservabilityPipelineRsyslogSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineOpentelemetrySource, ] ], + pipeline_type: Union[ObservabilityPipelineConfigPipelineType, UnsetType] = unset, + processor_groups: Union[List[ObservabilityPipelineConfigProcessorGroup], UnsetType] = unset, processors: Union[List[ObservabilityPipelineConfigProcessorGroup], UnsetType] = unset, **kwargs, ): @@ -193,12 +225,24 @@ def __init__( :param destinations: A list of destination components where processed logs are sent. :type destinations: [ObservabilityPipelineConfigDestinationItem] + :param pipeline_type: The type of data being ingested. Defaults to ``logs`` if not specified. + :type pipeline_type: ObservabilityPipelineConfigPipelineType, optional + + :param processor_groups: A list of processor groups that transform or enrich log data. 
+ :type processor_groups: [ObservabilityPipelineConfigProcessorGroup], optional + :param processors: A list of processor groups that transform or enrich log data. + + **Deprecated:** This field is deprecated; use the `processor_groups` field instead. :type processors: [ObservabilityPipelineConfigProcessorGroup], optional :param sources: A list of configured data sources for the pipeline. :type sources: [ObservabilityPipelineConfigSourceItem] """ + if pipeline_type is not unset: + kwargs["pipeline_type"] = pipeline_type + if processor_groups is not unset: + kwargs["processor_groups"] = processor_groups if processors is not unset: kwargs["processors"] = processors super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index df018bec4e..7f4f8e2384 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -15,18 +15,33 @@ def __init__(self, **kwargs): """ A destination for the pipeline. + :param auth_strategy: HTTP authentication strategy. + :type auth_strategy: ObservabilityPipelineHttpClientDestinationAuthStrategy, optional + + :param compression: Compression configuration for HTTP requests. + :type compression: ObservabilityPipelineHttpClientDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineHttpClientDestinationEncoding + :param id: The unique identifier for this component. :type id: str - :param inputs: A list of component IDs whose output is used as the `input` for this component. + :param inputs: A list of component IDs whose output is used as the input for this component. :type inputs: [str] - :param type: The destination type. The value should always be `datadog_logs`. - :type type: ObservabilityPipelineDatadogLogsDestinationType + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional - :param auth: AWS authentication credentials used for accessing AWS services such as S3. - If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). - :type auth: ObservabilityPipelineAwsAuth, optional + :param type: The destination type. The value should always be `http_client`. + :type type: ObservabilityPipelineHttpClientDestinationType + + :param auth: Authentication settings for the Amazon OpenSearch destination. + The `strategy` field determines whether basic or AWS-based authentication is used. + :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth + + :param bulk_index: The index to write logs to. + :type bulk_index: str, optional :param bucket: S3 bucket name. :type bucket: str @@ -40,8 +55,26 @@ def __init__(self, **kwargs): :param storage_class: S3 storage class. :type storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass - :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. - :type tls: ObservabilityPipelineTls, optional + :param custom_source_name: Custom source name for the logs in Security Lake. + :type custom_source_name: str + + :param blob_prefix: Optional prefix for blobs written to the container.
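With the reworked `ObservabilityPipelineConfig.__init__` above, a config can now set `pipeline_type` and `processor_groups` explicitly. A hedged sketch follows; the `datadog_agent` source and `datadog_logs` destination constructors are not part of this diff and are assumed to keep their current generated signatures:

from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig
from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import (
    ObservabilityPipelineConfigPipelineType,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import (
    ObservabilityPipelineDatadogAgentSource,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source_type import (
    ObservabilityPipelineDatadogAgentSourceType,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import (
    ObservabilityPipelineDatadogLogsDestination,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import (
    ObservabilityPipelineDatadogLogsDestinationType,
)

config = ObservabilityPipelineConfig(
    sources=[
        ObservabilityPipelineDatadogAgentSource(
            id="datadog-agent-source",
            type=ObservabilityPipelineDatadogAgentSourceType.DATADOG_AGENT,
        )
    ],
    destinations=[
        ObservabilityPipelineDatadogLogsDestination(
            id="datadog-logs-destination",
            inputs=["datadog-agent-source"],
            type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS,
        )
    ],
    # Optional: the API defaults to `logs` when pipeline_type is omitted.
    pipeline_type=ObservabilityPipelineConfigPipelineType.LOGS,
    # Prefer processor_groups; the legacy `processors` field is deprecated.
    processor_groups=[],
)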
+ :type blob_prefix: str, optional + + :param container_name: The name of the Azure Blob Storage container to store logs in. + :type container_name: str + + :param api_version: The Elasticsearch API version to use. Set to `auto` to auto-detect. + :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + + :param customer_id: The Google Chronicle customer ID. + :type customer_id: str + + :param log_type: The log type metadata associated with the Chronicle destination. + :type log_type: str, optional :param acl: Access control list setting for objects written to the bucket. :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl, optional @@ -49,45 +82,35 @@ def __init__(self, **kwargs): :param metadata: Custom metadata to attach to each object uploaded to the GCS bucket. :type metadata: [ObservabilityPipelineMetadataEntry], optional - :param auto_extract_timestamp: If `true`, Splunk tries to extract timestamps from incoming log events. - If `false`, Splunk assigns the time the event was received. - :type auto_extract_timestamp: bool, optional - - :param encoding: Encoding format for log events. - :type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional - - :param index: Optional name of the Splunk index where logs are written. - :type index: str, optional - - :param sourcetype: The Splunk sourcetype to assign to log events. - :type sourcetype: str, optional + :param project: The GCP project ID that owns the Pub/Sub topic. + :type project: str - :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. - :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + :param topic: The Pub/Sub topic name to publish logs to. + :type topic: str - :param header_host_name: Optional override for the host name header. - :type header_host_name: str, optional + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional - :param header_source_category: Optional override for the source category header. - :type header_source_category: str, optional + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional - :param header_source_name: Optional override for the source name header. - :type header_source_name: str, optional + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional - :param api_version: The Elasticsearch API version to use. Set to `auto` to auto-detect. - :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional - :param bulk_index: The index to write logs to in Elasticsearch. - :type bulk_index: str, optional + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. + :type rate_limit_duration_secs: int, optional - :param keepalive: Optional socket keepalive duration in milliseconds. - :type keepalive: int, optional + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. 
+ :type rate_limit_num: int, optional - :param blob_prefix: Optional prefix for blobs written to the container. - :type blob_prefix: str, optional + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional - :param container_name: The name of the Azure Blob Storage container to store logs in. - :type container_name: str + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional :param client_id: Azure AD client ID used for authentication. :type client_id: str @@ -101,11 +124,8 @@ def __init__(self, **kwargs): :param tenant_id: Azure AD tenant ID. :type tenant_id: str - :param customer_id: The Google Chronicle customer ID. - :type customer_id: str - - :param log_type: The log type metadata associated with the Chronicle destination. - :type log_type: str, optional + :param keepalive: Optional socket keepalive duration in milliseconds. + :type keepalive: int, optional :param framing: Framing method configuration. :type framing: ObservabilityPipelineSocketDestinationFraming @@ -113,17 +133,27 @@ def __init__(self, **kwargs): :param mode: Protocol used to send logs. :type mode: ObservabilityPipelineSocketDestinationMode - :param custom_source_name: Custom source name for the logs in Security Lake. - :type custom_source_name: str + :param auto_extract_timestamp: If `true`, Splunk tries to extract timestamps from incoming log events. + If `false`, Splunk assigns the time the event was received. + :type auto_extract_timestamp: bool, optional - :param compression: Compression configuration for log events. - :type compression: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, optional + :param index: Optional name of the Splunk index where logs are written. + :type index: str, optional - :param project: The GCP project ID that owns the Pub/Sub topic. - :type project: str + :param sourcetype: The Splunk sourcetype to assign to log events. + :type sourcetype: str, optional - :param topic: The Pub/Sub topic name to publish logs to. - :type topic: str + :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. + :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + + :param header_host_name: Optional override for the host name header. + :type header_host_name: str, optional + + :param header_source_category: Optional override for the source category header. + :type header_source_category: str, optional + + :param header_source_name: Optional override for the source name header. 
+ :type header_source_name: str, optional """ super().__init__(kwargs) @@ -136,80 +166,96 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading - from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( - ObservabilityPipelineDatadogLogsDestination, + from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) - from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( - ObservabilityPipelineGoogleCloudStorageDestination, + from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( + ObservabilityPipelineAmazonSecurityLakeDestination, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( - ObservabilityPipelineSplunkHecDestination, + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( - ObservabilityPipelineSumoLogicDestination, + from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( + ObservabilityPipelineDatadogLogsDestination, ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( - ObservabilityPipelineRsyslogDestination, - ) - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( - ObservabilityPipelineSyslogNgDestination, - ) - from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination - from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( ObservabilityPipelineGoogleChronicleDestination, ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( + ObservabilityPipelineGooglePubSubDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) + from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( - ObservabilityPipelineSentinelOneDestination, - ) from 
datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( ObservabilityPipelineOpenSearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( - ObservabilityPipelineAmazonOpenSearchDestination, + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, ) from datadog_api_client.v2.model.observability_pipeline_socket_destination import ( ObservabilityPipelineSocketDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( - ObservabilityPipelineAmazonSecurityLakeDestination, + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, ) - from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( - ObservabilityPipelineGooglePubSubDestination, + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, ) return { "oneOf": [ - ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineHttpClientDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination, - ObservabilityPipelineGoogleCloudStorageDestination, - ObservabilityPipelineSplunkHecDestination, - ObservabilityPipelineSumoLogicDestination, - ObservabilityPipelineElasticsearchDestination, - ObservabilityPipelineRsyslogDestination, - ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination, - MicrosoftSentinelDestination, + ObservabilityPipelineCloudPremDestination, + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, + MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination, - ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, - ObservabilityPipelineAmazonOpenSearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination, - ObservabilityPipelineAmazonSecurityLakeDestination, - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineDatadogMetricsDestination, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py 
b/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py new file mode 100644 index 0000000000..bdf1f81a18 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineConfigPipelineType(ModelSimple): + """ + The type of data being ingested. Defaults to `logs` if not specified. + + :param value: If omitted defaults to "logs". Must be one of ["logs", "metrics"]. + :type value: str + """ + + allowed_values = { + "logs", + "metrics", + } + LOGS: ClassVar["ObservabilityPipelineConfigPipelineType"] + METRICS: ClassVar["ObservabilityPipelineConfigPipelineType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineConfigPipelineType.LOGS = ObservabilityPipelineConfigPipelineType("logs") +ObservabilityPipelineConfigPipelineType.METRICS = ObservabilityPipelineConfigPipelineType("metrics") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py index 6579870f58..62bba716b5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py @@ -18,46 +18,58 @@ ObservabilityPipelineConfigProcessorItem, ) from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor - from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( - ObservabilityPipelineParseJSONProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( ObservabilityPipelineAddFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( - ObservabilityPipelineRemoveFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( - ObservabilityPipelineRenameFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, ) from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( ObservabilityPipelineGenerateMetricsProcessor, ) - from 
datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( - ObservabilityPipelineSensitiveDataScannerProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( + ObservabilityPipelineParseJSONProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, + from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor + from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( + ObservabilityPipelineRemoveFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, + from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( + ObservabilityPipelineRenameFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, ) @@ -96,23 +108,27 @@ def __init__( Union[ ObservabilityPipelineConfigProcessorItem, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, 
ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineMetricTagsProcessor, ] ], display_name: Union[str, UnsetType] = unset, diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 8becc43c0d..ec393b59d6 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -18,22 +18,85 @@ def __init__(self, **kwargs): :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs/metrics should pass through the filter. Logs/metrics that match this query continue to downstream components; others are dropped. :type include: str :param type: The processor type. The value should always be `filter`. :type type: ObservabilityPipelineFilterProcessorType + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + + :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. + :type fields: [ObservabilityPipelineFieldValue] + + :param remaps: Array of VRL remap rules. + :type remaps: [ObservabilityPipelineCustomProcessorRemap] + + :param action: The action to take on tags with matching keys. + :type action: ObservabilityPipelineDatadogTagsProcessorAction + + :param keys: A list of tag keys. + :type keys: [str] + + :param mode: The processing mode. + :type mode: ObservabilityPipelineDatadogTagsProcessorMode + + :param file: Defines a static enrichment table loaded from a CSV file. + :type file: ObservabilityPipelineEnrichmentTableFile, optional + + :param geoip: Uses a GeoIP database to enrich logs based on an IP field. + :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional + + :param reference_table: Uses a Datadog reference table to enrich logs. 
+ :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional + + :param target: Path where enrichment results should be stored in the log. + :type target: str + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric], optional + + :param mappings: A list of mapping rules to convert events to the OCSF format. + :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. + :type disable_library_rules: bool, optional + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] + :param field: The name of the log field that contains a JSON string. :type field: str - :param drop_events: If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param drop_events: If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. :type drop_events: bool, optional :param ignore_when_missing_partitions: If `true`, the processor skips quota checks when partition fields are missing from the logs. @@ -45,7 +108,7 @@ def __init__(self, **kwargs): :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. @@ -57,41 +120,11 @@ def __init__(self, **kwargs): :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional - :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. - :type fields: [ObservabilityPipelineFieldValue] - - :param metrics: Configuration for generating individual metrics. - :type metrics: [ObservabilityPipelineGeneratedMetric], optional - - :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). 
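The docstring above introduces the `parse_xml` options on the composed processor item. A hedged sketch of the concrete model follows; the type-enum module name and the required-field set (`id`, `include`, `enabled`, `field`) are assumptions based on the conventions the other processors in this diff follow:

from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import (
    ObservabilityPipelineParseXMLProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import (
    ObservabilityPipelineParseXMLProcessorType,
)

parse_xml = ObservabilityPipelineParseXMLProcessor(
    id="parse-xml-processor",     # illustrative component ID
    include="source:legacy-app",  # illustrative Datadog search query
    enabled=True,
    field="message",              # log field containing the XML string
    attr_prefix="@",              # prefix for XML attributes in the parsed output
    parse_number=True,            # coerce numeric strings to numbers
    type=ObservabilityPipelineParseXMLProcessorType.PARSE_XML,
)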
- :type rate: int, optional - - :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. - :type disable_library_rules: bool, optional - - :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. - :type rules: [ObservabilityPipelineParseGrokProcessorRule] - - :param mappings: A list of mapping rules to convert events to the OCSF format. - :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] - - :param variables: A list of environment variable mappings to apply to log fields. - :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] - - :param mode: The deduplication mode to apply to the fields. - :type mode: ObservabilityPipelineDedupeProcessorMode - - :param file: Defines a static enrichment table loaded from a CSV file. - :type file: ObservabilityPipelineEnrichmentTableFile, optional - - :param geoip: Uses a GeoIP database to enrich logs based on an IP field. - :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional - - :param target: Path where enrichment results should be stored in the log. - :type target: str + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional :param group_by: A list of fields used to group log events for merging. :type group_by: [str] @@ -99,20 +132,17 @@ def __init__(self, **kwargs): :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + :param percentage: The percentage of logs to sample. + :type percentage: float + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + :param threshold: The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped. :type threshold: int :param window: The time window in seconds over which the threshold applies. :type window: float - - :param remaps: Array of VRL remap rules. - :type remaps: [ObservabilityPipelineCustomProcessorRemap] - - :param action: The action to take on tags with matching keys. - :type action: ObservabilityPipelineDatadogTagsProcessorAction - - :param keys: A list of tag keys.
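Because `drop_events` and `overflow_action` are mutually exclusive and `too_many_buckets_action` reuses `ObservabilityPipelineQuotaProcessorOverflowAction`, a heavily hedged sketch of a quota processor follows; the `limit` field, its enforce enum, and the enum ClassVar names are not shown in this diff and are assumed to match the existing generated models:

from datadog_api_client.v2.model.observability_pipeline_quota_processor import (
    ObservabilityPipelineQuotaProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import (
    ObservabilityPipelineQuotaProcessorLimit,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit_enforce_type import (
    ObservabilityPipelineQuotaProcessorLimitEnforceType,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action import (
    ObservabilityPipelineQuotaProcessorOverflowAction,
)
from datadog_api_client.v2.model.observability_pipeline_quota_processor_type import (
    ObservabilityPipelineQuotaProcessorType,
)

quota = ObservabilityPipelineQuotaProcessor(
    id="quota-processor",
    include="service:my-service",
    enabled=True,
    name="daily-intake-quota",
    limit=ObservabilityPipelineQuotaProcessorLimit(   # assumed unchanged by this diff
        enforce=ObservabilityPipelineQuotaProcessorLimitEnforceType.EVENTS,
        limit=1000000,
    ),
    partition_fields=["service"],                     # quotas tracked per unique value
    # Set either drop_events or overflow_action, not both (see the note above).
    overflow_action=ObservabilityPipelineQuotaProcessorOverflowAction.DROP,
    # Applied when the partition/bucket limit, rather than the quota, is exceeded.
    too_many_buckets_action=ObservabilityPipelineQuotaProcessorOverflowAction.NO_ACTION,
    type=ObservabilityPipelineQuotaProcessorType.QUOTA,
)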
- :type keys: [str] """ super().__init__(kwargs) @@ -128,14 +158,47 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_filter_processor import ( ObservabilityPipelineFilterProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( + ObservabilityPipelineAddFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( + ObservabilityPipelineCustomProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( + ObservabilityPipelineDedupeProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ( ObservabilityPipelineQuotaProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( - ObservabilityPipelineAddFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( + ObservabilityPipelineReduceProcessor, ) from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( ObservabilityPipelineRemoveFieldsProcessor, @@ -143,62 +206,45 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( - ObservabilityPipelineGenerateMetricsProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sample_processor import ( ObservabilityPipelineSampleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( - ObservabilityPipelineParseGrokProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( ObservabilityPipelineSensitiveDataScannerProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( - ObservabilityPipelineDedupeProcessor, - ) - from 
datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( - ObservabilityPipelineReduceProcessor, + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( - ObservabilityPipelineCustomProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, ) return { "oneOf": [ ObservabilityPipelineFilterProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineMetricTagsProcessor, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py index 04e30172c1..023c63f546 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py @@ -15,26 +15,14 @@ def __init__(self, **kwargs): """ A data source for the pipeline. - :param group_id: Consumer group ID used by the Kafka client. - :type group_id: str - - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). :type id: str - :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional - - :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. 
- :type sasl: ObservabilityPipelineKafkaSourceSasl, optional - :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional - :param topics: A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. - :type topics: [str] - - :param type: The source type. The value should always be `kafka`. - :type type: ObservabilityPipelineKafkaSourceType + :param type: The source type. The value should always be `datadog_agent`. + :type type: ObservabilityPipelineDatadogAgentSourceType :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). @@ -43,29 +31,47 @@ def __init__(self, **kwargs): :param region: AWS region where the S3 bucket resides. :type region: str - :param auth_strategy: HTTP authentication method. - :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy - :param decoding: The decoding format used to interpret incoming logs. :type decoding: ObservabilityPipelineDecoding - :param mode: Protocol used by the syslog source to receive messages. - :type mode: ObservabilityPipelineSyslogSourceMode - :param project: The GCP project ID that owns the Pub/Sub subscription. :type project: str :param subscription: The Pub/Sub subscription name from which messages are consumed. :type subscription: str + :param auth_strategy: Optional authentication strategy for HTTP requests. + :type auth_strategy: ObservabilityPipelineHttpClientSourceAuthStrategy, optional + :param scrape_interval_secs: The interval (in seconds) between HTTP scrape requests. :type scrape_interval_secs: int, optional :param scrape_timeout_secs: The timeout (in seconds) for each scrape request. :type scrape_timeout_secs: int, optional + :param group_id: Consumer group ID used by the Kafka client. + :type group_id: str + + :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param topics: A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. + :type topics: [str] + + :param mode: Protocol used by the syslog source to receive messages. + :type mode: ObservabilityPipelineSyslogSourceMode + :param framing: Framing method configuration for the socket source. :type framing: ObservabilityPipelineSocketSourceFraming + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). 
+ :type http_address_key: str, optional """ super().__init__(kwargs) @@ -78,64 +84,68 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading - from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( - ObservabilityPipelineSplunkTcpSource, - ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( - ObservabilityPipelineSplunkHecSource, + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ( ObservabilityPipelineAmazonS3Source, ) - from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( ObservabilityPipelineFluentBitSource, ) - from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( - ObservabilityPipelineHttpServerSource, - ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( - ObservabilityPipelineSumoLogicSource, - ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ( - ObservabilityPipelineSyslogNgSource, - ) - from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( - ObservabilityPipelineAmazonDataFirehoseSource, - ) + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( ObservabilityPipelineGooglePubSubSource, ) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( ObservabilityPipelineHttpClientSource, ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_logstash_source import ( ObservabilityPipelineLogstashSource, ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ( + ObservabilityPipelineSyslogNgSource, + ) + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) return { "oneOf": [ - 
ObservabilityPipelineKafkaSource, ObservabilityPipelineDatadogAgentSource, - ObservabilityPipelineSplunkTcpSource, - ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, - ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - ObservabilityPipelineHttpServerSource, - ObservabilityPipelineSumoLogicSource, - ObservabilityPipelineRsyslogSource, - ObservabilityPipelineSyslogNgSource, - ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineFluentdSource, ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineKafkaSource, ObservabilityPipelineLogstashSource, + ObservabilityPipelineRsyslogSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineOpentelemetrySource, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py index d1dbc9facf..a84a24c745 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py @@ -71,6 +71,8 @@ def __init__( """ The ``crowdstrike_next_gen_siem`` destination forwards logs to CrowdStrike Next Gen SIEM. + **Supported pipeline types:** logs + :param compression: Compression configuration for log events. :type compression: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py index 3304418aaf..afdebcb5d7 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py @@ -68,10 +68,12 @@ def __init__( """ The ``custom_processor`` processor transforms events using `Vector Remap Language (VRL) `_ scripts with advanced filtering capabilities. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param id: The unique identifier for this processor. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py index 3614768ec1..ddce71ea67 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py @@ -48,9 +48,11 @@ def __init__( **kwargs, ): """ - The ``datadog_agent`` source collects logs from the Datadog Agent. + The ``datadog_agent`` source collects logs and metrics from the Datadog Agent. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs, metrics + + :param id: The unique identifier for this component. 
Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py index b519b48735..453881dfc4 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py @@ -40,6 +40,8 @@ def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatad """ The ``datadog_logs`` destination forwards logs to Datadog Log Management. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py new file mode 100644 index 0000000000..4c42f53f56 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, + ) + + +class ObservabilityPipelineDatadogMetricsDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineDatadogMetricsDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatadogMetricsDestinationType, **kwargs): + """ + The ``datadog_metrics`` destination forwards metrics to Datadog. + + **Supported pipeline types:** metrics + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the input for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``datadog_metrics``. + :type type: ObservabilityPipelineDatadogMetricsDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py new file mode 100644 index 0000000000..b0897c7d8b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineDatadogMetricsDestinationType(ModelSimple): + """ + The destination type. The value should always be `datadog_metrics`. + + :param value: If omitted defaults to "datadog_metrics". Must be one of ["datadog_metrics"]. + :type value: str + """ + + allowed_values = { + "datadog_metrics", + } + DATADOG_METRICS: ClassVar["ObservabilityPipelineDatadogMetricsDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS = ObservabilityPipelineDatadogMetricsDestinationType( + "datadog_metrics" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py index c96b59c4a6..fde8f40318 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py @@ -75,16 +75,18 @@ def __init__( """ The ``datadog_tags`` processor includes or excludes specific Datadog tags in your logs. + **Supported pipeline types:** logs + :param action: The action to take on tags with matching keys. :type action: ObservabilityPipelineDatadogTagsProcessorAction :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param include: A Datadog search query used to determine which logs this processor targets. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py index 9c4e3999c9..f0e3f6dd0a 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py @@ -66,10 +66,12 @@ def __init__( """ The ``dedupe`` processor removes duplicate fields in log events. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param fields: A list of log field paths to check for duplicates. 
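For orientation, here is a minimal usage sketch of the new ``datadog_metrics`` destination model added above. The component IDs are hypothetical placeholders; the required arguments and the ``DATADOG_METRICS`` enum member come directly from the definitions in this diff:

from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import (
    ObservabilityPipelineDatadogMetricsDestination,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import (
    ObservabilityPipelineDatadogMetricsDestinationType,
)

# Forward processed metrics to Datadog; id, inputs, and type are all required.
destination = ObservabilityPipelineDatadogMetricsDestination(
    id="datadog-metrics-destination",  # hypothetical component ID
    inputs=["filter-processor"],  # hypothetical upstream component ID
    type=ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS,
)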
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py index 8ce413b7ef..33b38b74b1 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py @@ -17,6 +17,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -28,6 +31,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -35,6 +41,7 @@ def openapi_types(_): return { "api_version": (ObservabilityPipelineElasticsearchDestinationApiVersion,), "bulk_index": (str,), + "data_stream": (ObservabilityPipelineElasticsearchDestinationDataStream,), "id": (str,), "inputs": ([str],), "type": (ObservabilityPipelineElasticsearchDestinationType,), @@ -43,6 +50,7 @@ def openapi_types(_): attribute_map = { "api_version": "api_version", "bulk_index": "bulk_index", + "data_stream": "data_stream", "id": "id", "inputs": "inputs", "type": "type", @@ -55,17 +63,23 @@ def __init__( type: ObservabilityPipelineElasticsearchDestinationType, api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, bulk_index: Union[str, UnsetType] = unset, + data_stream: Union[ObservabilityPipelineElasticsearchDestinationDataStream, UnsetType] = unset, **kwargs, ): """ The ``elasticsearch`` destination writes logs to an Elasticsearch cluster. + **Supported pipeline types:** logs + :param api_version: The Elasticsearch API version to use. Set to ``auto`` to auto-detect. :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional :param bulk_index: The index to write logs to in Elasticsearch. :type bulk_index: str, optional + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + :param id: The unique identifier for this component. 
:type id: str @@ -79,6 +93,8 @@ def __init__( kwargs["api_version"] = api_version if bulk_index is not unset: kwargs["bulk_index"] = bulk_index + if data_stream is not unset: + kwargs["data_stream"] = data_stream super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py new file mode 100644 index 0000000000..8046b936b9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py @@ -0,0 +1,56 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineElasticsearchDestinationDataStream(ModelNormal): + @cached_property + def openapi_types(_): + return { + "dataset": (str,), + "dtype": (str,), + "namespace": (str,), + } + + attribute_map = { + "dataset": "dataset", + "dtype": "dtype", + "namespace": "namespace", + } + + def __init__( + self_, + dataset: Union[str, UnsetType] = unset, + dtype: Union[str, UnsetType] = unset, + namespace: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + + :param dataset: The data stream dataset for your logs. This groups logs by their source or application. + :type dataset: str, optional + + :param dtype: The data stream type for your logs. This determines how logs are categorized within the data stream. + :type dtype: str, optional + + :param namespace: The data stream namespace for your logs. This separates logs into different environments or domains. 
+ :type namespace: str, optional + """ + if dataset is not unset: + kwargs["dataset"] = dataset + if dtype is not unset: + kwargs["dtype"] = dtype + if namespace is not unset: + kwargs["namespace"] = namespace + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py index 3483c95ae4..43b44ea34d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py @@ -20,6 +20,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -34,6 +37,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -45,6 +51,7 @@ def openapi_types(_): "geoip": (ObservabilityPipelineEnrichmentTableGeoIp,), "id": (str,), "include": (str,), + "reference_table": (ObservabilityPipelineEnrichmentTableReferenceTable,), "target": (str,), "type": (ObservabilityPipelineEnrichmentTableProcessorType,), } @@ -56,6 +63,7 @@ def openapi_types(_): "geoip": "geoip", "id": "id", "include": "include", + "reference_table": "reference_table", "target": "target", "type": "type", } @@ -70,15 +78,18 @@ def __init__( display_name: Union[str, UnsetType] = unset, file: Union[ObservabilityPipelineEnrichmentTableFile, UnsetType] = unset, geoip: Union[ObservabilityPipelineEnrichmentTableGeoIp, UnsetType] = unset, + reference_table: Union[ObservabilityPipelineEnrichmentTableReferenceTable, UnsetType] = unset, **kwargs, ): """ - The ``enrichment_table`` processor enriches logs using a static CSV file or GeoIP database. + The ``enrichment_table`` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of ``file``, ``geoip``, or ``reference_table`` must be configured. + + **Supported pipeline types:** logs :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param file: Defines a static enrichment table loaded from a CSV file. @@ -93,6 +104,9 @@ def __init__( :param include: A Datadog search query used to determine which logs this processor targets. :type include: str + :param reference_table: Uses a Datadog reference table to enrich logs. + :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional :param target: Path where enrichment results should be stored in the log. 
:type target: str @@ -105,6 +119,8 @@ def __init__( kwargs["file"] = file if geoip is not unset: kwargs["geoip"] = geoip + if reference_table is not unset: + kwargs["reference_table"] = reference_table super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py new file mode 100644 index 0000000000..9788ea4973 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py @@ -0,0 +1,49 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineEnrichmentTableReferenceTable(ModelNormal): + @cached_property + def openapi_types(_): + return { + "columns": ([str],), + "key_field": (str,), + "table_id": (str,), + } + + attribute_map = { + "columns": "columns", + "key_field": "key_field", + "table_id": "table_id", + } + + def __init__(self_, key_field: str, table_id: str, columns: Union[List[str], UnsetType] = unset, **kwargs): + """ + Uses a Datadog reference table to enrich logs. + + :param columns: List of column names to include from the reference table. If not provided, all columns are included. + :type columns: [str], optional + + :param key_field: Path to the field in the log event to match against the reference table. + :type key_field: str + + :param table_id: The unique identifier of the reference table. + :type table_id: str + """ + if columns is not unset: + kwargs["columns"] = columns + super().__init__(kwargs) + + self_.key_field = key_field + self_.table_id = table_id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py index 1098a4c6a6..5f064d3f2b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py @@ -52,18 +52,20 @@ def __init__( **kwargs, ): """ - The ``filter`` processor allows conditional processing of logs based on a Datadog search query. Logs that match the ``include`` query are passed through; others are discarded. + The ``filter`` processor allows conditional processing of logs and metrics based on a Datadog search query. Logs and metrics that match the ``include`` query are passed through; others are discarded. + + **Supported pipeline types:** logs, metrics :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. 
Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs and metrics should pass through the filter. Logs and metrics that match this query continue to downstream components; others are dropped. :type include: str :param type: The processor type. The value should always be ``filter``. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py b/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py index ba39b1d678..dd1344f898 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py @@ -50,7 +50,9 @@ def __init__( """ The ``fluent_bit`` source ingests logs from Fluent Bit. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py index 2e64961b5f..a3351cbe96 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py @@ -50,7 +50,9 @@ def __init__( """ The ``fluentd`` source ingests logs from a Fluentd-compatible service. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py index 97aa67e007..9c5ffa2dfa 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py @@ -62,10 +62,12 @@ def __init__( The ``generate_datadog_metrics`` processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline. 
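As a usage sketch for the ``filter`` processor updated above: the ``id`` and ``include`` values are hypothetical, the type module path follows the repository's usual naming convention, and the enum is built from its literal value rather than assuming a class attribute name:

from datadog_api_client.v2.model.observability_pipeline_filter_processor import (
    ObservabilityPipelineFilterProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import (
    ObservabilityPipelineFilterProcessorType,
)

# Keep only events matching the Datadog search query; everything else is dropped.
processor = ObservabilityPipelineFilterProcessor(
    id="filter-processor",  # hypothetical component ID
    include="service:my-service",  # hypothetical search query
    enabled=True,
    type=ObservabilityPipelineFilterProcessorType("filter"),
)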
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py index 964a09e959..44f294ec03 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py @@ -68,6 +68,8 @@ def __init__( """ The ``google_chronicle`` destination sends logs to Google Chronicle. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud services. :type auth: ObservabilityPipelineGcpAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py index a2e8b0c337..8c5e4f1527 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py @@ -83,6 +83,8 @@ def __init__( The ``google_cloud_storage`` destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields. + **Supported pipeline types:** logs + :param acl: Access control list setting for objects written to the bucket. :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py index 5fc600f35e..847bdd482f 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py @@ -73,6 +73,8 @@ def __init__( """ The ``google_pubsub`` destination publishes logs to a Google Cloud Pub/Sub topic. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud services. :type auth: ObservabilityPipelineGcpAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py index 6135892ad0..859bad4249 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py @@ -66,13 +66,15 @@ def __init__( """ The ``google_pubsub`` source ingests logs from a Google Cloud Pub/Sub subscription. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud services. :type auth: ObservabilityPipelineGcpAuth, optional :param decoding: The decoding format used to interpret incoming logs. :type decoding: ObservabilityPipelineDecoding - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param project: The GCP project ID that owns the Pub/Sub subscription. 
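A comparable sketch for the ``google_pubsub`` source documented above. The project and subscription names are hypothetical, the decoding and type enums are built from their string literals, and the module paths for the decoding and type models are assumed to follow the repository's naming convention:

from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding
from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import (
    ObservabilityPipelineGooglePubSubSource,
)
from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import (
    ObservabilityPipelineGooglePubSubSourceType,
)

# Consume messages from a Pub/Sub subscription and decode them as JSON.
source = ObservabilityPipelineGooglePubSubSource(
    id="google-pubsub-source",  # hypothetical component ID
    project="my-gcp-project",  # hypothetical GCP project
    subscription="my-subscription",  # hypothetical subscription name
    decoding=ObservabilityPipelineDecoding("json"),  # assumes "json" is an allowed decoding value
    type=ObservabilityPipelineGooglePubSubSourceType("google_pubsub"),
)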
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py new file mode 100644 index 0000000000..c26d7bf7ce --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py @@ -0,0 +1,117 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, + ) + + +class ObservabilityPipelineHttpClientDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, + ) + + return { + "auth_strategy": (ObservabilityPipelineHttpClientDestinationAuthStrategy,), + "compression": (ObservabilityPipelineHttpClientDestinationCompression,), + "encoding": (ObservabilityPipelineHttpClientDestinationEncoding,), + "id": (str,), + "inputs": ([str],), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineHttpClientDestinationType,), + } + + attribute_map = { + "auth_strategy": "auth_strategy", + "compression": "compression", + "encoding": "encoding", + "id": "id", + "inputs": "inputs", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + encoding: ObservabilityPipelineHttpClientDestinationEncoding, + id: str, + inputs: List[str], + type: ObservabilityPipelineHttpClientDestinationType, + auth_strategy: Union[ObservabilityPipelineHttpClientDestinationAuthStrategy, UnsetType] = unset, + compression: Union[ObservabilityPipelineHttpClientDestinationCompression, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``http_client`` destination sends data to an HTTP endpoint. 
+ + **Supported pipeline types:** logs, metrics + + :param auth_strategy: HTTP authentication strategy. + :type auth_strategy: ObservabilityPipelineHttpClientDestinationAuthStrategy, optional + + :param compression: Compression configuration for HTTP requests. + :type compression: ObservabilityPipelineHttpClientDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineHttpClientDestinationEncoding + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the input for this component. + :type inputs: [str] + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The destination type. The value should always be ``http_client``. + :type type: ObservabilityPipelineHttpClientDestinationType + """ + if auth_strategy is not unset: + kwargs["auth_strategy"] = auth_strategy + if compression is not unset: + kwargs["compression"] = compression + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.encoding = encoding + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py new file mode 100644 index 0000000000..ffb1523cca --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py @@ -0,0 +1,47 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationAuthStrategy(ModelSimple): + """ + HTTP authentication strategy. + + :param value: Must be one of ["none", "basic", "bearer"]. + :type value: str + """ + + allowed_values = { + "none", + "basic", + "bearer", + } + NONE: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + BASIC: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + BEARER: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationAuthStrategy.NONE = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "none" +) +ObservabilityPipelineHttpClientDestinationAuthStrategy.BASIC = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "basic" +) +ObservabilityPipelineHttpClientDestinationAuthStrategy.BEARER = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "bearer" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py new file mode 100644 index 0000000000..be42af4291 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, + ) + + +class ObservabilityPipelineHttpClientDestinationCompression(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, + ) + + return { + "algorithm": (ObservabilityPipelineHttpClientDestinationCompressionAlgorithm,), + } + + attribute_map = { + "algorithm": "algorithm", + } + + def __init__(self_, algorithm: ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, **kwargs): + """ + Compression configuration for HTTP requests. + + :param algorithm: Compression algorithm. + :type algorithm: ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + """ + super().__init__(kwargs) + + self_.algorithm = algorithm diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py new file mode 100644 index 0000000000..0adcb8b2f3 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(ModelSimple): + """ + Compression algorithm. + + :param value: If omitted defaults to "gzip". Must be one of ["gzip"]. + :type value: str + """ + + allowed_values = { + "gzip", + } + GZIP: ClassVar["ObservabilityPipelineHttpClientDestinationCompressionAlgorithm"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP = ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm("gzip") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py new file mode 100644 index 0000000000..573d3f2798 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationEncoding(ModelSimple): + """ + Encoding format for log events. 
+ + :param value: If omitted defaults to "json". Must be one of ["json"]. + :type value: str + """ + + allowed_values = { + "json", + } + JSON: ClassVar["ObservabilityPipelineHttpClientDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationEncoding.JSON = ObservabilityPipelineHttpClientDestinationEncoding("json") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py new file mode 100644 index 0000000000..3bbe09a7c1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationType(ModelSimple): + """ + The destination type. The value should always be `http_client`. + + :param value: If omitted defaults to "http_client". Must be one of ["http_client"]. + :type value: str + """ + + allowed_values = { + "http_client", + } + HTTP_CLIENT: ClassVar["ObservabilityPipelineHttpClientDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT = ObservabilityPipelineHttpClientDestinationType( + "http_client" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py index 5d6b534568..5ee0bc021a 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py @@ -70,13 +70,15 @@ def __init__( """ The ``http_client`` source scrapes logs from HTTP endpoints at regular intervals. + **Supported pipeline types:** logs + :param auth_strategy: Optional authentication strategy for HTTP requests. :type auth_strategy: ObservabilityPipelineHttpClientSourceAuthStrategy, optional :param decoding: The decoding format used to interpret incoming logs. :type decoding: ObservabilityPipelineDecoding - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param scrape_interval_secs: The interval (in seconds) between HTTP scrape requests. 
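To tie the new ``http_client`` destination models together, a minimal construction sketch follows. The component IDs are hypothetical placeholders, while the constructor signature and the ``JSON``, ``BEARER``, and ``HTTP_CLIENT`` enum members are taken directly from the files added in this diff:

from datadog_api_client.v2.model.observability_pipeline_http_client_destination import (
    ObservabilityPipelineHttpClientDestination,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import (
    ObservabilityPipelineHttpClientDestinationAuthStrategy,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import (
    ObservabilityPipelineHttpClientDestinationEncoding,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import (
    ObservabilityPipelineHttpClientDestinationType,
)

# Send JSON-encoded events to an HTTP endpoint using bearer-token authentication.
destination = ObservabilityPipelineHttpClientDestination(
    id="http-client-destination",  # hypothetical component ID
    inputs=["filter-processor"],  # hypothetical upstream component ID
    encoding=ObservabilityPipelineHttpClientDestinationEncoding.JSON,
    auth_strategy=ObservabilityPipelineHttpClientDestinationAuthStrategy.BEARER,
    type=ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT,
)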
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py index d2cd326f0a..51e4c20f6d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py @@ -16,14 +16,16 @@ class ObservabilityPipelineHttpClientSourceAuthStrategy(ModelSimple): """ Optional authentication strategy for HTTP requests. - :param value: Must be one of ["basic", "bearer"]. + :param value: Must be one of ["none", "basic", "bearer"]. :type value: str """ allowed_values = { + "none", "basic", "bearer", } + NONE: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] BASIC: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] BEARER: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] @@ -34,5 +36,6 @@ def openapi_types(_): } +ObservabilityPipelineHttpClientSourceAuthStrategy.NONE = ObservabilityPipelineHttpClientSourceAuthStrategy("none") ObservabilityPipelineHttpClientSourceAuthStrategy.BASIC = ObservabilityPipelineHttpClientSourceAuthStrategy("basic") ObservabilityPipelineHttpClientSourceAuthStrategy.BEARER = ObservabilityPipelineHttpClientSourceAuthStrategy("bearer") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py b/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py index 260387f57b..c1cc317bc8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py @@ -64,6 +64,8 @@ def __init__( """ The ``http_server`` source collects logs over HTTP POST from external services. + **Supported pipeline types:** logs + :param auth_strategy: HTTP authentication method. :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py new file mode 100644 index 0000000000..2436d1cfde --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py @@ -0,0 +1,198 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + +class ObservabilityPipelineKafkaDestination(ModelNormal): + validations = { + "message_timeout_ms": { + "inclusive_minimum": 1, + }, + "rate_limit_duration_secs": { + "inclusive_minimum": 1, + }, + "rate_limit_num": { + "inclusive_minimum": 1, + }, + "socket_timeout_ms": { + "inclusive_maximum": 300000, + "inclusive_minimum": 10, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + return { + "compression": (ObservabilityPipelineKafkaDestinationCompression,), + "encoding": (ObservabilityPipelineKafkaDestinationEncoding,), + "headers_key": (str,), + "id": (str,), + "inputs": ([str],), + "key_field": (str,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "message_timeout_ms": (int,), + "rate_limit_duration_secs": (int,), + "rate_limit_num": (int,), + "sasl": (ObservabilityPipelineKafkaSasl,), + "socket_timeout_ms": (int,), + "tls": (ObservabilityPipelineTls,), + "topic": (str,), + "type": (ObservabilityPipelineKafkaDestinationType,), + } + + attribute_map = { + "compression": "compression", + "encoding": "encoding", + "headers_key": "headers_key", + "id": "id", + "inputs": "inputs", + "key_field": "key_field", + "librdkafka_options": "librdkafka_options", + "message_timeout_ms": "message_timeout_ms", + "rate_limit_duration_secs": "rate_limit_duration_secs", + "rate_limit_num": "rate_limit_num", + "sasl": "sasl", + "socket_timeout_ms": "socket_timeout_ms", + "tls": "tls", + "topic": "topic", + "type": "type", + } + + def __init__( + self_, + encoding: ObservabilityPipelineKafkaDestinationEncoding, + id: str, + inputs: List[str], + topic: str, + type: ObservabilityPipelineKafkaDestinationType, + compression: Union[ObservabilityPipelineKafkaDestinationCompression, UnsetType] = unset, + headers_key: Union[str, 
UnsetType] = unset, + key_field: Union[str, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + message_timeout_ms: Union[int, UnsetType] = unset, + rate_limit_duration_secs: Union[int, UnsetType] = unset, + rate_limit_num: Union[int, UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, + socket_timeout_ms: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``kafka`` destination sends logs to Apache Kafka topics. + + **Supported pipeline types:** logs + + :param compression: Compression codec for Kafka messages. + :type compression: ObservabilityPipelineKafkaDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineKafkaDestinationEncoding + + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional + + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional + + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. + :type rate_limit_duration_secs: int, optional + + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. + :type rate_limit_num: int, optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param topic: The Kafka topic name to publish logs to. + :type topic: str + + :param type: The destination type. The value should always be ``kafka``. 
+ :type type: ObservabilityPipelineKafkaDestinationType + """ + if compression is not unset: + kwargs["compression"] = compression + if headers_key is not unset: + kwargs["headers_key"] = headers_key + if key_field is not unset: + kwargs["key_field"] = key_field + if librdkafka_options is not unset: + kwargs["librdkafka_options"] = librdkafka_options + if message_timeout_ms is not unset: + kwargs["message_timeout_ms"] = message_timeout_ms + if rate_limit_duration_secs is not unset: + kwargs["rate_limit_duration_secs"] = rate_limit_duration_secs + if rate_limit_num is not unset: + kwargs["rate_limit_num"] = rate_limit_num + if sasl is not unset: + kwargs["sasl"] = sasl + if socket_timeout_ms is not unset: + kwargs["socket_timeout_ms"] = socket_timeout_ms + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.encoding = encoding + self_.id = id + self_.inputs = inputs + self_.topic = topic + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py new file mode 100644 index 0000000000..3cd49d2960 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py @@ -0,0 +1,47 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationCompression(ModelSimple): + """ + Compression codec for Kafka messages. + + :param value: Must be one of ["none", "gzip", "snappy", "lz4", "zstd"]. + :type value: str + """ + + allowed_values = { + "none", + "gzip", + "snappy", + "lz4", + "zstd", + } + NONE: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + GZIP: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + SNAPPY: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + LZ4: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + ZSTD: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationCompression.NONE = ObservabilityPipelineKafkaDestinationCompression("none") +ObservabilityPipelineKafkaDestinationCompression.GZIP = ObservabilityPipelineKafkaDestinationCompression("gzip") +ObservabilityPipelineKafkaDestinationCompression.SNAPPY = ObservabilityPipelineKafkaDestinationCompression("snappy") +ObservabilityPipelineKafkaDestinationCompression.LZ4 = ObservabilityPipelineKafkaDestinationCompression("lz4") +ObservabilityPipelineKafkaDestinationCompression.ZSTD = ObservabilityPipelineKafkaDestinationCompression("zstd") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py new file mode 100644 index 0000000000..99db79c36e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
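A hedged construction sketch for the new `kafka` destination defined above; the IDs, topic, and option values are illustrative placeholders. The compression enum is defined just above, and the encoding and type enums are the modules introduced just below in this diff:

```python
from datadog_api_client.v2.model.observability_pipeline_kafka_destination import (
    ObservabilityPipelineKafkaDestination,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import (
    ObservabilityPipelineKafkaDestinationCompression,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import (
    ObservabilityPipelineKafkaDestinationEncoding,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import (
    ObservabilityPipelineKafkaDestinationType,
)

destination = ObservabilityPipelineKafkaDestination(
    # Required parameters, per the __init__ signature above.
    encoding=ObservabilityPipelineKafkaDestinationEncoding.JSON,
    id="kafka-destination",       # hypothetical component ID
    inputs=["my-processor"],      # hypothetical upstream component ID
    topic="observability-logs",   # hypothetical Kafka topic
    type=ObservabilityPipelineKafkaDestinationType.KAFKA,
    # Optional parameters; anything left unset is omitted from the payload.
    compression=ObservabilityPipelineKafkaDestinationCompression.ZSTD,
    message_timeout_ms=30000,     # `validations` requires >= 1
    socket_timeout_ms=60000,      # `validations` requires 10..300000
)
```

Because the bounds live in `validations`, out-of-range timeouts should fail locally at construction time rather than on the API call.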
+# Copyright 2019-Present Datadog, Inc.
+from __future__ import annotations
+
+
+from datadog_api_client.model_utils import (
+    ModelSimple,
+    cached_property,
+)
+
+from typing import ClassVar
+
+
+class ObservabilityPipelineKafkaDestinationEncoding(ModelSimple):
+    """
+    Encoding format for log events.
+
+    :param value: Must be one of ["json", "raw_message"].
+    :type value: str
+    """
+
+    allowed_values = {
+        "json",
+        "raw_message",
+    }
+    JSON: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"]
+    RAW_MESSAGE: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"]
+
+    @cached_property
+    def openapi_types(_):
+        return {
+            "value": (str,),
+        }
+
+
+ObservabilityPipelineKafkaDestinationEncoding.JSON = ObservabilityPipelineKafkaDestinationEncoding("json")
+ObservabilityPipelineKafkaDestinationEncoding.RAW_MESSAGE = ObservabilityPipelineKafkaDestinationEncoding("raw_message")
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py
new file mode 100644
index 0000000000..e2e290b169
--- /dev/null
+++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py
@@ -0,0 +1,35 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019-Present Datadog, Inc.
+from __future__ import annotations
+
+
+from datadog_api_client.model_utils import (
+    ModelSimple,
+    cached_property,
+)
+
+from typing import ClassVar
+
+
+class ObservabilityPipelineKafkaDestinationType(ModelSimple):
+    """
+    The destination type. The value should always be `kafka`.
+
+    :param value: If omitted defaults to "kafka". Must be one of ["kafka"].
+    :type value: str
+    """
+
+    allowed_values = {
+        "kafka",
+    }
+    KAFKA: ClassVar["ObservabilityPipelineKafkaDestinationType"]
+
+    @cached_property
+    def openapi_types(_):
+        return {
+            "value": (str,),
+        }
+
+
+ObservabilityPipelineKafkaDestinationType.KAFKA = ObservabilityPipelineKafkaDestinationType("kafka")
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py
similarity index 84%
rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py
rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py
index 4099a196cd..a7e29aa09b 100644
--- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py
+++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py
@@ -10,7 +10,7 @@
 )
 
 
-class ObservabilityPipelineKafkaSourceLibrdkafkaOption(ModelNormal):
+class ObservabilityPipelineKafkaLibrdkafkaOption(ModelNormal):
     @cached_property
     def openapi_types(_):
         return {
@@ -25,7 +25,7 @@ def openapi_types(_):
 
     def __init__(self_, name: str, value: str, **kwargs):
         """
-        Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka sources, such as timeouts, buffer sizes, and security settings.
+        Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka sources and destinations, such as timeouts, buffer sizes, and security settings.
 
         :param name: The name of the ``librdkafka`` configuration option to set.
:type name: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py similarity index 59% rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py index 88f6e0aaab..e6d42bc1c8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py @@ -14,34 +14,32 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) -class ObservabilityPipelineKafkaSourceSasl(ModelNormal): +class ObservabilityPipelineKafkaSasl(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) return { - "mechanism": (ObservabilityPipelinePipelineKafkaSourceSaslMechanism,), + "mechanism": (ObservabilityPipelineKafkaSaslMechanism,), } attribute_map = { "mechanism": "mechanism", } - def __init__( - self_, mechanism: Union[ObservabilityPipelinePipelineKafkaSourceSaslMechanism, UnsetType] = unset, **kwargs - ): + def __init__(self_, mechanism: Union[ObservabilityPipelineKafkaSaslMechanism, UnsetType] = unset, **kwargs): """ Specifies the SASL mechanism for authenticating with a Kafka cluster. :param mechanism: SASL mechanism used for Kafka authentication. - :type mechanism: ObservabilityPipelinePipelineKafkaSourceSaslMechanism, optional + :type mechanism: ObservabilityPipelineKafkaSaslMechanism, optional """ if mechanism is not unset: kwargs["mechanism"] = mechanism diff --git a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py similarity index 50% rename from src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py index db414b0a6f..2f261fd1aa 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py @@ -12,7 +12,7 @@ from typing import ClassVar -class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): +class ObservabilityPipelineKafkaSaslMechanism(ModelSimple): """ SASL mechanism used for Kafka authentication. 
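Since the SASL models are renamed here to be shared by the Kafka source and destination, a small sketch of attaching SASL authentication; the mechanism singletons (including the generator's unusual `SCRAMNOT_SHANOT_*` names for the hyphenated values) appear in the hunks below:

```python
from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import (
    ObservabilityPipelineKafkaSasl,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import (
    ObservabilityPipelineKafkaSaslMechanism,
)

# `mechanism` is optional; when provided it must be one of
# "PLAIN", "SCRAM-SHA-256", or "SCRAM-SHA-512".
sasl = ObservabilityPipelineKafkaSasl(
    # Serializes as "SCRAM-SHA-256" despite the generated attribute name.
    mechanism=ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256,
)
```

The same `sasl` object can now be passed to either `ObservabilityPipelineKafkaSource` or `ObservabilityPipelineKafkaDestination`.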
@@ -25,9 +25,9 @@ class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): "SCRAM-SHA-256", "SCRAM-SHA-512", } - PLAIN: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] + PLAIN: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] @cached_property def openapi_types(_): @@ -36,12 +36,6 @@ def openapi_types(_): } -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.PLAIN = ObservabilityPipelinePipelineKafkaSourceSaslMechanism( - "PLAIN" -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_256 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256") -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_512 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512") -) +ObservabilityPipelineKafkaSaslMechanism.PLAIN = ObservabilityPipelineKafkaSaslMechanism("PLAIN") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_512 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py index 3ba6cfe651..acbc81f60e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py @@ -14,12 +14,10 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -29,12 +27,10 @@ class ObservabilityPipelineKafkaSource(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -43,8 +39,8 @@ def openapi_types(_): return { "group_id": (str,), "id": (str,), - "librdkafka_options": 
([ObservabilityPipelineKafkaSourceLibrdkafkaOption],), - "sasl": (ObservabilityPipelineKafkaSourceSasl,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "sasl": (ObservabilityPipelineKafkaSasl,), "tls": (ObservabilityPipelineTls,), "topics": ([str],), "type": (ObservabilityPipelineKafkaSourceType,), @@ -66,25 +62,27 @@ def __init__( id: str, topics: List[str], type: ObservabilityPipelineKafkaSourceType, - librdkafka_options: Union[List[ObservabilityPipelineKafkaSourceLibrdkafkaOption], UnsetType] = unset, - sasl: Union[ObservabilityPipelineKafkaSourceSasl, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): """ The ``kafka`` source ingests data from Apache Kafka topics. + **Supported pipeline types:** logs + :param group_id: Consumer group ID used by the Kafka client. :type group_id: str - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. - :type sasl: ObservabilityPipelineKafkaSourceSasl, optional + :type sasl: ObservabilityPipelineKafkaSasl, optional :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py b/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py index 1971cae326..b0edbf9ba8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py @@ -50,7 +50,9 @@ def __init__( """ The ``logstash`` source ingests logs from a Logstash forwarder. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py new file mode 100644 index 0000000000..fb378619ab --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py @@ -0,0 +1,101 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, + ) + + +class ObservabilityPipelineMetricTagsProcessor(ModelNormal): + validations = { + "rules": { + "max_items": 100, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "rules": ([ObservabilityPipelineMetricTagsProcessorRule],), + "type": (ObservabilityPipelineMetricTagsProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "rules": "rules", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + rules: List[ObservabilityPipelineMetricTagsProcessorRule], + type: ObservabilityPipelineMetricTagsProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``metric_tags`` processor filters metrics based on their tags using Datadog tag key patterns. + + **Supported pipeline types:** metrics + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Indicates whether the processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query that determines which metrics the processor targets. + :type include: str + + :param rules: A list of rules for filtering metric tags. + :type rules: [ObservabilityPipelineMetricTagsProcessorRule] + + :param type: The processor type. The value should always be ``metric_tags``. + :type type: ObservabilityPipelineMetricTagsProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.rules = rules + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py new file mode 100644 index 0000000000..ebaf9a1b03 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py @@ -0,0 +1,75 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
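A hedged end-to-end sketch of the new metrics-pipeline processor defined above, pairing it with a single rule; the rule, action, mode, and type models are defined in the files that follow, and all IDs and queries are placeholders:

```python
from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import (
    ObservabilityPipelineMetricTagsProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import (
    ObservabilityPipelineMetricTagsProcessorRule,
)
from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import (
    ObservabilityPipelineMetricTagsProcessorRuleAction,
)
from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import (
    ObservabilityPipelineMetricTagsProcessorRuleMode,
)
from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import (
    ObservabilityPipelineMetricTagsProcessorType,
)

# Keep only the `env` and `service` tags on matching metrics.
rule = ObservabilityPipelineMetricTagsProcessorRule(
    action=ObservabilityPipelineMetricTagsProcessorRuleAction.INCLUDE,
    include="*",                 # hypothetical query targeting all metrics
    keys=["env", "service"],
    mode=ObservabilityPipelineMetricTagsProcessorRuleMode.FILTER,
)

processor = ObservabilityPipelineMetricTagsProcessor(
    enabled=True,
    id="metric-tags-processor",  # hypothetical component ID
    include="*",
    rules=[rule],                # `validations` allows 1 to 100 rules
    type=ObservabilityPipelineMetricTagsProcessorType.METRIC_TAGS,
)
```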
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, + ) + + +class ObservabilityPipelineMetricTagsProcessorRule(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, + ) + + return { + "action": (ObservabilityPipelineMetricTagsProcessorRuleAction,), + "include": (str,), + "keys": ([str],), + "mode": (ObservabilityPipelineMetricTagsProcessorRuleMode,), + } + + attribute_map = { + "action": "action", + "include": "include", + "keys": "keys", + "mode": "mode", + } + + def __init__( + self_, + action: ObservabilityPipelineMetricTagsProcessorRuleAction, + include: str, + keys: List[str], + mode: ObservabilityPipelineMetricTagsProcessorRuleMode, + **kwargs, + ): + """ + Defines a rule for filtering metric tags based on key patterns. + + :param action: The action to take on tags with matching keys. + :type action: ObservabilityPipelineMetricTagsProcessorRuleAction + + :param include: A Datadog search query used to determine which metrics this rule targets. + :type include: str + + :param keys: A list of tag keys to include or exclude. + :type keys: [str] + + :param mode: The processing mode for tag filtering. + :type mode: ObservabilityPipelineMetricTagsProcessorRuleMode + """ + super().__init__(kwargs) + + self_.action = action + self_.include = include + self_.keys = keys + self_.mode = mode diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py new file mode 100644 index 0000000000..2f2e45809a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorRuleAction(ModelSimple): + """ + The action to take on tags with matching keys. + + :param value: Must be one of ["include", "exclude"]. 
+ :type value: str + """ + + allowed_values = { + "include", + "exclude", + } + INCLUDE: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleAction"] + EXCLUDE: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleAction"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorRuleAction.INCLUDE = ObservabilityPipelineMetricTagsProcessorRuleAction( + "include" +) +ObservabilityPipelineMetricTagsProcessorRuleAction.EXCLUDE = ObservabilityPipelineMetricTagsProcessorRuleAction( + "exclude" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py new file mode 100644 index 0000000000..6add98b0c0 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorRuleMode(ModelSimple): + """ + The processing mode for tag filtering. + + :param value: If omitted defaults to "filter". Must be one of ["filter"]. + :type value: str + """ + + allowed_values = { + "filter", + } + FILTER: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleMode"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorRuleMode.FILTER = ObservabilityPipelineMetricTagsProcessorRuleMode("filter") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py new file mode 100644 index 0000000000..ad85ed7e23 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorType(ModelSimple): + """ + The processor type. The value should always be `metric_tags`. + + :param value: If omitted defaults to "metric_tags". Must be one of ["metric_tags"]. 
+ :type value: str + """ + + allowed_values = { + "metric_tags", + } + METRIC_TAGS: ClassVar["ObservabilityPipelineMetricTagsProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorType.METRIC_TAGS = ObservabilityPipelineMetricTagsProcessorType("metric_tags") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py index ec0cb39481..e8cf62c104 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py @@ -55,6 +55,8 @@ def __init__( """ The ``new_relic`` destination sends logs to the New Relic platform. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py index 45ce411dc2..e307087fc2 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py @@ -63,10 +63,12 @@ def __init__( """ The ``ocsf_mapper`` processor transforms logs into the OCSF schema using a predefined mapping configuration. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py index d817a4b558..d2753ebdb7 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py @@ -51,6 +51,8 @@ def __init__( """ The ``opensearch`` destination writes logs to an OpenSearch cluster. + **Supported pipeline types:** logs + :param bulk_index: The index to write logs to. :type bulk_index: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py new file mode 100644 index 0000000000..ac6090dcd9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py @@ -0,0 +1,85 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + +class ObservabilityPipelineOpentelemetrySource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + return { + "grpc_address_key": (str,), + "http_address_key": (str,), + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineOpentelemetrySourceType,), + } + + attribute_map = { + "grpc_address_key": "grpc_address_key", + "http_address_key": "http_address_key", + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineOpentelemetrySourceType, + grpc_address_key: Union[str, UnsetType] = unset, + http_address_key: Union[str, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``opentelemetry`` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP. + + **Supported pipeline types:** logs + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type http_address_key: str, optional + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``opentelemetry``. + :type type: ObservabilityPipelineOpentelemetrySourceType + """ + if grpc_address_key is not unset: + kwargs["grpc_address_key"] = grpc_address_key + if http_address_key is not unset: + kwargs["http_address_key"] = http_address_key + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py new file mode 100644 index 0000000000..ec476d0dba --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
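A hedged sketch of the new OTLP source defined above. Note that `grpc_address_key` and `http_address_key` name environment variables rather than addresses; the variable names here are placeholders, and the type singleton comes from the module that follows:

```python
from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import (
    ObservabilityPipelineOpentelemetrySource,
)
from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import (
    ObservabilityPipelineOpentelemetrySourceType,
)

source = ObservabilityPipelineOpentelemetrySource(
    id="opentelemetry-source",             # hypothetical component ID
    type=ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY,
    # Optional: names of environment variables that hold the listener
    # addresses (alphanumeric characters and underscores only).
    grpc_address_key="OTLP_GRPC_ADDRESS",  # hypothetical variable name
    http_address_key="OTLP_HTTP_ADDRESS",  # hypothetical variable name
)
```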
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOpentelemetrySourceType(ModelSimple): + """ + The source type. The value should always be `opentelemetry`. + + :param value: If omitted defaults to "opentelemetry". Must be one of ["opentelemetry"]. + :type value: str + """ + + allowed_values = { + "opentelemetry", + } + OPENTELEMETRY: ClassVar["ObservabilityPipelineOpentelemetrySourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY = ObservabilityPipelineOpentelemetrySourceType( + "opentelemetry" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py index 32e6da60d0..d92f1ea6f8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py @@ -66,13 +66,15 @@ def __init__( """ The ``parse_grok`` processor extracts structured fields from unstructured log messages using Grok patterns. + **Supported pipeline types:** logs + :param disable_library_rules: If set to ``true`` , disables the default Grok rules provided by Datadog. :type disable_library_rules: bool, optional :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param id: A unique identifier for this processor. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py index 134700560c..40ece0e6ca 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py @@ -57,10 +57,12 @@ def __init__( """ The ``parse_json`` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param field: The name of the log field that contains a JSON string. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py new file mode 100644 index 0000000000..d9595e146a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py @@ -0,0 +1,150 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
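For comparison with the new `parse_xml` model below, a sketch of the `parse_json` processor touched above. This assumes its constructor follows the shared processor shape (`enabled`/`field`/`id`/`include`/`type`) consistent with the parameters visible in the hunk, and that its type enum mirrors the single-value pattern used throughout this diff; field values are placeholders:

```python
from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import (
    ObservabilityPipelineParseJsonProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type import (
    ObservabilityPipelineParseJsonProcessorType,
)

processor = ObservabilityPipelineParseJsonProcessor(
    enabled=True,
    field="message",               # log field holding an embedded JSON string
    id="parse-json-processor",     # hypothetical component ID
    include="service:my-service",  # hypothetical Datadog search query
    type=ObservabilityPipelineParseJsonProcessorType.PARSE_JSON,
)
```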
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + +class ObservabilityPipelineParseXMLProcessor(ModelNormal): + validations = { + "text_key": { + "min_length": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + return { + "always_use_text_key": (bool,), + "attr_prefix": (str,), + "display_name": (str,), + "enabled": (bool,), + "field": (str,), + "id": (str,), + "include": (str,), + "include_attr": (bool,), + "parse_bool": (bool,), + "parse_null": (bool,), + "parse_number": (bool,), + "text_key": (str,), + "type": (ObservabilityPipelineParseXMLProcessorType,), + } + + attribute_map = { + "always_use_text_key": "always_use_text_key", + "attr_prefix": "attr_prefix", + "display_name": "display_name", + "enabled": "enabled", + "field": "field", + "id": "id", + "include": "include", + "include_attr": "include_attr", + "parse_bool": "parse_bool", + "parse_null": "parse_null", + "parse_number": "parse_number", + "text_key": "text_key", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + field: str, + id: str, + include: str, + type: ObservabilityPipelineParseXMLProcessorType, + always_use_text_key: Union[bool, UnsetType] = unset, + attr_prefix: Union[str, UnsetType] = unset, + display_name: Union[str, UnsetType] = unset, + include_attr: Union[bool, UnsetType] = unset, + parse_bool: Union[bool, UnsetType] = unset, + parse_null: Union[bool, UnsetType] = unset, + parse_number: Union[bool, UnsetType] = unset, + text_key: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``parse_xml`` processor parses XML from a specified field and extracts it into the event. + + **Supported pipeline types:** logs + + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Indicates whether the processor is enabled. + :type enabled: bool + + :param field: The name of the log field that contains an XML string. + :type field: str + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param type: The processor type. 
The value should always be ``parse_xml``. + :type type: ObservabilityPipelineParseXMLProcessorType + """ + if always_use_text_key is not unset: + kwargs["always_use_text_key"] = always_use_text_key + if attr_prefix is not unset: + kwargs["attr_prefix"] = attr_prefix + if display_name is not unset: + kwargs["display_name"] = display_name + if include_attr is not unset: + kwargs["include_attr"] = include_attr + if parse_bool is not unset: + kwargs["parse_bool"] = parse_bool + if parse_null is not unset: + kwargs["parse_null"] = parse_null + if parse_number is not unset: + kwargs["parse_number"] = parse_number + if text_key is not unset: + kwargs["text_key"] = text_key + super().__init__(kwargs) + + self_.enabled = enabled + self_.field = field + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py new file mode 100644 index 0000000000..5e8f0a8285 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineParseXMLProcessorType(ModelSimple): + """ + The processor type. The value should always be `parse_xml`. + + :param value: If omitted defaults to "parse_xml". Must be one of ["parse_xml"]. + :type value: str + """ + + allowed_values = { + "parse_xml", + } + PARSE_XML: ClassVar["ObservabilityPipelineParseXMLProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineParseXMLProcessorType.PARSE_XML = ObservabilityPipelineParseXMLProcessorType("parse_xml") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py index a122a03915..329a051e31 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py @@ -56,6 +56,7 @@ def openapi_types(_): "overflow_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "overrides": ([ObservabilityPipelineQuotaProcessorOverride],), "partition_fields": ([str],), + "too_many_buckets_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "type": (ObservabilityPipelineQuotaProcessorType,), } @@ -71,6 +72,7 @@ def openapi_types(_): "overflow_action": "overflow_action", "overrides": "overrides", "partition_fields": "partition_fields", + "too_many_buckets_action": "too_many_buckets_action", "type": "type", } @@ -88,21 +90,24 @@ def __init__( overflow_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, overrides: Union[List[ObservabilityPipelineQuotaProcessorOverride], UnsetType] = unset, partition_fields: Union[List[str], UnsetType] = unset, + too_many_buckets_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, **kwargs, ): """ - The Quota Processor measures logging traffic for logs that match a specified filter. 
When the configured daily quota is met, the processor can drop or alert. + The ``quota`` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + **Supported pipeline types:** logs :param display_name: The display name for a component. :type display_name: str, optional - :param drop_events: If set to ``true`` , logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param drop_events: If set to ``true`` , logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note** : You can set either ``drop_events`` or ``overflow_action`` , but not both. :type drop_events: bool, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param ignore_when_missing_partitions: If ``true`` , the processor skips quota checks when partition fields are missing from the logs. @@ -117,7 +122,7 @@ def __init__( :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: * ``drop`` : Drop the event. * ``no_action`` : Let the event pass through. @@ -130,6 +135,13 @@ def __init__( :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + + * ``drop`` : Drop the event. + * ``no_action`` : Let the event pass through. + * ``overflow_routing`` : Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param type: The processor type. The value should always be ``quota``. :type type: ObservabilityPipelineQuotaProcessorType """ @@ -145,6 +157,8 @@ def __init__( kwargs["overrides"] = overrides if partition_fields is not unset: kwargs["partition_fields"] = partition_fields + if too_many_buckets_action is not unset: + kwargs["too_many_buckets_action"] = too_many_buckets_action super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py index 1341d5654d..3bf3e7727d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py @@ -14,7 +14,7 @@ class ObservabilityPipelineQuotaProcessorOverflowAction(ModelSimple): """ - The action to take when the quota is exceeded. Options: + The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. 
- `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py index 86cf0c092a..551ef575f1 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py @@ -66,10 +66,12 @@ def __init__( """ The ``reduce`` processor aggregates and merges logs based on matching keys and merge strategies. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param group_by: A list of fields used to group log events for merging. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py index d439328fca..584e068c8e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py @@ -57,16 +57,18 @@ def __init__( """ The ``remove_fields`` processor deletes specified fields from logs. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param fields: A list of field names to be removed from each log event. :type fields: [str] - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param include: A Datadog search query used to determine which logs this processor targets. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py index c32d90a194..57fa79be1c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py @@ -63,10 +63,12 @@ def __init__( """ The ``rename_fields`` processor changes field names. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param fields: A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields. 
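To close out this run of field-manipulation processors, a sketch of the `remove_fields` shape documented above; the type enum is assumed to follow the same single-value pattern as the other `*Type` models in this diff, and the field names are placeholders:

```python
from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import (
    ObservabilityPipelineRemoveFieldsProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor_type import (
    ObservabilityPipelineRemoveFieldsProcessorType,
)

processor = ObservabilityPipelineRemoveFieldsProcessor(
    enabled=True,
    fields=["internal.debug", "raw_payload"],  # hypothetical fields to drop
    id="remove-fields-processor",              # hypothetical component ID
    include="service:my-service",              # hypothetical Datadog search query
    type=ObservabilityPipelineRemoveFieldsProcessorType.REMOVE_FIELDS,
)
```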
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py index 486442d4aa..ead5f47857 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py @@ -62,6 +62,8 @@ def __init__( """ The ``rsyslog`` destination forwards logs to an external ``rsyslog`` server over TCP or UDP using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py index 90c9a6dc6b..7a490c05ea 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py @@ -59,7 +59,9 @@ def __init__( """ The ``rsyslog`` source listens for logs over TCP or UDP from an ``rsyslog`` server using the syslog protocol. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param mode: Protocol used by the syslog source to receive messages. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py index b301a81a73..b5615da170 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py @@ -3,7 +3,7 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import Union, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, @@ -21,8 +21,8 @@ class ObservabilityPipelineSampleProcessor(ModelNormal): validations = { - "rate": { - "inclusive_minimum": 1, + "group_by": { + "min_items": 1, }, } @@ -35,20 +35,20 @@ def openapi_types(_): return { "display_name": (str,), "enabled": (bool,), + "group_by": ([str],), "id": (str,), "include": (str,), "percentage": (float,), - "rate": (int,), "type": (ObservabilityPipelineSampleProcessorType,), } attribute_map = { "display_name": "display_name", "enabled": "enabled", + "group_by": "group_by", "id": "id", "include": "include", "percentage": "percentage", - "rate": "rate", "type": "type", } @@ -57,45 +57,46 @@ def __init__( enabled: bool, id: str, include: str, + percentage: float, type: ObservabilityPipelineSampleProcessorType, display_name: Union[str, UnsetType] = unset, - percentage: Union[float, UnsetType] = unset, - rate: Union[int, UnsetType] = unset, + group_by: Union[List[str], UnsetType] = unset, **kwargs, ): """ The ``sample`` processor allows probabilistic sampling of logs at a fixed rate. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :param group_by: Optional list of fields to group events by. Each group is sampled independently. + :type group_by: [str], optional + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param include: A Datadog search query used to determine which logs this processor targets. :type include: str :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). - :type rate: int, optional + :type percentage: float :param type: The processor type. The value should always be ``sample``. :type type: ObservabilityPipelineSampleProcessorType """ if display_name is not unset: kwargs["display_name"] = display_name - if percentage is not unset: - kwargs["percentage"] = percentage - if rate is not unset: - kwargs["rate"] = rate + if group_by is not unset: + kwargs["group_by"] = group_by super().__init__(kwargs) self_.enabled = enabled self_.id = id self_.include = include + self_.percentage = percentage self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py index 750d0619d3..1f747edb96 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py @@ -63,13 +63,15 @@ def __init__( """ The ``sensitive_data_scanner`` processor detects and optionally redacts sensitive data in log events. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param include: A Datadog search query used to determine which logs this processor targets. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py index da99100de6..6b7a504c70 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py @@ -3,10 +3,13 @@ # Copyright 2019-Present Datadog, Inc. 
from __future__ import annotations +from typing import Union from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) @@ -14,20 +17,27 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions(Mod @cached_property def openapi_types(_): return { + "description": (str,), "rule": (str,), } attribute_map = { + "description": "description", "rule": "rule", } - def __init__(self_, rule: str, **kwargs): + def __init__(self_, rule: str, description: Union[str, UnsetType] = unset, **kwargs): """ Options for defining a custom regex pattern. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param rule: A regular expression used to detect sensitive values. Must be a valid regex. :type rule: str """ + if description is not unset: + kwargs["description"] = description super().__init__(kwargs) self_.rule = rule diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py index 1389dec5a1..91ac818cdf 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py @@ -17,25 +17,38 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions(Mo @cached_property def openapi_types(_): return { + "description": (str,), "id": (str,), "use_recommended_keywords": (bool,), } attribute_map = { + "description": "description", "id": "id", "use_recommended_keywords": "use_recommended_keywords", } - def __init__(self_, id: str, use_recommended_keywords: Union[bool, UnsetType] = unset, **kwargs): + def __init__( + self_, + id: str, + description: Union[str, UnsetType] = unset, + use_recommended_keywords: Union[bool, UnsetType] = unset, + **kwargs, + ): """ Options for selecting a predefined library pattern and enabling keyword support. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param id: Identifier for a predefined pattern from the sensitive data scanner pattern library. :type id: str :param use_recommended_keywords: Whether to augment the pattern with recommended keywords (optional). :type use_recommended_keywords: bool, optional """ + if description is not unset: + kwargs["description"] = description if use_recommended_keywords is not unset: kwargs["use_recommended_keywords"] = use_recommended_keywords super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py index 924aa9b0f5..85fca46e4d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py @@ -55,6 +55,8 @@ def __init__( """ The ``sentinel_one`` destination sends logs to SentinelOne. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. 
:type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py index 8ff0d447f2..c631f27cc2 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py @@ -94,6 +94,8 @@ def __init__( """ The ``socket`` destination sends logs over TCP or UDP to a remote server. + **Supported pipeline types:** logs + :param encoding: Encoding format for log events. :type encoding: ObservabilityPipelineSocketDestinationEncoding diff --git a/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py b/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py index 23d1794735..cc981a064b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py @@ -90,10 +90,12 @@ def __init__( """ The ``socket`` source ingests logs over TCP or UDP. + **Supported pipeline types:** logs + :param framing: Framing method configuration for the socket source. :type framing: ObservabilityPipelineSocketSourceFraming - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param mode: Protocol used to receive logs. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py new file mode 100644 index 0000000000..1c42c1f58f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py @@ -0,0 +1,101 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + +class ObservabilityPipelineSplitArrayProcessor(ModelNormal): + validations = { + "arrays": { + "max_items": 15, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + return { + "arrays": ([ObservabilityPipelineSplitArrayProcessorArrayConfig],), + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineSplitArrayProcessorType,), + } + + attribute_map = { + "arrays": "arrays", + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + arrays: List[ObservabilityPipelineSplitArrayProcessorArrayConfig], + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineSplitArrayProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``split_array`` processor splits array fields into separate events based on configured rules. + + **Supported pipeline types:** logs + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Indicates whether the processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be ``*``. + :type include: str + + :param type: The processor type. The value should always be ``split_array``. + :type type: ObservabilityPipelineSplitArrayProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.arrays = arrays + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py new file mode 100644 index 0000000000..ac7133ee2c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSplitArrayProcessorArrayConfig(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "include": (str,), + } + + attribute_map = { + "field": "field", + "include": "include", + } + + def __init__(self_, field: str, include: str, **kwargs): + """ + Configuration for a single array split operation. + + :param field: The path to the array field to split. + :type field: str + + :param include: A Datadog search query used to determine which logs this array split operation targets. + :type include: str + """ + super().__init__(kwargs) + + self_.field = field + self_.include = include diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py new file mode 100644 index 0000000000..c6b6a7e9a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplitArrayProcessorType(ModelSimple): + """ + The processor type. The value should always be `split_array`. + + :param value: If omitted defaults to "split_array". Must be one of ["split_array"]. + :type value: str + """ + + allowed_values = { + "split_array", + } + SPLIT_ARRAY: ClassVar["ObservabilityPipelineSplitArrayProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY = ObservabilityPipelineSplitArrayProcessorType("split_array") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py index e36b4aee74..2b5820e3c3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py @@ -66,6 +66,8 @@ def __init__( """ The ``splunk_hec`` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + **Supported pipeline types:** logs + :param auto_extract_timestamp: If ``true`` , Splunk tries to extract timestamps from incoming log events. If ``false`` , Splunk assigns the time the event was received. :type auto_extract_timestamp: bool, optional @@ -73,7 +75,7 @@ def __init__( :param encoding: Encoding format for log events. :type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param index: Optional name of the Splunk index where logs are written. 
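A short sketch of the new ``split_array`` processor added above, wiring it to a single ``ObservabilityPipelineSplitArrayProcessorArrayConfig``. Per the ``min_items``/``max_items`` validation, the ``arrays`` list accepts between 1 and 15 configs; the ``field`` value below is illustrative::

    from datadog_api_client.v2.model.observability_pipeline_split_array_processor import (
        ObservabilityPipelineSplitArrayProcessor,
    )
    from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import (
        ObservabilityPipelineSplitArrayProcessorArrayConfig,
    )
    from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import (
        ObservabilityPipelineSplitArrayProcessorType,
    )

    # Splits each configured array field into separate log events.
    processor = ObservabilityPipelineSplitArrayProcessor(
        id="split-array-processor",
        type=ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY,
        include="*",  # the docstring recommends `*` for split_array
        enabled=True,
        arrays=[  # 1 to 15 entries allowed
            ObservabilityPipelineSplitArrayProcessorArrayConfig(
                field="log.events",  # hypothetical path to the array field
                include="*",
            ),
        ],
    )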
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py index bf8e2f976f..b1b36b9993 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py @@ -50,7 +50,9 @@ def __init__( """ The ``splunk_hec`` source implements the Splunk HTTP Event Collector (HEC) API. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py index 5cd32dc425..af2787cb1b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py @@ -51,7 +51,9 @@ def __init__( The ``splunk_tcp`` source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py index d49e3044f8..bd8b1f6a65 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py @@ -77,6 +77,8 @@ def __init__( """ The ``sumo_logic`` destination forwards logs to Sumo Logic. + **Supported pipeline types:** logs + :param encoding: The output encoding format. :type encoding: ObservabilityPipelineSumoLogicDestinationEncoding, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py index c02e14c8c1..78dee8b563 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py @@ -38,7 +38,9 @@ def __init__(self_, id: str, type: ObservabilityPipelineSumoLogicSourceType, **k """ The ``sumo_logic`` source receives logs from Sumo Logic collectors. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. 
Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param type: The source type. The value should always be ``sumo_logic``. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py index 4984e69b5e..f9cc83262e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py @@ -62,6 +62,8 @@ def __init__( """ The ``syslog_ng`` destination forwards logs to an external ``syslog-ng`` server over TCP or UDP using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py index 5f3e91d9a1..607791b358 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py @@ -59,7 +59,9 @@ def __init__( """ The ``syslog_ng`` source listens for logs over TCP or UDP from a ``syslog-ng`` server using the syslog protocol. - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the ``input`` to downstream components). :type id: str :param mode: Protocol used by the syslog source to receive messages. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py index 3240833d02..a67077e0fd 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py @@ -63,10 +63,12 @@ def __init__( """ The ``throttle`` processor limits the number of events that pass through over a given time window. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional - :param enabled: Whether this processor is enabled. + :param enabled: Indicates whether the processor is enabled. :type enabled: bool :param group_by: Optional list of fields used to group events before the threshold has been reached. 
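Tying back to the ``sample`` processor hunk earlier in this diff: the ``rate`` (1-in-N) keyword is removed, ``percentage`` becomes a required float, and an optional ``group_by`` list is added. A sketch of a migrated call, assuming the enum's ``SAMPLE`` member and illustrative ``group_by`` fields; a former ``rate=10`` corresponds to ``percentage=10.0``, since 1 in 10 is 10%::

    from datadog_api_client.v2.model.observability_pipeline_sample_processor import (
        ObservabilityPipelineSampleProcessor,
    )
    from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import (
        ObservabilityPipelineSampleProcessorType,
    )

    # `percentage` is now required; with `group_by`, each group is sampled independently.
    processor = ObservabilityPipelineSampleProcessor(
        id="sample-processor",
        type=ObservabilityPipelineSampleProcessorType.SAMPLE,
        include="service:my-service",
        enabled=True,
        percentage=10.0,               # replaces the removed `rate=10`
        group_by=["service", "host"],  # optional; illustrative field names
    )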
diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index ea611ffe86..7af7bf8cb6 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -2906,6 +2906,12 @@ from datadog_api_client.v2.model.observability_pipeline_add_fields_processor_type import ( ObservabilityPipelineAddFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( ObservabilityPipelineAmazonDataFirehoseSource, ) @@ -2944,10 +2950,19 @@ ObservabilityPipelineAmazonSecurityLakeDestinationType, ) from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, +) +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) +from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, +) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) @@ -2991,6 +3006,12 @@ from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, +) +from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( ObservabilityPipelineDatadogTagsProcessor, ) @@ -3017,6 +3038,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) +from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, +) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -3050,6 +3074,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, +) from datadog_api_client.v2.model.observability_pipeline_field_value import ObservabilityPipelineFieldValue from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( @@ -3122,6 
+3149,24 @@ from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import ( ObservabilityPipelineGooglePubSubSourceType, ) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ObservabilityPipelineHttpClientSource from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import ( ObservabilityPipelineHttpClientSourceAuthStrategy, @@ -3136,17 +3181,45 @@ from datadog_api_client.v2.model.observability_pipeline_http_server_source_type import ( ObservabilityPipelineHttpServerSourceType, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource -from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, +from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ObservabilityPipelineKafkaDestination +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ObservabilityPipelineKafkaSourceSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ObservabilityPipelineKafkaSourceType from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( ObservabilityPipelineLogstashSourceType, ) from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, 
+) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, @@ -3178,6 +3251,12 @@ from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, +) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, +) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) @@ -3199,8 +3278,11 @@ from datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type import ( ObservabilityPipelineParseJSONProcessorType, ) -from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import ( @@ -3422,6 +3504,15 @@ from datadog_api_client.v2.model.observability_pipeline_socket_source_type import ObservabilityPipelineSocketSourceType from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData +from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( ObservabilityPipelineSplunkHecDestination, ) @@ -7452,6 +7543,8 @@ "ObservabilityPipelineAddEnvVarsProcessorVariable", "ObservabilityPipelineAddFieldsProcessor", "ObservabilityPipelineAddFieldsProcessorType", + "ObservabilityPipelineAddHostnameProcessor", + "ObservabilityPipelineAddHostnameProcessorType", "ObservabilityPipelineAmazonDataFirehoseSource", "ObservabilityPipelineAmazonDataFirehoseSourceType", "ObservabilityPipelineAmazonOpenSearchDestination", @@ -7466,8 +7559,11 @@ 
"ObservabilityPipelineAmazonSecurityLakeDestination", "ObservabilityPipelineAmazonSecurityLakeDestinationType", "ObservabilityPipelineAwsAuth", + "ObservabilityPipelineCloudPremDestination", + "ObservabilityPipelineCloudPremDestinationType", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", + "ObservabilityPipelineConfigPipelineType", "ObservabilityPipelineConfigProcessorGroup", "ObservabilityPipelineConfigProcessorItem", "ObservabilityPipelineConfigSourceItem", @@ -7485,6 +7581,8 @@ "ObservabilityPipelineDatadogAgentSourceType", "ObservabilityPipelineDatadogLogsDestination", "ObservabilityPipelineDatadogLogsDestinationType", + "ObservabilityPipelineDatadogMetricsDestination", + "ObservabilityPipelineDatadogMetricsDestinationType", "ObservabilityPipelineDatadogTagsProcessor", "ObservabilityPipelineDatadogTagsProcessorAction", "ObservabilityPipelineDatadogTagsProcessorMode", @@ -7495,6 +7593,7 @@ "ObservabilityPipelineDedupeProcessorType", "ObservabilityPipelineElasticsearchDestination", "ObservabilityPipelineElasticsearchDestinationApiVersion", + "ObservabilityPipelineElasticsearchDestinationDataStream", "ObservabilityPipelineElasticsearchDestinationType", "ObservabilityPipelineEnrichmentTableFile", "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -7506,6 +7605,7 @@ "ObservabilityPipelineEnrichmentTableGeoIp", "ObservabilityPipelineEnrichmentTableProcessor", "ObservabilityPipelineEnrichmentTableProcessorType", + "ObservabilityPipelineEnrichmentTableReferenceTable", "ObservabilityPipelineFieldValue", "ObservabilityPipelineFilterProcessor", "ObservabilityPipelineFilterProcessorType", @@ -7534,19 +7634,35 @@ "ObservabilityPipelineGooglePubSubDestinationType", "ObservabilityPipelineGooglePubSubSource", "ObservabilityPipelineGooglePubSubSourceType", + "ObservabilityPipelineHttpClientDestination", + "ObservabilityPipelineHttpClientDestinationAuthStrategy", + "ObservabilityPipelineHttpClientDestinationCompression", + "ObservabilityPipelineHttpClientDestinationCompressionAlgorithm", + "ObservabilityPipelineHttpClientDestinationEncoding", + "ObservabilityPipelineHttpClientDestinationType", "ObservabilityPipelineHttpClientSource", "ObservabilityPipelineHttpClientSourceAuthStrategy", "ObservabilityPipelineHttpClientSourceType", "ObservabilityPipelineHttpServerSource", "ObservabilityPipelineHttpServerSourceAuthStrategy", "ObservabilityPipelineHttpServerSourceType", + "ObservabilityPipelineKafkaDestination", + "ObservabilityPipelineKafkaDestinationCompression", + "ObservabilityPipelineKafkaDestinationEncoding", + "ObservabilityPipelineKafkaDestinationType", + "ObservabilityPipelineKafkaLibrdkafkaOption", + "ObservabilityPipelineKafkaSasl", + "ObservabilityPipelineKafkaSaslMechanism", "ObservabilityPipelineKafkaSource", - "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - "ObservabilityPipelineKafkaSourceSasl", "ObservabilityPipelineKafkaSourceType", "ObservabilityPipelineLogstashSource", "ObservabilityPipelineLogstashSourceType", "ObservabilityPipelineMetadataEntry", + "ObservabilityPipelineMetricTagsProcessor", + "ObservabilityPipelineMetricTagsProcessorRule", + "ObservabilityPipelineMetricTagsProcessorRuleAction", + "ObservabilityPipelineMetricTagsProcessorRuleMode", + "ObservabilityPipelineMetricTagsProcessorType", "ObservabilityPipelineMetricValue", "ObservabilityPipelineNewRelicDestination", "ObservabilityPipelineNewRelicDestinationRegion", @@ -7558,6 +7674,8 @@ "ObservabilityPipelineOcsfMappingLibrary", "ObservabilityPipelineOpenSearchDestination", 
"ObservabilityPipelineOpenSearchDestinationType", + "ObservabilityPipelineOpentelemetrySource", + "ObservabilityPipelineOpentelemetrySourceType", "ObservabilityPipelineParseGrokProcessor", "ObservabilityPipelineParseGrokProcessorRule", "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -7565,7 +7683,8 @@ "ObservabilityPipelineParseGrokProcessorType", "ObservabilityPipelineParseJSONProcessor", "ObservabilityPipelineParseJSONProcessorType", - "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "ObservabilityPipelineParseXMLProcessor", + "ObservabilityPipelineParseXMLProcessorType", "ObservabilityPipelineQuotaProcessor", "ObservabilityPipelineQuotaProcessorLimit", "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -7646,6 +7765,9 @@ "ObservabilityPipelineSocketSourceType", "ObservabilityPipelineSpec", "ObservabilityPipelineSpecData", + "ObservabilityPipelineSplitArrayProcessor", + "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "ObservabilityPipelineSplitArrayProcessorType", "ObservabilityPipelineSplunkHecDestination", "ObservabilityPipelineSplunkHecDestinationEncoding", "ObservabilityPipelineSplunkHecDestinationType", diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen index 73133f1c75..69f96e35dd 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:15.575Z \ No newline at end of file +2026-01-09T15:42:36.842Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.yaml index 0099df74f0..44c741718f 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: accept: @@ -8,7 +8,7 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: string: '{"errors":[{"title":"Component with ID my-processor-group is an unknown diff --git 
a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen index 20165353d5..c09316ab61 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:16.062Z \ No newline at end of file +2026-01-09T15:42:37.370Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml index 6fc5f16843..874c407660 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: accept: @@ -8,11 +8,11 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":{"id":"bd8d693c-dc2c-11f0-bf69-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} + string: '{"data":{"id":"d33cceac-ed71-11f0-bd8c-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' headers: @@ -27,7 +27,7 @@ interactions: accept: - '*/*' method: 
DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bd8d693c-dc2c-11f0-bf69-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d33cceac-ed71-11f0-bd8c-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen index 85deda9192..e7cb42324d 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.165Z \ No newline at end of file +2026-01-09T15:42:38.750Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.yaml b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.yaml index 95dbe0e11a..e87901953f 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.yaml @@ -5,7 +5,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 response: body: string: '{"errors":[{"title":"Resource Not Found"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen index 201ee9bda8..f1c8586602 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.716Z \ No newline at end of file +2026-01-09T15:42:39.305Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml index c18e0ea31d..e8707537fe 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -10,11 +10,13 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: 
'{"data":{"id":"be89fea4-dc2c-11f0-bdea-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d46478de-ed71-11f0-99c8-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -31,7 +33,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d46478de-ed71-11f0-99c8-da7ad0900002 response: body: string: '' @@ -47,7 +49,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d46478de-ed71-11f0-99c8-da7ad0900002 response: body: string: '{"errors":[{"title":"Resource Not Found"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen index 2da6d0e5c1..6b8e4b6406 100644 --- a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:20.018Z \ No newline at end of file +2026-01-09T15:42:41.635Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml index 1d78b7a9d4..f8fdbaf1e0 100644 --- a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -10,11 +10,13 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d5da36ae-ed71-11f0-bd8e-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -31,11 +33,13 @@ interactions: accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d5da36ae-ed71-11f0-bd8e-da7ad0900002 response: body: - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d5da36ae-ed71-11f0-bd8e-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -52,7 +56,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d5da36ae-ed71-11f0-bd8e-da7ad0900002 response: body: string: '' diff --git 
a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen index a1ae2640d7..12cd810b8f 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.038Z \ No newline at end of file +2026-01-09T15:42:44.442Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml index 7bcaf73401..f8bd866497 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml @@ -5,7 +5,7 @@ interactions: accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines?page%5Bsize%5D=0 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines?page%5Bsize%5D=0 response: body: string: '{"errors":[{"title":"page[size] must be a number between 1 and 50"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen index fb745af4ce..f91e733f3e 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.507Z \ No newline at end of file +2026-01-09T15:42:45.631Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml index 65aa8d7f7f..9302618adc 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -10,11 +10,13 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: 
'{"data":{"id":"d831c96c-ed71-11f0-99ca-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -31,13 +33,72 @@ interactions: accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":[{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":[{"id":"4bf478ba-dc68-11f0-87e9-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"a78e416a-de66-11f0-a039-da7ad0900002","type":"pipelines","attributes":{"name":"http-server-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth_strategy":"plain","decoding":"json","id":"http-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/http.crt","key_file":"/etc/ssl/private/http.key"},"type":"http_server"}]}}},{"id":"a84fd58c-de66-11f0-a03b-da7ad0900002","type":"pipelines","attributes":{"name":"amazon_s3-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["s3-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/test-role","external_id":"external-test-id","session_name":"session-test"},"id":"s3-source-1","region":"us-east-1","tls":{"ca_file":"/etc/ssl/certs/s3.ca","crt_file":"/etc/ssl/certs/s3.crt","key_file":"/etc/ssl/private/s3.key"},"type":"amazon_s3"}]}}},{"id":"a42e22e0-df49-11f0-81d5-da7ad0900002","type":"pipelines","attributes":{"name":"dedupe + 
pipeline","config":{"destinations":[{"id":"destination-1","inputs":["dedupe-group-2"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"processors":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"2cd3c342-e0c2-11f0-9d34-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-group-1"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_fields"}]}],"processors":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_fields"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"20f4849c-e579-11f0-af79-da7ad0900002","type":"pipelines","attributes":{"name":"fluent-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["fluent-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"id":"fluent-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/fluent.crt","key_file":"/etc/ssl/private/fluent.key"},"type":"fluentd"}]}}},{"id":"15621afe-e669-11f0-bec3-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"dfbeb25a-e6c1-11f0-9bc1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor 
Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"923fbdb6-e771-11f0-9388-da7ad0900002","type":"pipelines","attributes":{"name":"http-client + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth_strategy":"basic","decoding":"json","id":"http-source-1","scrape_interval_secs":60,"scrape_timeout_secs":10,"tls":{"crt_file":"/path/to/http.crt"},"type":"http_client"}]}}},{"id":"a7b600ce-e771-11f0-939c-da7ad0900002","type":"pipelines","attributes":{"name":"newrelic + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["source-1"],"region":"us","type":"new_relic"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"306bab4c-e904-11f0-aa8a-da7ad0900002","type":"pipelines","attributes":{"name":"splunk-hec-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["splunk-hec-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"id":"splunk-hec-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/splunk.crt","key_file":"/etc/ssl/private/splunk.key"},"type":"splunk_hec"}]}}},{"id":"51faefca-e922-11f0-a260-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"8d025dea-ea96-11f0-8a79-da7ad0900002","type":"pipelines","attributes":{"name":"crowdstrike-next-gen-siem-destination-pipeline-basic","config":{"destinations":[{"encoding":"raw_message","id":"crowdstrike-dest-basic-1","inputs":["source-1"],"type":"crowdstrike_next_gen_siem"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"ed4d493e-eabf-11f0-852d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"29a560ae-ec7a-11f0-a7f4-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"65fb8a3e-ec92-11f0-8664-da7ad0900002","type":"pipelines","attributes":{"name":"amazon + s3 pipeline","config":{"destinations":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/example-role","external_id":"external-id-123","session_name":"s3-session"},"bucket":"my-logs-bucket","id":"s3-dest-1","inputs":["source-1"],"key_prefix":"logs/","region":"us-east-1","storage_class":"STANDARD","type":"amazon_s3"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"31a1d8e0-ec9e-11f0-baa1-da7ad0900002","type":"pipelines","attributes":{"name":"rsyslog-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["rsyslog-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"rsyslog-source-1","mode":"tcp","tls":{"crt_file":"/etc/certs/rsyslog.crt"},"type":"rsyslog"}]}}},{"id":"328f42d8-ec9e-11f0-baa3-da7ad0900002","type":"pipelines","attributes":{"name":"syslogng-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["syslogng-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"syslogng-source-1","mode":"udp","tls":{"crt_file":"/etc/certs/syslogng.crt"},"type":"syslog_ng"}]}}},{"id":"335c38ba-ec9e-11f0-baa5-da7ad0900002","type":"pipelines","attributes":{"name":"rsyslog-dest-pipeline","config":{"destinations":[{"id":"rsyslog-destination-1","inputs":["source-1"],"keepalive":60000,"tls":{"crt_file":"/etc/certs/rsyslog.crt"},"type":"rsyslog"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"b669fdac-eca6-11f0-9a8c-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"bfb53c8e-ecb8-11f0-b85b-da7ad0900002","type":"pipelines","attributes":{"name":"syslogng-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["syslogng-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"syslogng-source-1","mode":"udp","tls":{"crt_file":"/etc/certs/syslogng.crt"},"type":"syslog_ng"}]}}},{"id":"51cf915c-ed4d-11f0-a300-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"52b0c636-ed4d-11f0-b3ff-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5314eb34-ed4d-11f0-b401-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"53a48b72-ed4d-11f0-b403-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5400fc90-ed4d-11f0-a302-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"54ab9b78-ed4d-11f0-a304-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"f352d0f0-ed54-11f0-92c4-da7ad0900002","type":"pipelines","attributes":{"name":"gcs-destination-pipeline","config":{"destinations":[{"acl":"project-private","auth":{"credentials_file":"/var/secrets/gcp-creds.json"},"bucket":"my-gcs-bucket","id":"gcs-destination-1","inputs":["source-1"],"key_prefix":"logs/","metadata":[{"name":"environment","value":"production"},{"name":"team","value":"platform"}],"storage_class":"NEARLINE","type":"google_cloud_storage"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"d831c96c-ed71-11f0-99ca-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My - Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":1}} + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":29}} ' headers: @@ -52,7 +113,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c162e83e-dc2c-11f0-bf6b-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d831c96c-ed71-11f0-99ca-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen index 92a0e9377f..dd2c3e2c78 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:24.455Z \ No newline at end of file +2026-01-09T15:42:48.887Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml index 390b6216f0..07c9f81044 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -10,11 +10,13 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":{"id":"c28a5ad0-dc2c-11f0-bdee-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"da187384-ed71-11f0-bd90-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -26,7 +28,7 @@ interactions: code: 201 message: Created - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: accept: @@ -34,7 +36,7 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/da187384-ed71-11f0-bd90-da7ad0900002 response: body: string: '{"errors":[{"title":"Component with ID my-processor-group is an unknown @@ -56,7 +58,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/da187384-ed71-11f0-bd90-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen index b083fd34ae..9290a51038 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.411Z \ No newline at end of file +2026-01-09T15:42:51.159Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.yaml 
b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.yaml index ce0b290bb9..ba1aa75eb3 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: accept: @@ -8,7 +8,7 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 response: body: string: '{"errors":[{"title":"Not Found"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen index b9cbd5fcac..f264c1022b 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.929Z \ No newline at end of file +2026-01-09T15:42:51.716Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml index 5e9ce1cb89..9a0a03c29f 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -10,11 +10,13 @@ interactions: content-type: - application/json method: POST - uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"dbc8f316-ed71-11f0-99cc-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -26,7 +28,7 @@ interactions: code: 201 message: Created - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Updated + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Updated Pipeline Name"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: accept: @@ -34,11 +36,11 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/dbc8f316-ed71-11f0-99cc-da7ad0900002 response: body: - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Updated - Pipeline Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} + string: '{"data":{"id":"dbc8f316-ed71-11f0-99cc-da7ad0900002","type":"pipelines","attributes":{"name":"Updated + Pipeline 
Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' headers: @@ -53,7 +55,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/dbc8f316-ed71-11f0-99cc-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen index 745856e49a..6c17934ae2 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.179Z \ No newline at end of file +2026-01-09T15:42:54.207Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml index 1be82b24a9..0e32388b23 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: accept: @@ -8,7 +8,7 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/validate response: body: string: '{"errors":[{"title":"Field ''include'' is required","meta":{"field":"include","id":"filter-processor","message":"Field diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen 
b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen index ecfcc54162..25983fc2c3 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.647Z \ No newline at end of file +2026-01-09T15:42:54.714Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml index 192d22320f..1945f7fafb 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: accept: @@ -8,7 +8,7 @@ interactions: content-type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/validate response: body: string: '{"errors":[]} diff --git a/tests/v2/features/given.json b/tests/v2/features/given.json index aa08181879..3a01a060c5 100644 --- a/tests/v2/features/given.json +++ b/tests/v2/features/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processor_groups\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", 
"operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/tests/v2/features/observability_pipelines.feature b/tests/v2/features/observability_pipelines.feature index c43fa8b3b7..37f1ec8c74 100644 --- a/tests/v2/features/observability_pipelines.feature +++ b/tests/v2/features/observability_pipelines.feature @@ -12,7 +12,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Bad Request" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": 
{"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "pipeline_type": "logs", "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "processors": [], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -28,14 +28,14 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "OK" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 201 OK And the response "data" has field "id" And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Main Observability Pipeline" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 @generated @skip @team:DataDog/observability-pipelines @@ -75,7 +75,7 @@ Feature: Observability Pipelines And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Main Observability Pipeline" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 @team:DataDog/observability-pipelines @@ -97,7 +97,7 @@ Feature: Observability Pipelines And the response "data[0].type" is equal to "pipelines" And the response "data[0].attributes.name" is equal to "Main Observability Pipeline" And the response "data[0].attributes.config.sources" has length 1 - And the response "data[0].attributes.config.processors" has length 1 + And the response "data[0].attributes.config.processor_groups" has length 1 And the response "data[0].attributes.config.destinations" has length 1 @team:DataDog/observability-pipelines @@ -106,7 
+106,7 @@ Feature: Observability Pipelines And new "UpdatePipeline" request And there is a valid "pipeline" in the system And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "pipeline_type": "logs", "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "processors": [], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -124,7 +124,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": 
"my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 404 Not Found @@ -134,14 +134,14 @@ Feature: Observability Pipelines And there is a valid "pipeline" in the system And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "data" has field "id" And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Updated Pipeline Name" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 And the response "data.attributes.config.destinations[0].id" is equal to "updated-datadog-logs-destination-id" @@ -149,7 +149,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "Bad Request" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], 
"processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request And the response "errors[0].title" is equal to "Field 'include' is required" @@ -161,7 +161,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "OK" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "errors" has length 0 diff --git a/tests/v2/features/undo.json b/tests/v2/features/undo.json index b54378e93b..1b8a3c101a 100644 --- a/tests/v2/features/undo.json +++ b/tests/v2/features/undo.json @@ -2838,6 +2838,49 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "DeletePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, + "GetPipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "UpdatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3449,49 +3492,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": 
"data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "DeletePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, - "GetPipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "UpdatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, "DeleteRestrictionPolicy": { "tag": "Restriction Policies", "undo": {