From 35c8c1cbca67a587a0c695da9c84b697dedb7a13 Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Thu, 21 Aug 2025 17:08:30 +0000 Subject: [PATCH] Regenerate client from commit ca16233 of spec repo --- .generated-info | 4 +- .generator/schemas/v2/openapi.yaml | 110 ++++++++++++++++- lib/datadog_api_client/inflector.rb | 6 + .../v2/models/azure_storage_destination.rb | 12 +- .../models/microsoft_sentinel_destination.rb | 12 +- ...pipeline_amazon_open_search_destination.rb | 12 +- ...vability_pipeline_amazon_s3_destination.rb | 12 +- .../observability_pipeline_buffer_options.rb | 64 ++++++++++ ...ility_pipeline_buffer_options_disk_type.rb | 26 ++++ ...ity_pipeline_buffer_options_memory_type.rb | 26 ++++ ...ility_pipeline_datadog_logs_destination.rb | 12 +- ...ervability_pipeline_disk_buffer_options.rb | 115 ++++++++++++++++++ ...lity_pipeline_elasticsearch_destination.rb | 12 +- ...y_pipeline_google_chronicle_destination.rb | 12 +- ...peline_google_cloud_storage_destination.rb | 12 +- ...vability_pipeline_memory_buffer_options.rb | 115 ++++++++++++++++++ ...ity_pipeline_memory_buffer_size_options.rb | 115 ++++++++++++++++++ ...vability_pipeline_new_relic_destination.rb | 12 +- ...bility_pipeline_open_search_destination.rb | 12 +- .../observability_pipeline_quota_processor.rb | 33 ++--- ...ipeline_quota_processor_overflow_action.rb | 2 +- ...ervability_pipeline_rsyslog_destination.rb | 12 +- ...observability_pipeline_sample_processor.rb | 25 +++- ...ility_pipeline_sentinel_one_destination.rb | 12 +- ...ability_pipeline_splunk_hec_destination.rb | 12 +- ...ability_pipeline_sumo_logic_destination.rb | 12 +- ...vability_pipeline_syslog_ng_destination.rb | 12 +- 27 files changed, 782 insertions(+), 39 deletions(-) create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb create mode 100644 
lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb diff --git a/.generated-info b/.generated-info index 2bd9fe63b481..fede8ec44e41 100644 --- a/.generated-info +++ b/.generated-info @@ -1,4 +1,4 @@ { - "spec_repo_commit": "4413e63", - "generated": "2025-08-19 20:28:34.170" + "spec_repo_commit": "ca16233", + "generated": "2025-08-21 17:08:30.242" } diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 5dc52e4a2ee2..7be522b9349c 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4983,6 +4983,8 @@ components: description: Optional prefix for blobs written to the container. example: logs/ type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' container_name: description: The name of the Azure Blob Storage container to store logs in. @@ -25037,6 +25039,8 @@ components: description: The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' client_id: description: Azure AD client ID used for authentication. example: a1b2c3d4-5678-90ab-cdef-1234567890ab @@ -26669,6 +26673,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -26747,6 +26753,8 @@ components: description: S3 bucket name. 
example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: amazon-s3-destination @@ -26919,6 +26927,30 @@ components: role session. type: string type: object + ObservabilityPipelineBufferOptions: + description: Configuration for buffer settings on destination components. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions' + ObservabilityPipelineBufferOptionsDiskType: + default: disk + description: Specifies the buffer type to configure. This option supports only + a disk buffer. + enum: + - disk + type: string + x-enum-varnames: + - DISK + ObservabilityPipelineBufferOptionsMemoryType: + default: memory + description: Specifies the buffer type to configure. This option supports only + a memory buffer. + enum: + - memory + type: string + x-enum-varnames: + - MEMORY ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -27232,6 +27264,8 @@ components: ObservabilityPipelineDatadogLogsDestination: description: The `datadog_logs` destination forwards logs to Datadog Log Management. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: datadog-logs-destination @@ -27407,12 +27441,25 @@ components: type: string x-enum-varnames: - DEDUPE + ObservabilityPipelineDiskBufferOptions: + description: Options for configuring a disk buffer. + properties: + max_size: + description: Maximum size of the disk buffer. 
+ example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType' + type: object ObservabilityPipelineElasticsearchDestination: description: The `elasticsearch` destination writes logs to an Elasticsearch cluster. properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to in Elasticsearch. example: logs-index @@ -27897,6 +27944,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' customer_id: description: The Google Chronicle customer ID. example: abcdefg123456789 @@ -27963,6 +28012,8 @@ components: description: Name of the GCS bucket. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: gcs-destination @@ -28273,6 +28324,28 @@ components: type: string x-enum-varnames: - LOGSTASH + ObservabilityPipelineMemoryBufferOptions: + description: Options for configuring a memory buffer by byte size. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object + ObservabilityPipelineMemoryBufferSizeOptions: + description: Options for configuring a memory buffer by queue length. + properties: + max_events: + description: Maximum events for the memory buffer. + example: 500 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object ObservabilityPipelineMetadataEntry: description: A custom metadata entry. 
properties: @@ -28296,6 +28369,8 @@ components: ObservabilityPipelineNewRelicDestination: description: The `new_relic` destination sends logs to the New Relic platform. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: new-relic-destination @@ -28433,6 +28508,8 @@ components: ObservabilityPipelineOpenSearchDestination: description: The `opensearch` destination writes logs to an OpenSearch cluster. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -28657,9 +28734,10 @@ components: can drop or alert. properties: drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean id: @@ -28707,6 +28785,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -28714,7 +28794,6 @@ components: - type - include - name - - drop_events - limit - inputs type: object @@ -28745,7 +28824,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. 
@@ -29006,6 +29086,8 @@ components: description: The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: rsyslog-destination @@ -29076,6 +29158,16 @@ components: description: The `sample` processor allows probabilistic sampling of logs at a fixed rate. properties: + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -29480,6 +29572,8 @@ components: ObservabilityPipelineSentinelOneDestination: description: The `sentinel_one` destination sends logs to SentinelOne. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: sentinelone-destination @@ -29836,6 +29930,8 @@ components: ' example: true type: boolean + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: @@ -29949,6 +30045,8 @@ components: ObservabilityPipelineSumoLogicDestination: description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' header_custom_fields: @@ -30052,6 +30150,8 @@ components: description: The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. 
properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: syslog-ng-destination diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 31a9061949a4..88ea07ed0a29 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -2623,6 +2623,9 @@ def overrides "v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination", "v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType", "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", + "v2.observability_pipeline_buffer_options" => "ObservabilityPipelineBufferOptions", + "v2.observability_pipeline_buffer_options_disk_type" => "ObservabilityPipelineBufferOptionsDiskType", + "v2.observability_pipeline_buffer_options_memory_type" => "ObservabilityPipelineBufferOptionsMemoryType", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", "v2.observability_pipeline_config_processor_item" => "ObservabilityPipelineConfigProcessorItem", @@ -2649,6 +2652,7 @@ def overrides "v2.observability_pipeline_dedupe_processor" => "ObservabilityPipelineDedupeProcessor", "v2.observability_pipeline_dedupe_processor_mode" => "ObservabilityPipelineDedupeProcessorMode", "v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType", + "v2.observability_pipeline_disk_buffer_options" => "ObservabilityPipelineDiskBufferOptions", "v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination", "v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion", 
"v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType", @@ -2699,6 +2703,8 @@ def overrides "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", + "v2.observability_pipeline_memory_buffer_options" => "ObservabilityPipelineMemoryBufferOptions", + "v2.observability_pipeline_memory_buffer_size_options" => "ObservabilityPipelineMemoryBufferSizeOptions", "v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry", "v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue", "v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination", diff --git a/lib/datadog_api_client/v2/models/azure_storage_destination.rb b/lib/datadog_api_client/v2/models/azure_storage_destination.rb index ef29c2cade85..98477b8c88cf 100644 --- a/lib/datadog_api_client/v2/models/azure_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/azure_storage_destination.rb @@ -24,6 +24,9 @@ class AzureStorageDestination # Optional prefix for blobs written to the container. attr_accessor :blob_prefix + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The name of the Azure Blob Storage container to store logs in. 
attr_reader :container_name @@ -43,6 +46,7 @@ class AzureStorageDestination def self.attribute_map { :'blob_prefix' => :'blob_prefix', + :'buffer' => :'buffer', :'container_name' => :'container_name', :'id' => :'id', :'inputs' => :'inputs', @@ -55,6 +59,7 @@ def self.attribute_map def self.openapi_types { :'blob_prefix' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'container_name' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -84,6 +89,10 @@ def initialize(attributes = {}) self.blob_prefix = attributes[:'blob_prefix'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'container_name') self.container_name = attributes[:'container_name'] end @@ -181,6 +190,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && blob_prefix == o.blob_prefix && + buffer == o.buffer && container_name == o.container_name && id == o.id && inputs == o.inputs && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [blob_prefix, container_name, id, inputs, type, additional_properties].hash + [blob_prefix, buffer, container_name, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb index 03266b733402..e598f24bf7f2 100644 --- a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb +++ b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class MicrosoftSentinelDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Azure AD client ID used for authentication. 
attr_reader :client_id @@ -48,6 +51,7 @@ class MicrosoftSentinelDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'client_id' => :'client_id', :'dcr_immutable_id' => :'dcr_immutable_id', :'id' => :'id', @@ -62,6 +66,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'client_id' => :'String', :'dcr_immutable_id' => :'String', :'id' => :'String', @@ -90,6 +95,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'client_id') self.client_id = attributes[:'client_id'] end @@ -231,6 +240,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && client_id == o.client_id && dcr_immutable_id == o.dcr_immutable_id && id == o.id && @@ -245,7 +255,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [client_id, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash + [buffer, client_id, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb index bad2077f94ae..314413a780de 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb @@ -26,6 +26,9 @@ class ObservabilityPipelineAmazonOpenSearchDestination # attr_reader :auth + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to. 
attr_accessor :bulk_index @@ -45,6 +48,7 @@ class ObservabilityPipelineAmazonOpenSearchDestination def self.attribute_map { :'auth' => :'auth', + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -57,6 +61,7 @@ def self.attribute_map def self.openapi_types { :'auth' => :'ObservabilityPipelineAmazonOpenSearchDestinationAuth', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -86,6 +91,10 @@ def initialize(attributes = {}) self.auth = attributes[:'auth'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -183,6 +192,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auth == o.auth && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -194,7 +204,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, bulk_index, id, inputs, type, additional_properties].hash + [auth, buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb index fbc649e44871..0aabeb29dcaa 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb @@ -29,6 +29,9 @@ class ObservabilityPipelineAmazonS3Destination # S3 bucket name. attr_reader :bucket + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Unique identifier for the destination component. 
attr_reader :id @@ -58,6 +61,7 @@ def self.attribute_map { :'auth' => :'auth', :'bucket' => :'bucket', + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'key_prefix' => :'key_prefix', @@ -74,6 +78,7 @@ def self.openapi_types { :'auth' => :'ObservabilityPipelineAwsAuth', :'bucket' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'key_prefix' => :'String', @@ -110,6 +115,10 @@ def initialize(attributes = {}) self.bucket = attributes[:'bucket'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -242,6 +251,7 @@ def ==(o) self.class == o.class && auth == o.auth && bucket == o.bucket && + buffer == o.buffer && id == o.id && inputs == o.inputs && key_prefix == o.key_prefix && @@ -256,7 +266,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, bucket, id, inputs, key_prefix, region, storage_class, tls, type, additional_properties].hash + [auth, bucket, buffer, id, inputs, key_prefix, region, storage_class, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb new file mode 100644 index 000000000000..ef7445c56a0c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb @@ -0,0 +1,64 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. 
+ +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration for buffer settings on destination components. + module ObservabilityPipelineBufferOptions + class << self + include BaseOneOfModel + include BaseOneOfModelNoDiscriminator + + # List of class defined in oneOf (OpenAPI v3) + def openapi_one_of + [ + :'ObservabilityPipelineDiskBufferOptions', + :'ObservabilityPipelineMemoryBufferOptions', + :'ObservabilityPipelineMemoryBufferSizeOptions' + ] + end + # Builds the object + # @param data [Mixed] Data to be matched against the list of oneOf items + # @return [Object] Returns the model or the data itself + def build(data) + # Go through the list of oneOf items and attempt to identify the appropriate one. + # Note: + # - We do not attempt to check whether exactly one item matches. + # - No advanced validation of types in some cases (e.g. "x: { type: string }" will happily match { x: 123 }) + # due to the way the deserialization is made in the base_object template (it just casts without verifying). + # - TODO: scalar values are de facto behaving as if they were nullable. + # - TODO: logging when debugging is set. 
+ openapi_one_of.each do |klass| + begin + next if klass == :AnyType # "nullable: true" + typed_data = find_and_cast_into_type(klass, data) + next if typed_data.respond_to?(:_unparsed) && typed_data._unparsed + return typed_data if typed_data + rescue # rescue all errors so we keep iterating even if the current item lookup raises + end + end + + if openapi_one_of.include?(:AnyType) + data + else + self._unparsed = true + DatadogAPIClient::UnparsedObject.new(data) + end + end + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb new file mode 100644 index 000000000000..0978418d37c1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Specifies the buffer type to configure. This option supports only a disk buffer. 
+ class ObservabilityPipelineBufferOptionsDiskType + include BaseEnumModel + + DISK = "disk".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb new file mode 100644 index 000000000000..bd92914d4ab7 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Specifies the buffer type to configure. This option supports only a memory buffer. + class ObservabilityPipelineBufferOptionsMemoryType + include BaseEnumModel + + MEMORY = "memory".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb index 3b08d1fd9c22..91ea7f0d63d2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineDatadogLogsDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -36,6 +39,7 @@ class ObservabilityPipelineDatadogLogsDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'type' => :'type' @@ -46,6 +50,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'type' => :'ObservabilityPipelineDatadogLogsDestinationType' @@ -70,6 +75,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -151,6 +160,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && type == o.type && @@ -161,7 +171,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, type, additional_properties].hash + [buffer, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb new file mode 100644 index 000000000000..802fd22d351e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a disk buffer. 
+ class ObservabilityPipelineDiskBufferOptions + include BaseGenericModel + + # Maximum size of the disk buffer. + attr_accessor :max_size + + # Specifies the buffer type to configure. This option supports only a disk buffer. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_size' => :'max_size', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_size' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsDiskType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineDiskBufferOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_size') + self.max_size = attributes[:'max_size'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_size == o.max_size && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_size, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a047..678965a86003 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineElasticsearchDestination # The Elasticsearch API version to use. Set to `auto` to auto-detect. attr_accessor :api_version + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to in Elasticsearch. 
attr_accessor :bulk_index @@ -43,6 +46,7 @@ class ObservabilityPipelineElasticsearchDestination def self.attribute_map { :'api_version' => :'api_version', + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -55,6 +59,7 @@ def self.attribute_map def self.openapi_types { :'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -84,6 +89,10 @@ def initialize(attributes = {}) self.api_version = attributes[:'api_version'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -170,6 +179,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && api_version == o.api_version && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -181,7 +191,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [api_version, bulk_index, id, inputs, type, additional_properties].hash + [api_version, buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb index 7b33f026c51b..d1f242592019 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb @@ -25,6 +25,9 @@ class ObservabilityPipelineGoogleChronicleDestination # attr_reader :auth + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The Google Chronicle customer ID. 
attr_reader :customer_id @@ -50,6 +53,7 @@ class ObservabilityPipelineGoogleChronicleDestination def self.attribute_map { :'auth' => :'auth', + :'buffer' => :'buffer', :'customer_id' => :'customer_id', :'encoding' => :'encoding', :'id' => :'id', @@ -64,6 +68,7 @@ def self.attribute_map def self.openapi_types { :'auth' => :'ObservabilityPipelineGcpAuth', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'customer_id' => :'String', :'encoding' => :'ObservabilityPipelineGoogleChronicleDestinationEncoding', :'id' => :'String', @@ -95,6 +100,10 @@ def initialize(attributes = {}) self.auth = attributes[:'auth'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'customer_id') self.customer_id = attributes[:'customer_id'] end @@ -211,6 +220,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auth == o.auth && + buffer == o.buffer && customer_id == o.customer_id && encoding == o.encoding && id == o.id && @@ -224,7 +234,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, customer_id, encoding, id, inputs, log_type, type, additional_properties].hash + [auth, buffer, customer_id, encoding, id, inputs, log_type, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb index 57d38e58b5a7..4b39b0746cfe 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb @@ -32,6 +32,9 @@ class ObservabilityPipelineGoogleCloudStorageDestination # Name of the GCS bucket. attr_reader :bucket + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Unique identifier for the destination component. 
attr_reader :id @@ -59,6 +62,7 @@ def self.attribute_map :'acl' => :'acl', :'auth' => :'auth', :'bucket' => :'bucket', + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'key_prefix' => :'key_prefix', @@ -75,6 +79,7 @@ def self.openapi_types :'acl' => :'ObservabilityPipelineGoogleCloudStorageDestinationAcl', :'auth' => :'ObservabilityPipelineGcpAuth', :'bucket' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'key_prefix' => :'String', @@ -114,6 +119,10 @@ def initialize(attributes = {}) self.bucket = attributes[:'bucket'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -256,6 +265,7 @@ def ==(o) acl == o.acl && auth == o.auth && bucket == o.bucket && + buffer == o.buffer && id == o.id && inputs == o.inputs && key_prefix == o.key_prefix && @@ -269,7 +279,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [acl, auth, bucket, id, inputs, key_prefix, metadata, storage_class, type, additional_properties].hash + [acl, auth, bucket, buffer, id, inputs, key_prefix, metadata, storage_class, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb new file mode 100644 index 000000000000..5e3ebe665ee9 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a memory buffer by byte size. + class ObservabilityPipelineMemoryBufferOptions + include BaseGenericModel + + # Maximum size of the memory buffer. + attr_accessor :max_size + + # Specifies the buffer type to configure. This option supports only a memory buffer. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_size' => :'max_size', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_size' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsMemoryType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMemoryBufferOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_size') + self.max_size = attributes[:'max_size'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_size == o.max_size && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_size, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb new file mode 100644 index 000000000000..55c78eb719bc --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a memory buffer by queue length. 
+ class ObservabilityPipelineMemoryBufferSizeOptions + include BaseGenericModel + + # Maximum events for the memory buffer. + attr_accessor :max_events + + # Specifies the buffer type to configure. This option supports only a memory buffer. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_events' => :'max_events', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_events' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsMemoryType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMemoryBufferSizeOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_events') + self.max_events = attributes[:'max_events'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_events == o.max_events && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_events, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb index d8705a24d962..eae6ee6e3d24 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineNewRelicDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -39,6 +42,7 @@ class ObservabilityPipelineNewRelicDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'region' => :'region', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'region' => :'ObservabilityPipelineNewRelicDestinationRegion', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +180,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && region == o.region && @@ -182,7 +192,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, region, type, additional_properties].hash + [buffer, id, inputs, region, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb index 2120c2eef855..4c1adfbc8590 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineOpenSearchDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to. 
attr_accessor :bulk_index @@ -39,6 +42,7 @@ class ObservabilityPipelineOpenSearchDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -160,6 +169,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -171,7 +181,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [bulk_index, id, inputs, type, additional_properties].hash + [buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 1e9cea452d64..f60703daeb01 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -21,8 +21,8 @@ module DatadogAPIClient::V2 class ObservabilityPipelineQuotaProcessor include BaseGenericModel - # If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. - attr_reader :drop_events + # If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. 
**Note**: You can set either `drop_events` or `overflow_action`, but not both. + attr_accessor :drop_events # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). attr_reader :id @@ -42,7 +42,7 @@ class ObservabilityPipelineQuotaProcessor # Name of the quota. attr_reader :name - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. @@ -55,6 +55,13 @@ class ObservabilityPipelineQuotaProcessor # A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. attr_accessor :partition_fields + # The action to take when the quota or bucket limit is exceeded. Options: + # - `drop`: Drop the event. + # - `no_action`: Let the event pass through. + # - `overflow_routing`: Route to an overflow destination. + # + attr_accessor :too_many_buckets_action + # The processor type. The value should always be `quota`. 
attr_reader :type @@ -74,6 +81,7 @@ def self.attribute_map :'overflow_action' => :'overflow_action', :'overrides' => :'overrides', :'partition_fields' => :'partition_fields', + :'too_many_buckets_action' => :'too_many_buckets_action', :'type' => :'type' } end @@ -92,6 +100,7 @@ def self.openapi_types :'overflow_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'overrides' => :'Array', :'partition_fields' => :'Array', + :'too_many_buckets_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'type' => :'ObservabilityPipelineQuotaProcessorType' } end @@ -160,6 +169,10 @@ def initialize(attributes = {}) end end + if attributes.key?(:'too_many_buckets_action') + self.too_many_buckets_action = attributes[:'too_many_buckets_action'] + end + if attributes.key?(:'type') self.type = attributes[:'type'] end @@ -169,7 +182,6 @@ def initialize(attributes = {}) # @return true if the model is valid # @!visibility private def valid? - return false if @drop_events.nil? return false if @id.nil? return false if @include.nil? return false if @inputs.nil? @@ -179,16 +191,6 @@ def valid? true end - # Custom attribute writer method with validation - # @param drop_events [Object] Object to be assigned - # @!visibility private - def drop_events=(drop_events) - if drop_events.nil? - fail ArgumentError, 'invalid value for "drop_events", drop_events cannot be nil.' 
- end - @drop_events = drop_events - end - # Custom attribute writer method with validation # @param id [Object] Object to be assigned # @!visibility private @@ -285,6 +287,7 @@ def ==(o) overflow_action == o.overflow_action && overrides == o.overrides && partition_fields == o.partition_fields && + too_many_buckets_action == o.too_many_buckets_action && type == o.type && additional_properties == o.additional_properties end @@ -293,7 +296,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [drop_events, id, ignore_when_missing_partitions, include, inputs, limit, name, overflow_action, overrides, partition_fields, type, additional_properties].hash + [drop_events, id, ignore_when_missing_partitions, include, inputs, limit, name, overflow_action, overrides, partition_fields, too_many_buckets_action, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb index 4990af20ad61..06bc2d264f1f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb @@ -17,7 +17,7 @@ require 'time' module DatadogAPIClient::V2 - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. 
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb index 4c15ba87ef60..fed60c0335e7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineRsyslogDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. attr_reader :id @@ -42,6 +45,7 @@ class ObservabilityPipelineRsyslogDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'keepalive' => :'keepalive', @@ -54,6 +58,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'keepalive' => :'Integer', @@ -80,6 +85,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -180,6 +189,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && keepalive == o.keepalive && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, keepalive, tls, type, additional_properties].hash + [buffer, id, inputs, keepalive, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb index 32229c4bad55..3eed16813afb 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb +++ 
b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSampleProcessor include BaseGenericModel + # Optional list of fields to group events by. Each group is sampled independently. + attr_reader :group_by + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). attr_reader :id @@ -45,6 +48,7 @@ class ObservabilityPipelineSampleProcessor # @!visibility private def self.attribute_map { + :'group_by' => :'group_by', :'id' => :'id', :'include' => :'include', :'inputs' => :'inputs', @@ -58,6 +62,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'group_by' => :'Array', :'id' => :'String', :'include' => :'String', :'inputs' => :'Array', @@ -85,6 +90,12 @@ def initialize(attributes = {}) end } + if attributes.key?(:'group_by') + if (value = attributes[:'group_by']).is_a?(Array) + self.group_by = value + end + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -116,6 +127,7 @@ def initialize(attributes = {}) # @return true if the model is valid # @!visibility private def valid? + return false if !@group_by.nil? && @group_by.length < 1 return false if @id.nil? return false if @include.nil? return false if @inputs.nil? @@ -124,6 +136,16 @@ def valid? true end + # Custom attribute writer method with validation + # @param group_by [Object] Object to be assigned + # @!visibility private + def group_by=(group_by) + if !group_by.nil? && group_by.length < 1 + fail ArgumentError, 'invalid value for "group_by", number of items must be greater than or equal to 1.' 
+ end + @group_by = group_by + end + # Custom attribute writer method with validation # @param id [Object] Object to be assigned # @!visibility private @@ -200,6 +222,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + group_by == o.group_by && id == o.id && include == o.include && inputs == o.inputs && @@ -213,7 +236,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, include, inputs, percentage, rate, type, additional_properties].hash + [group_by, id, include, inputs, percentage, rate, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb index 9d0af0150f23..ac4c5bf690f2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSentinelOneDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -39,6 +42,7 @@ class ObservabilityPipelineSentinelOneDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'region' => :'region', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'region' => :'ObservabilityPipelineSentinelOneDestinationRegion', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +180,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && region == o.region && @@ -182,7 +192,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, region, type, additional_properties].hash + [buffer, id, inputs, region, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb index 5410eaf32d69..32d71aea3cd6 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb @@ -26,6 +26,9 @@ class ObservabilityPipelineSplunkHecDestination # attr_accessor :auto_extract_timestamp + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Encoding format for log events. 
attr_accessor :encoding @@ -51,6 +54,7 @@ class ObservabilityPipelineSplunkHecDestination def self.attribute_map { :'auto_extract_timestamp' => :'auto_extract_timestamp', + :'buffer' => :'buffer', :'encoding' => :'encoding', :'id' => :'id', :'index' => :'index', @@ -65,6 +69,7 @@ def self.attribute_map def self.openapi_types { :'auto_extract_timestamp' => :'Boolean', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineSplunkHecDestinationEncoding', :'id' => :'String', :'index' => :'String', @@ -96,6 +101,10 @@ def initialize(attributes = {}) self.auto_extract_timestamp = attributes[:'auto_extract_timestamp'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -190,6 +199,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auto_extract_timestamp == o.auto_extract_timestamp && + buffer == o.buffer && encoding == o.encoding && id == o.id && index == o.index && @@ -203,7 +213,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auto_extract_timestamp, encoding, id, index, inputs, sourcetype, type, additional_properties].hash + [auto_extract_timestamp, buffer, encoding, id, index, inputs, sourcetype, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb index 6193fb57f020..ebaf3cf1b271 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSumoLogicDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The output encoding format. 
attr_accessor :encoding @@ -51,6 +54,7 @@ class ObservabilityPipelineSumoLogicDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'encoding' => :'encoding', :'header_custom_fields' => :'header_custom_fields', :'header_host_name' => :'header_host_name', @@ -66,6 +70,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineSumoLogicDestinationEncoding', :'header_custom_fields' => :'Array', :'header_host_name' => :'String', @@ -95,6 +100,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -198,6 +207,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && encoding == o.encoding && header_custom_fields == o.header_custom_fields && header_host_name == o.header_host_name && @@ -213,7 +223,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [encoding, header_custom_fields, header_host_name, header_source_category, header_source_name, id, inputs, type, additional_properties].hash + [buffer, encoding, header_custom_fields, header_host_name, header_source_category, header_source_name, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb index 045dcfe90403..b2242d7eecdd 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSyslogNgDestination include BaseGenericModel + # Configuration for buffer settings on destination components. 
+ attr_accessor :buffer + # The unique identifier for this component. attr_reader :id @@ -42,6 +45,7 @@ class ObservabilityPipelineSyslogNgDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'keepalive' => :'keepalive', @@ -54,6 +58,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'keepalive' => :'Integer', @@ -80,6 +85,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -180,6 +189,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && keepalive == o.keepalive && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, keepalive, tls, type, additional_properties].hash + [buffer, id, inputs, keepalive, tls, type, additional_properties].hash end end end