Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
195 changes: 186 additions & 9 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41994,6 +41994,191 @@ components:
type: string
x-enum-varnames:
- AMAZON_S3
ObservabilityPipelineAmazonS3GenericBatchSettings:
description: Event batching settings
properties:
batch_size:
description: Maximum batch size in bytes.
example: 100000000
format: int64
type: integer
timeout_secs:
description: Maximum number of seconds to wait before flushing the batch.
example: 900
format: int64
type: integer
type: object
ObservabilityPipelineAmazonS3GenericCompression:
description: Compression algorithm applied to encoded logs.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionZstd'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionGzip'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionSnappy'
ObservabilityPipelineAmazonS3GenericCompressionGzip:
description: Gzip compression.
properties:
level:
description: Gzip compression level.
example: 6
format: int64
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionGzipType'
required:
- type
- level
type: object
ObservabilityPipelineAmazonS3GenericCompressionGzipType:
default: gzip
description: The compression type. Always `gzip`.
enum:
- gzip
example: gzip
type: string
x-enum-varnames:
- GZIP
ObservabilityPipelineAmazonS3GenericCompressionSnappy:
description: Snappy compression.
properties:
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionSnappyType'
required:
- type
type: object
ObservabilityPipelineAmazonS3GenericCompressionSnappyType:
default: snappy
description: The compression type. Always `snappy`.
enum:
- snappy
example: snappy
type: string
x-enum-varnames:
- SNAPPY
ObservabilityPipelineAmazonS3GenericCompressionZstd:
description: Zstd compression.
properties:
level:
description: Zstd compression level.
example: 3
format: int64
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompressionZstdType'
required:
- type
- level
type: object
ObservabilityPipelineAmazonS3GenericCompressionZstdType:
default: zstd
description: The compression type. Always `zstd`.
enum:
- zstd
example: zstd
type: string
x-enum-varnames:
- ZSTD
ObservabilityPipelineAmazonS3GenericDestination:
description: 'The `amazon_s3_generic` destination sends your logs to an Amazon
S3 bucket.


**Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAwsAuth'
batch_settings:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericBatchSettings'
bucket:
description: S3 bucket name.
example: my-bucket
type: string
compression:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericCompression'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericEncoding'
id:
description: Unique identifier for the destination component.
example: generic-s3-destination
type: string
inputs:
description: A list of component IDs whose output is used as the `input`
for this component.
example:
- filter-processor
items:
type: string
type: array
key_prefix:
description: Optional prefix for object keys.
type: string
region:
description: AWS region of the S3 bucket.
example: us-east-1
type: string
storage_class:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationStorageClass'
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericDestinationType'
required:
- id
- type
- inputs
- bucket
- region
- storage_class
- encoding
- compression
type: object
x-pipeline-types:
- logs
ObservabilityPipelineAmazonS3GenericDestinationType:
default: amazon_s3_generic
description: The destination type. Always `amazon_s3_generic`.
enum:
- amazon_s3_generic
example: amazon_s3_generic
type: string
x-enum-varnames:
- GENERIC_ARCHIVES_S3
ObservabilityPipelineAmazonS3GenericEncoding:
description: Encoding format for the destination.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericEncodingJson'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericEncodingParquet'
ObservabilityPipelineAmazonS3GenericEncodingJson:
description: JSON encoding.
properties:
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericEncodingJsonType'
required:
- type
type: object
ObservabilityPipelineAmazonS3GenericEncodingJsonType:
default: json
description: The encoding type. Always `json`.
enum:
- json
example: json
type: string
x-enum-varnames:
- JSON
ObservabilityPipelineAmazonS3GenericEncodingParquet:
description: Parquet encoding.
properties:
type:
$ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericEncodingParquetType'
required:
- type
type: object
ObservabilityPipelineAmazonS3GenericEncodingParquetType:
default: parquet
description: The encoding type. Always `parquet`.
enum:
- parquet
example: parquet
type: string
x-enum-varnames:
- PARQUET
ObservabilityPipelineAmazonS3Source:
description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket.

Expand Down Expand Up @@ -42269,6 +42454,7 @@ components:
- $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3GenericDestination'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination'
- $ref: '#/components/schemas/AzureStorageDestination'
- $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination'
Expand Down Expand Up @@ -46462,15 +46648,6 @@ components:
description: Optional name of the Splunk index where logs are written.
example: main
type: string
indexed_fields:
description: List of log field names to send as indexed fields to Splunk
HEC. Available only when `encoding` is `json`.
example:
- service
- host
items:
type: string
type: array
inputs:
description: A list of component IDs whose output is used as the `input`
for this component.
Expand Down
15 changes: 15 additions & 0 deletions lib/datadog_api_client/inflector.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3590,6 +3590,21 @@ def overrides
"v2.observability_pipeline_amazon_s3_destination" => "ObservabilityPipelineAmazonS3Destination",
"v2.observability_pipeline_amazon_s3_destination_storage_class" => "ObservabilityPipelineAmazonS3DestinationStorageClass",
"v2.observability_pipeline_amazon_s3_destination_type" => "ObservabilityPipelineAmazonS3DestinationType",
"v2.observability_pipeline_amazon_s3_generic_batch_settings" => "ObservabilityPipelineAmazonS3GenericBatchSettings",
"v2.observability_pipeline_amazon_s3_generic_compression" => "ObservabilityPipelineAmazonS3GenericCompression",
"v2.observability_pipeline_amazon_s3_generic_compression_gzip" => "ObservabilityPipelineAmazonS3GenericCompressionGzip",
"v2.observability_pipeline_amazon_s3_generic_compression_gzip_type" => "ObservabilityPipelineAmazonS3GenericCompressionGzipType",
"v2.observability_pipeline_amazon_s3_generic_compression_snappy" => "ObservabilityPipelineAmazonS3GenericCompressionSnappy",
"v2.observability_pipeline_amazon_s3_generic_compression_snappy_type" => "ObservabilityPipelineAmazonS3GenericCompressionSnappyType",
"v2.observability_pipeline_amazon_s3_generic_compression_zstd" => "ObservabilityPipelineAmazonS3GenericCompressionZstd",
"v2.observability_pipeline_amazon_s3_generic_compression_zstd_type" => "ObservabilityPipelineAmazonS3GenericCompressionZstdType",
"v2.observability_pipeline_amazon_s3_generic_destination" => "ObservabilityPipelineAmazonS3GenericDestination",
"v2.observability_pipeline_amazon_s3_generic_destination_type" => "ObservabilityPipelineAmazonS3GenericDestinationType",
"v2.observability_pipeline_amazon_s3_generic_encoding" => "ObservabilityPipelineAmazonS3GenericEncoding",
"v2.observability_pipeline_amazon_s3_generic_encoding_json" => "ObservabilityPipelineAmazonS3GenericEncodingJson",
"v2.observability_pipeline_amazon_s3_generic_encoding_json_type" => "ObservabilityPipelineAmazonS3GenericEncodingJsonType",
"v2.observability_pipeline_amazon_s3_generic_encoding_parquet" => "ObservabilityPipelineAmazonS3GenericEncodingParquet",
"v2.observability_pipeline_amazon_s3_generic_encoding_parquet_type" => "ObservabilityPipelineAmazonS3GenericEncodingParquetType",
"v2.observability_pipeline_amazon_s3_source" => "ObservabilityPipelineAmazonS3Source",
"v2.observability_pipeline_amazon_s3_source_type" => "ObservabilityPipelineAmazonS3SourceType",
"v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
=begin
#Datadog API V2 Collection

#Collection of all Datadog Public endpoints.

The version of the OpenAPI document: 1.0
Contact: support@datadoghq.com
Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator

Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.

=end

require 'date'
require 'time'

module DatadogAPIClient::V2
  # Event batching settings for the `amazon_s3_generic` destination: controls
  # how many bytes are buffered and how long the buffer may wait before flush.
  class ObservabilityPipelineAmazonS3GenericBatchSettings
    include BaseGenericModel

    # Maximum batch size in bytes.
    attr_accessor :batch_size

    # Maximum number of seconds to wait before flushing the batch.
    attr_accessor :timeout_secs

    # Any attributes received that are not part of the declared schema.
    attr_accessor :additional_properties

    # Attribute mapping from ruby-style variable name to JSON key.
    # @!visibility private
    def self.attribute_map
      {
        :'batch_size' => :'batch_size',
        :'timeout_secs' => :'timeout_secs'
      }
    end

    # Attribute type mapping.
    # @!visibility private
    def self.openapi_types
      {
        :'batch_size' => :'Integer',
        :'timeout_secs' => :'Integer'
      }
    end

    # Initializes the object.
    #
    # Keys present in `attribute_map` populate the declared attributes; any
    # other key is preserved verbatim in `additional_properties`.
    #
    # @param attributes [Hash] Model attributes in the form of hash
    # @!visibility private
    def initialize(attributes = {})
      unless attributes.is_a?(Hash)
        fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineAmazonS3GenericBatchSettings` initialize method"
      end

      self.additional_properties = {}
      # Split incoming keys into declared attributes vs. extras, normalizing
      # string keys to symbols along the way.
      declared = {}
      attributes.each do |key, value|
        sym = key.to_sym
        if self.class.attribute_map.key?(sym)
          declared[sym] = value
        else
          self.additional_properties[sym] = value
        end
      end

      self.batch_size = declared[:'batch_size'] if declared.key?(:'batch_size')
      self.timeout_secs = declared[:'timeout_secs'] if declared.key?(:'timeout_secs')
    end

    # Returns the object in the form of hash, with additionalProperties support.
    # @return [Hash] Returns the object in the form of hash
    # @!visibility private
    def to_hash
      result = self.class.attribute_map.each_with_object({}) do |(attr, param), acc|
        value = send(attr)
        if value.nil?
          # Emit nil only for nullable attributes that were explicitly set.
          nullable = self.class.openapi_nullable.include?(attr)
          next unless nullable && instance_variable_defined?(:"@#{attr}")
        end
        acc[param] = _to_hash(value)
      end
      # Extra properties are passed through untouched and win on key collision.
      additional_properties.each { |attr, value| result[attr] = value }
      result
    end

    # Checks equality by comparing each attribute.
    # @param o [Object] Object to be compared
    # @!visibility private
    def ==(o)
      return true if equal?(o)
      o.class == self.class &&
        o.batch_size == batch_size &&
        o.timeout_secs == timeout_secs &&
        o.additional_properties == additional_properties
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    # @!visibility private
    def hash
      [batch_size, timeout_secs, additional_properties].hash
    end
  end
end
Loading
Loading