1,408 changes: 1,173 additions & 235 deletions .generator/schemas/v2/openapi.yaml

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions src/datadog/configuration.rs
@@ -243,6 +243,12 @@ impl Default for Configuration {
false,
),
("v2.validate_monitor_user_template".to_owned(), false),
("v2.create_pipeline".to_owned(), false),
("v2.delete_pipeline".to_owned(), false),
("v2.get_pipeline".to_owned(), false),
("v2.list_pipelines".to_owned(), false),
("v2.update_pipeline".to_owned(), false),
("v2.validate_pipeline".to_owned(), false),
("v2.list_role_templates".to_owned(), false),
("v2.create_connection".to_owned(), false),
("v2.delete_connection".to_owned(), false),
@@ -254,12 +260,6 @@ impl Default for Configuration {
("v2.query_event_filtered_users".to_owned(), false),
("v2.query_users".to_owned(), false),
("v2.update_connection".to_owned(), false),
("v2.create_pipeline".to_owned(), false),
("v2.delete_pipeline".to_owned(), false),
("v2.get_pipeline".to_owned(), false),
("v2.list_pipelines".to_owned(), false),
("v2.update_pipeline".to_owned(), false),
("v2.validate_pipeline".to_owned(), false),
("v2.create_scorecard_outcomes_batch".to_owned(), false),
("v2.create_scorecard_rule".to_owned(), false),
("v2.delete_scorecard_rule".to_owned(), false),
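All six pipeline operations are registered as unstable and default to `false`, so callers must opt in per operation before invoking them. A minimal sketch, assuming the crate is consumed as `datadog_api_client` and using the client's `set_unstable_operation_enabled` helper:

```rust
use datadog_api_client::datadog;

fn main() {
    let mut configuration = datadog::Configuration::new();
    // Operations default to disabled (see the table above); enable only those you call.
    configuration.set_unstable_operation_enabled("v2.create_pipeline", true);
    configuration.set_unstable_operation_enabled("v2.validate_pipeline", true);
}
```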
6 changes: 3 additions & 3 deletions src/datadogV2/api/api_observability_pipelines.rs
@@ -189,7 +189,7 @@ impl ObservabilityPipelinesAPI {
let local_client = &self.client;

let local_uri_str = format!(
"{}/api/v2/remote_config/products/obs_pipelines/pipelines",
"{}/api/v2/obs-pipelines/pipelines",
local_configuration.get_operation_host(operation_id)
);
let mut local_req_builder =
@@ -563,7 +563,7 @@ impl ObservabilityPipelinesAPI {
let local_client = &self.client;

let local_uri_str = format!(
"{}/api/v2/remote_config/products/obs_pipelines/pipelines",
"{}/api/v2/obs-pipelines/pipelines",
local_configuration.get_operation_host(operation_id)
);
let mut local_req_builder =
@@ -851,7 +851,7 @@ impl ObservabilityPipelinesAPI {
let local_client = &self.client;

let local_uri_str = format!(
"{}/api/v2/remote_config/products/obs_pipelines/pipelines/validate",
"{}/api/v2/obs-pipelines/pipelines/validate",
local_configuration.get_operation_host(operation_id)
);
let mut local_req_builder =
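With this change, every Observability Pipelines request targets `/api/v2/obs-pipelines/...` instead of the former `/api/v2/remote_config/products/obs_pipelines/...` prefix. A usage sketch, assuming the `datadog_api_client` crate name, `DD_API_KEY`/`DD_APP_KEY` in the environment, and the generated `ListPipelinesOptionalParams` type:

```rust
use datadog_api_client::datadog;
use datadog_api_client::datadogV2::api_observability_pipelines::{
    ListPipelinesOptionalParams, ObservabilityPipelinesAPI,
};

#[tokio::main]
async fn main() {
    let mut configuration = datadog::Configuration::new();
    configuration.set_unstable_operation_enabled("v2.list_pipelines", true);
    let api = ObservabilityPipelinesAPI::with_config(configuration);

    // After this change, the request goes to GET /api/v2/obs-pipelines/pipelines.
    match api.list_pipelines(ListPipelinesOptionalParams::default()).await {
        Ok(resp) => println!("{:#?}", resp),
        Err(err) => eprintln!("error listing pipelines: {:#?}", err),
    }
}
```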
2,538 changes: 1,300 additions & 1,238 deletions src/datadogV2/model/mod.rs

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions src/datadogV2/model/model_azure_storage_destination.rs
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `azure_storage` destination forwards logs to an Azure Blob Storage container.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
2 changes: 2 additions & 0 deletions src/datadogV2/model/model_microsoft_sentinel_destination.rs
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `add_env_vars` processor adds environment variable values to log events.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `add_fields` processor adds static key-value fields to logs.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -0,0 +1,157 @@
// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2019-Present Datadog, Inc.
use serde::de::{Error, MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize};
use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `add_hostname` processor adds the hostname to log events.
///
/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct ObservabilityPipelineAddHostnameProcessor {
    /// The display name for a component.
    #[serde(rename = "display_name")]
    pub display_name: Option<String>,
    /// Whether this processor is enabled.
    #[serde(rename = "enabled")]
    pub enabled: bool,
    /// The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).
    #[serde(rename = "id")]
    pub id: String,
    /// A Datadog search query used to determine which logs this processor targets.
    #[serde(rename = "include")]
    pub include: String,
    /// The processor type. The value should always be `add_hostname`.
    #[serde(rename = "type")]
    pub type_: crate::datadogV2::model::ObservabilityPipelineAddHostnameProcessorType,
    #[serde(flatten)]
    pub additional_properties: std::collections::BTreeMap<String, serde_json::Value>,
    #[serde(skip)]
    #[serde(default)]
    pub(crate) _unparsed: bool,
}

impl ObservabilityPipelineAddHostnameProcessor {
    pub fn new(
        enabled: bool,
        id: String,
        include: String,
        type_: crate::datadogV2::model::ObservabilityPipelineAddHostnameProcessorType,
    ) -> ObservabilityPipelineAddHostnameProcessor {
        ObservabilityPipelineAddHostnameProcessor {
            display_name: None,
            enabled,
            id,
            include,
            type_,
            additional_properties: std::collections::BTreeMap::new(),
            _unparsed: false,
        }
    }

    pub fn display_name(mut self, value: String) -> Self {
        self.display_name = Some(value);
        self
    }

    pub fn additional_properties(
        mut self,
        value: std::collections::BTreeMap<String, serde_json::Value>,
    ) -> Self {
        self.additional_properties = value;
        self
    }
}

impl<'de> Deserialize<'de> for ObservabilityPipelineAddHostnameProcessor {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ObservabilityPipelineAddHostnameProcessorVisitor;
        impl<'a> Visitor<'a> for ObservabilityPipelineAddHostnameProcessorVisitor {
            type Value = ObservabilityPipelineAddHostnameProcessor;

            fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {
                f.write_str("a mapping")
            }

            fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
            where
                M: MapAccess<'a>,
            {
                let mut display_name: Option<String> = None;
                let mut enabled: Option<bool> = None;
                let mut id: Option<String> = None;
                let mut include: Option<String> = None;
                let mut type_: Option<
                    crate::datadogV2::model::ObservabilityPipelineAddHostnameProcessorType,
                > = None;
                let mut additional_properties: std::collections::BTreeMap<
                    String,
                    serde_json::Value,
                > = std::collections::BTreeMap::new();
                let mut _unparsed = false;

                while let Some((k, v)) = map.next_entry::<String, serde_json::Value>()? {
                    match k.as_str() {
                        "display_name" => {
                            if v.is_null() {
                                continue;
                            }
                            display_name =
                                Some(serde_json::from_value(v).map_err(M::Error::custom)?);
                        }
                        "enabled" => {
                            enabled = Some(serde_json::from_value(v).map_err(M::Error::custom)?);
                        }
                        "id" => {
                            id = Some(serde_json::from_value(v).map_err(M::Error::custom)?);
                        }
                        "include" => {
                            include = Some(serde_json::from_value(v).map_err(M::Error::custom)?);
                        }
                        "type" => {
                            type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?);
                            if let Some(ref _type_) = type_ {
                                match _type_ {
                                    crate::datadogV2::model::ObservabilityPipelineAddHostnameProcessorType::UnparsedObject(_type_) => {
                                        _unparsed = true;
                                    },
                                    _ => {}
                                }
                            }
                        }
                        &_ => {
                            if let Ok(value) = serde_json::from_value(v.clone()) {
                                additional_properties.insert(k, value);
                            }
                        }
                    }
                }
                let enabled = enabled.ok_or_else(|| M::Error::missing_field("enabled"))?;
                let id = id.ok_or_else(|| M::Error::missing_field("id"))?;
                let include = include.ok_or_else(|| M::Error::missing_field("include"))?;
                let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?;

                let content = ObservabilityPipelineAddHostnameProcessor {
                    display_name,
                    enabled,
                    id,
                    include,
                    type_,
                    additional_properties,
                    _unparsed,
                };

                Ok(content)
            }
        }

        deserializer.deserialize_any(ObservabilityPipelineAddHostnameProcessorVisitor)
    }
}
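For reference, constructing this model through its builder might look like the sketch below; the `id` and `include` values are illustrative only, and the `datadog_api_client` crate path is assumed:

```rust
use datadog_api_client::datadogV2::model::{
    ObservabilityPipelineAddHostnameProcessor, ObservabilityPipelineAddHostnameProcessorType,
};

fn main() {
    // `new` takes the required fields in the order declared above;
    // `display_name` is optional and set through the builder method.
    let processor = ObservabilityPipelineAddHostnameProcessor::new(
        true,                         // enabled
        "add-hostname-1".to_string(), // id: hypothetical component id
        "service:web".to_string(),    // include: hypothetical Datadog search query
        ObservabilityPipelineAddHostnameProcessorType::ADD_HOSTNAME,
    )
    .display_name("Add hostname".to_string());

    // The derived Serialize emits the wire names from the serde renames.
    println!("{}", serde_json::to_string_pretty(&processor).unwrap());
}
```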
@@ -0,0 +1,48 @@
// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2019-Present Datadog, Inc.

use serde::{Deserialize, Deserializer, Serialize, Serializer};

#[non_exhaustive]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ObservabilityPipelineAddHostnameProcessorType {
    ADD_HOSTNAME,
    UnparsedObject(crate::datadog::UnparsedObject),
}

impl ToString for ObservabilityPipelineAddHostnameProcessorType {
    fn to_string(&self) -> String {
        match self {
            Self::ADD_HOSTNAME => String::from("add_hostname"),
            Self::UnparsedObject(v) => v.value.to_string(),
        }
    }
}

impl Serialize for ObservabilityPipelineAddHostnameProcessorType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::UnparsedObject(v) => v.serialize(serializer),
            _ => serializer.serialize_str(self.to_string().as_str()),
        }
    }
}

impl<'de> Deserialize<'de> for ObservabilityPipelineAddHostnameProcessorType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s: String = String::deserialize(deserializer)?;
        Ok(match s.as_str() {
            "add_hostname" => Self::ADD_HOSTNAME,
            _ => Self::UnparsedObject(crate::datadog::UnparsedObject {
                value: serde_json::Value::String(s.into()),
            }),
        })
    }
}
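The `UnparsedObject` fallback makes deserialization forward-compatible: unknown enum strings are captured instead of failing the whole payload. A quick round-trip sketch (same crate-path assumption as above):

```rust
use datadog_api_client::datadogV2::model::ObservabilityPipelineAddHostnameProcessorType;

fn main() {
    // A known variant parses to ADD_HOSTNAME and prints its wire string.
    let known: ObservabilityPipelineAddHostnameProcessorType =
        serde_json::from_str("\"add_hostname\"").unwrap();
    assert_eq!(known.to_string(), "add_hostname");

    // An unrecognized value is preserved as UnparsedObject rather than erroring.
    let unknown: ObservabilityPipelineAddHostnameProcessorType =
        serde_json::from_str("\"brand_new_type\"").unwrap();
    assert!(matches!(
        unknown,
        ObservabilityPipelineAddHostnameProcessorType::UnparsedObject(_)
    ));
}
```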
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `amazon_data_firehose` source ingests logs from AWS Data Firehose.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `amazon_opensearch` destination writes logs to Amazon OpenSearch.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -8,6 +8,8 @@ use std::fmt::{self, Formatter};

/// The `amazon_s3` source ingests logs from an Amazon S3 bucket.
/// It supports AWS authentication and TLS encryption.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]
@@ -7,6 +7,8 @@ use serde_with::skip_serializing_none;
use std::fmt::{self, Formatter};

/// The `amazon_security_lake` destination sends your logs to Amazon Security Lake.
+///
+/// **Supported pipeline types:** logs
#[non_exhaustive]
#[skip_serializing_none]
#[derive(Clone, Debug, PartialEq, Serialize)]