diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 6e4cbaacf..aba68e52a 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6806,6 +6806,72 @@ components: required: - data type: object + BatchDeleteRowsRequestArray: + description: The request body for deleting multiple rows from a reference table. + properties: + data: + items: + $ref: '#/components/schemas/BatchDeleteRowsRequestData' + maxItems: 200 + type: array + required: + - data + type: object + BatchDeleteRowsRequestData: + description: Row resource containing a single row identifier for deletion. + properties: + id: + example: primary_key_value + type: string + type: + $ref: '#/components/schemas/TableRowResourceDataType' + required: + - type + - id + type: object + BatchUpsertRowsRequestArray: + description: The request body for creating or updating multiple rows into a + reference table. + properties: + data: + items: + $ref: '#/components/schemas/BatchUpsertRowsRequestData' + maxItems: 200 + type: array + required: + - data + type: object + BatchUpsertRowsRequestData: + description: Row resource containing a single row identifier and its column + values. + properties: + attributes: + $ref: '#/components/schemas/BatchUpsertRowsRequestDataAttributes' + id: + example: primary_key_value + type: string + type: + $ref: '#/components/schemas/TableRowResourceDataType' + required: + - type + - id + type: object + BatchUpsertRowsRequestDataAttributes: + description: Attributes containing row data values for row creation or update + operations. + properties: + values: + additionalProperties: + x-required-field: true + description: Key-value pairs representing row data, where keys are field + names from the schema. + example: + example_key_value: primary_key_value + name: row_name + type: object + required: + - values + type: object BillConfig: description: Bill config. 
properties: @@ -74549,6 +74615,47 @@ paths: tags: - Reference Tables /api/v2/reference-tables/tables/{id}/rows: + delete: + description: Delete multiple rows from a Reference Table by their primary key + values. + operationId: DeleteRows + parameters: + - description: Unique identifier of the reference table to delete rows from + in: path + name: id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/BatchDeleteRowsRequestArray' + required: true + responses: + '200': + description: Rows deleted successfully + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/ForbiddenResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Internal Server Error + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Delete rows + tags: + - Reference Tables get: description: Get reference table rows by their primary key values. operationId: GetRowsByID @@ -74593,6 +74700,48 @@ paths: summary: Get rows by id tags: - Reference Tables + post: + description: Create or update rows in a Reference Table by their primary key + values. If a row with the specified primary key exists, it is updated; otherwise, + a new row is created. 
+ operationId: UpsertRows + parameters: + - description: Unique identifier of the reference table to upsert rows into + in: path + name: id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/BatchUpsertRowsRequestArray' + required: true + responses: + '200': + description: Rows created or updated successfully + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/ForbiddenResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Internal Server Error + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Upsert rows + tags: + - Reference Tables /api/v2/reference-tables/uploads: post: description: Create a reference table upload for bulk data ingestion diff --git a/examples/v2_reference-tables_DeleteRows.rs b/examples/v2_reference-tables_DeleteRows.rs new file mode 100644 index 000000000..159cf0ddd --- /dev/null +++ b/examples/v2_reference-tables_DeleteRows.rs @@ -0,0 +1,22 @@ +// Delete rows returns "Rows deleted successfully" response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_reference_tables::ReferenceTablesAPI; +use datadog_api_client::datadogV2::model::BatchDeleteRowsRequestArray; +use datadog_api_client::datadogV2::model::BatchDeleteRowsRequestData; +use datadog_api_client::datadogV2::model::TableRowResourceDataType; + +#[tokio::main] +async fn main() { + let body = BatchDeleteRowsRequestArray::new(vec![BatchDeleteRowsRequestData::new( + "primary_key_value".to_string(), + TableRowResourceDataType::ROW, + )]); + let configuration = datadog::Configuration::new(); + let api = ReferenceTablesAPI::with_config(configuration); + let resp = api.delete_rows("id".to_string(), body).await; + if let Ok(value) = 
resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/examples/v2_reference-tables_UpsertRows.rs b/examples/v2_reference-tables_UpsertRows.rs new file mode 100644 index 000000000..86787ba8d --- /dev/null +++ b/examples/v2_reference-tables_UpsertRows.rs @@ -0,0 +1,32 @@ +// Upsert rows returns "Rows created or updated successfully" response +use datadog_api_client::datadog; +use datadog_api_client::datadogV2::api_reference_tables::ReferenceTablesAPI; +use datadog_api_client::datadogV2::model::BatchUpsertRowsRequestArray; +use datadog_api_client::datadogV2::model::BatchUpsertRowsRequestData; +use datadog_api_client::datadogV2::model::BatchUpsertRowsRequestDataAttributes; +use datadog_api_client::datadogV2::model::TableRowResourceDataType; +use serde_json::Value; +use std::collections::BTreeMap; + +#[tokio::main] +async fn main() { + let body = BatchUpsertRowsRequestArray::new(vec![BatchUpsertRowsRequestData::new( + "primary_key_value".to_string(), + TableRowResourceDataType::ROW, + ) + .attributes(BatchUpsertRowsRequestDataAttributes::new(BTreeMap::from([ + ( + "example_key_value".to_string(), + Value::from("primary_key_value"), + ), + ("name".to_string(), Value::from("row_name")), + ])))]); + let configuration = datadog::Configuration::new(); + let api = ReferenceTablesAPI::with_config(configuration); + let resp = api.upsert_rows("id".to_string(), body).await; + if let Ok(value) = resp { + println!("{:#?}", value); + } else { + println!("{:#?}", resp.unwrap_err()); + } +} diff --git a/src/datadogV2/api/api_reference_tables.rs b/src/datadogV2/api/api_reference_tables.rs index 52a90333b..f69e837a0 100644 --- a/src/datadogV2/api/api_reference_tables.rs +++ b/src/datadogV2/api/api_reference_tables.rs @@ -77,6 +77,14 @@ pub enum CreateReferenceTableUploadError { UnknownValue(serde_json::Value), } +/// DeleteRowsError is a struct for typed errors of method [`ReferenceTablesAPI::delete_rows`] +#[derive(Debug, Clone, 
Serialize, Deserialize)] +#[serde(untagged)] +pub enum DeleteRowsError { + APIErrorResponse(crate::datadogV2::model::APIErrorResponse), + UnknownValue(serde_json::Value), +} + /// DeleteTableError is a struct for typed errors of method [`ReferenceTablesAPI::delete_table`] #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] @@ -117,6 +125,14 @@ pub enum UpdateReferenceTableError { UnknownValue(serde_json::Value), } +/// UpsertRowsError is a struct for typed errors of method [`ReferenceTablesAPI::upsert_rows`] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum UpsertRowsError { + APIErrorResponse(crate::datadogV2::model::APIErrorResponse), + UnknownValue(serde_json::Value), +} + /// View and manage Reference Tables in your organization. #[derive(Debug, Clone)] pub struct ReferenceTablesAPI { @@ -497,6 +513,141 @@ impl ReferenceTablesAPI { } } + /// Delete multiple rows from a Reference Table by their primary key values. + pub async fn delete_rows( + &self, + id: String, + body: crate::datadogV2::model::BatchDeleteRowsRequestArray, + ) -> Result<(), datadog::Error> { + match self.delete_rows_with_http_info(id, body).await { + Ok(_) => Ok(()), + Err(err) => Err(err), + } + } + + /// Delete multiple rows from a Reference Table by their primary key values. 
+ pub async fn delete_rows_with_http_info( + &self, + id: String, + body: crate::datadogV2::model::BatchDeleteRowsRequestArray, + ) -> Result, datadog::Error> { + let local_configuration = &self.config; + let operation_id = "v2.delete_rows"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/reference-tables/tables/{id}/rows", + local_configuration.get_operation_host(operation_id), + id = datadog::urlencode(id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::DELETE, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Content-Type", HeaderValue::from_static("application/json")); + headers.insert("Accept", HeaderValue::from_static("*/*")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + // build body parameters + let output = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(output, datadog::DDFormatter); + if body.serialize(&mut ser).is_ok() { + if let Some(content_encoding) = headers.get("Content-Encoding") { + match content_encoding.to_str().unwrap_or_default() { + "gzip" => { + let mut enc = GzEncoder::new(Vec::new(), 
Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "deflate" => { + let mut enc = ZlibEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "zstd1" => { + let mut enc = zstd::stream::Encoder::new(Vec::new(), 0).unwrap(); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + _ => { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + } else { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + + local_req_builder = local_req_builder.headers(headers); + let local_req = local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if !local_status.is_client_error() && !local_status.is_server_error() { + Ok(datadog::ResponseContent { + status: local_status, + content: local_content, + entity: None, + }) + } else { + let local_entity: Option = serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } + /// Delete a reference table by ID pub async fn delete_table(&self, id: String) -> Result<(), datadog::Error> { match self.delete_table_with_http_info(id).await { @@ -1076,4 +1227,139 @@ impl ReferenceTablesAPI { 
Err(datadog::Error::ResponseError(local_error)) } } + + /// Create or update rows in a Reference Table by their primary key values. If a row with the specified primary key exists, it is updated; otherwise, a new row is created. + pub async fn upsert_rows( + &self, + id: String, + body: crate::datadogV2::model::BatchUpsertRowsRequestArray, + ) -> Result<(), datadog::Error> { + match self.upsert_rows_with_http_info(id, body).await { + Ok(_) => Ok(()), + Err(err) => Err(err), + } + } + + /// Create or update rows in a Reference Table by their primary key values. If a row with the specified primary key exists, it is updated; otherwise, a new row is created. + pub async fn upsert_rows_with_http_info( + &self, + id: String, + body: crate::datadogV2::model::BatchUpsertRowsRequestArray, + ) -> Result, datadog::Error> { + let local_configuration = &self.config; + let operation_id = "v2.upsert_rows"; + + let local_client = &self.client; + + let local_uri_str = format!( + "{}/api/v2/reference-tables/tables/{id}/rows", + local_configuration.get_operation_host(operation_id), + id = datadog::urlencode(id) + ); + let mut local_req_builder = + local_client.request(reqwest::Method::POST, local_uri_str.as_str()); + + // build headers + let mut headers = HeaderMap::new(); + headers.insert("Content-Type", HeaderValue::from_static("application/json")); + headers.insert("Accept", HeaderValue::from_static("*/*")); + + // build user agent + match HeaderValue::from_str(local_configuration.user_agent.as_str()) { + Ok(user_agent) => headers.insert(reqwest::header::USER_AGENT, user_agent), + Err(e) => { + log::warn!("Failed to parse user agent header: {e}, falling back to default"); + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_static(datadog::DEFAULT_USER_AGENT.as_str()), + ) + } + }; + + // build auth + if let Some(local_key) = local_configuration.auth_keys.get("apiKeyAuth") { + headers.insert( + "DD-API-KEY", + HeaderValue::from_str(local_key.key.as_str()) + 
.expect("failed to parse DD-API-KEY header"), + ); + }; + if let Some(local_key) = local_configuration.auth_keys.get("appKeyAuth") { + headers.insert( + "DD-APPLICATION-KEY", + HeaderValue::from_str(local_key.key.as_str()) + .expect("failed to parse DD-APPLICATION-KEY header"), + ); + }; + + // build body parameters + let output = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(output, datadog::DDFormatter); + if body.serialize(&mut ser).is_ok() { + if let Some(content_encoding) = headers.get("Content-Encoding") { + match content_encoding.to_str().unwrap_or_default() { + "gzip" => { + let mut enc = GzEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "deflate" => { + let mut enc = ZlibEncoder::new(Vec::new(), Compression::default()); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + "zstd1" => { + let mut enc = zstd::stream::Encoder::new(Vec::new(), 0).unwrap(); + let _ = enc.write_all(ser.into_inner().as_slice()); + match enc.finish() { + Ok(buf) => { + local_req_builder = local_req_builder.body(buf); + } + Err(e) => return Err(datadog::Error::Io(e)), + } + } + _ => { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + } else { + local_req_builder = local_req_builder.body(ser.into_inner()); + } + } + + local_req_builder = local_req_builder.headers(headers); + let local_req = local_req_builder.build()?; + log::debug!("request content: {:?}", local_req.body()); + let local_resp = local_client.execute(local_req).await?; + + let local_status = local_resp.status(); + let local_content = local_resp.text().await?; + log::debug!("response content: {}", local_content); + + if 
!local_status.is_client_error() && !local_status.is_server_error() { + Ok(datadog::ResponseContent { + status: local_status, + content: local_content, + entity: None, + }) + } else { + let local_entity: Option = serde_json::from_str(&local_content).ok(); + let local_error = datadog::ResponseContent { + status: local_status, + content: local_content, + entity: local_entity, + }; + Err(datadog::Error::ResponseError(local_error)) + } + } } diff --git a/src/datadogV2/model/mod.rs b/src/datadogV2/model/mod.rs index 19b16a61f..b9cdb5408 100644 --- a/src/datadogV2/model/mod.rs +++ b/src/datadogV2/model/mod.rs @@ -4636,14 +4636,24 @@ pub mod model_patch_table_request_data_attributes_schema_fields_items; pub use self::model_patch_table_request_data_attributes_schema_fields_items::PatchTableRequestDataAttributesSchemaFieldsItems; pub mod model_patch_table_request_data_type; pub use self::model_patch_table_request_data_type::PatchTableRequestDataType; +pub mod model_batch_delete_rows_request_array; +pub use self::model_batch_delete_rows_request_array::BatchDeleteRowsRequestArray; +pub mod model_batch_delete_rows_request_data; +pub use self::model_batch_delete_rows_request_data::BatchDeleteRowsRequestData; +pub mod model_table_row_resource_data_type; +pub use self::model_table_row_resource_data_type::TableRowResourceDataType; pub mod model_table_row_resource_array; pub use self::model_table_row_resource_array::TableRowResourceArray; pub mod model_table_row_resource_data; pub use self::model_table_row_resource_data::TableRowResourceData; pub mod model_table_row_resource_data_attributes; pub use self::model_table_row_resource_data_attributes::TableRowResourceDataAttributes; -pub mod model_table_row_resource_data_type; -pub use self::model_table_row_resource_data_type::TableRowResourceDataType; +pub mod model_batch_upsert_rows_request_array; +pub use self::model_batch_upsert_rows_request_array::BatchUpsertRowsRequestArray; +pub mod model_batch_upsert_rows_request_data; +pub use 
self::model_batch_upsert_rows_request_data::BatchUpsertRowsRequestData; +pub mod model_batch_upsert_rows_request_data_attributes; +pub use self::model_batch_upsert_rows_request_data_attributes::BatchUpsertRowsRequestDataAttributes; pub mod model_create_upload_request; pub use self::model_create_upload_request::CreateUploadRequest; pub mod model_create_upload_request_data; diff --git a/src/datadogV2/model/model_batch_delete_rows_request_array.rs b/src/datadogV2/model/model_batch_delete_rows_request_array.rs new file mode 100644 index 000000000..087e174a1 --- /dev/null +++ b/src/datadogV2/model/model_batch_delete_rows_request_array.rs @@ -0,0 +1,94 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The request body for deleting multiple rows from a reference table. 
+#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BatchDeleteRowsRequestArray { + #[serde(rename = "data")] + pub data: Vec, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl BatchDeleteRowsRequestArray { + pub fn new( + data: Vec, + ) -> BatchDeleteRowsRequestArray { + BatchDeleteRowsRequestArray { + data, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for BatchDeleteRowsRequestArray { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct BatchDeleteRowsRequestArrayVisitor; + impl<'a> Visitor<'a> for BatchDeleteRowsRequestArrayVisitor { + type Value = BatchDeleteRowsRequestArray; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option> = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "data" => { + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let data = data.ok_or_else(|| M::Error::missing_field("data"))?; + + let content = BatchDeleteRowsRequestArray { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(BatchDeleteRowsRequestArrayVisitor) + } +} diff --git a/src/datadogV2/model/model_batch_delete_rows_request_data.rs b/src/datadogV2/model/model_batch_delete_rows_request_data.rs new file mode 100644 index 000000000..d0788544a --- /dev/null +++ b/src/datadogV2/model/model_batch_delete_rows_request_data.rs @@ -0,0 +1,112 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Row resource containing a single row identifier for deletion. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BatchDeleteRowsRequestData { + #[serde(rename = "id")] + pub id: String, + /// Row resource type. 
+ #[serde(rename = "type")] + pub type_: crate::datadogV2::model::TableRowResourceDataType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl BatchDeleteRowsRequestData { + pub fn new( + id: String, + type_: crate::datadogV2::model::TableRowResourceDataType, + ) -> BatchDeleteRowsRequestData { + BatchDeleteRowsRequestData { + id, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for BatchDeleteRowsRequestData { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct BatchDeleteRowsRequestDataVisitor; + impl<'a> Visitor<'a> for BatchDeleteRowsRequestDataVisitor { + type Value = BatchDeleteRowsRequestData; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut id: Option = None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "id" => { + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::TableRowResourceDataType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let id = id.ok_or_else(|| M::Error::missing_field("id"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = BatchDeleteRowsRequestData { + id, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(BatchDeleteRowsRequestDataVisitor) + } +} diff --git a/src/datadogV2/model/model_batch_upsert_rows_request_array.rs b/src/datadogV2/model/model_batch_upsert_rows_request_array.rs new file mode 100644 index 000000000..57c5fb4c6 --- /dev/null +++ b/src/datadogV2/model/model_batch_upsert_rows_request_array.rs @@ -0,0 +1,94 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// The request body for creating or updating multiple rows into a reference table. 
+#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BatchUpsertRowsRequestArray { + #[serde(rename = "data")] + pub data: Vec, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl BatchUpsertRowsRequestArray { + pub fn new( + data: Vec, + ) -> BatchUpsertRowsRequestArray { + BatchUpsertRowsRequestArray { + data, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for BatchUpsertRowsRequestArray { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct BatchUpsertRowsRequestArrayVisitor; + impl<'a> Visitor<'a> for BatchUpsertRowsRequestArrayVisitor { + type Value = BatchUpsertRowsRequestArray; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut data: Option> = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "data" => { + data = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let data = data.ok_or_else(|| M::Error::missing_field("data"))?; + + let content = BatchUpsertRowsRequestArray { + data, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(BatchUpsertRowsRequestArrayVisitor) + } +} diff --git a/src/datadogV2/model/model_batch_upsert_rows_request_data.rs b/src/datadogV2/model/model_batch_upsert_rows_request_data.rs new file mode 100644 index 000000000..9ef32109d --- /dev/null +++ b/src/datadogV2/model/model_batch_upsert_rows_request_data.rs @@ -0,0 +1,134 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Row resource containing a single row identifier and its column values. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BatchUpsertRowsRequestData { + /// Attributes containing row data values for row creation or update operations. + #[serde(rename = "attributes")] + pub attributes: Option, + #[serde(rename = "id")] + pub id: String, + /// Row resource type. 
+ #[serde(rename = "type")] + pub type_: crate::datadogV2::model::TableRowResourceDataType, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl BatchUpsertRowsRequestData { + pub fn new( + id: String, + type_: crate::datadogV2::model::TableRowResourceDataType, + ) -> BatchUpsertRowsRequestData { + BatchUpsertRowsRequestData { + attributes: None, + id, + type_, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn attributes( + mut self, + value: crate::datadogV2::model::BatchUpsertRowsRequestDataAttributes, + ) -> Self { + self.attributes = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for BatchUpsertRowsRequestData { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct BatchUpsertRowsRequestDataVisitor; + impl<'a> Visitor<'a> for BatchUpsertRowsRequestDataVisitor { + type Value = BatchUpsertRowsRequestData; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut attributes: Option< + crate::datadogV2::model::BatchUpsertRowsRequestDataAttributes, + > = None; + let mut id: Option = None; + let mut type_: Option = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "attributes" => { + if v.is_null() { + continue; + } + attributes = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "id" => { + id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::TableRowResourceDataType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let id = id.ok_or_else(|| M::Error::missing_field("id"))?; + let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; + + let content = BatchUpsertRowsRequestData { + attributes, + id, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(BatchUpsertRowsRequestDataVisitor) + } +} diff --git a/src/datadogV2/model/model_batch_upsert_rows_request_data_attributes.rs b/src/datadogV2/model/model_batch_upsert_rows_request_data_attributes.rs new file mode 100644 index 000000000..2bba778da --- /dev/null +++ b/src/datadogV2/model/model_batch_upsert_rows_request_data_attributes.rs @@ -0,0 +1,95 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Attributes containing row data values for row creation or update operations. 
+#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct BatchUpsertRowsRequestDataAttributes { + /// Key-value pairs representing row data, where keys are field names from the schema. + #[serde(rename = "values")] + pub values: std::collections::BTreeMap, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl BatchUpsertRowsRequestDataAttributes { + pub fn new( + values: std::collections::BTreeMap, + ) -> BatchUpsertRowsRequestDataAttributes { + BatchUpsertRowsRequestDataAttributes { + values, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl<'de> Deserialize<'de> for BatchUpsertRowsRequestDataAttributes { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct BatchUpsertRowsRequestDataAttributesVisitor; + impl<'a> Visitor<'a> for BatchUpsertRowsRequestDataAttributesVisitor { + type Value = BatchUpsertRowsRequestDataAttributes; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut values: Option> = + None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "values" => { + values = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + let values = values.ok_or_else(|| M::Error::missing_field("values"))?; + + let content = BatchUpsertRowsRequestDataAttributes { + values, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(BatchUpsertRowsRequestDataAttributesVisitor) + } +} diff --git a/tests/scenarios/features/v2/reference_tables.feature b/tests/scenarios/features/v2/reference_tables.feature index fa3ea4236..45a2861c1 100644 --- a/tests/scenarios/features/v2/reference_tables.feature +++ b/tests/scenarios/features/v2/reference_tables.feature @@ -53,6 +53,30 @@ Feature: Reference Tables When the request is sent Then the response status is 400 Bad Request + @generated @skip @team:DataDog/redapl-experiences + Scenario: Delete rows returns "Bad Request" response + Given new "DeleteRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"id": "primary_key_value", "type": "row"}]} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/redapl-experiences + Scenario: Delete rows returns "Not Found" response + Given new "DeleteRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"id": "primary_key_value", "type": "row"}]} + When the request is sent + Then the response status is 404 Not Found + + @generated @skip @team:DataDog/redapl-experiences + Scenario: Delete rows returns "Rows deleted successfully" response + Given new "DeleteRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"id": "primary_key_value", "type": "row"}]} + When the request is sent + Then the response status is 200 Rows deleted successfully + @generated 
@skip @team:DataDog/redapl-experiences Scenario: Delete table returns "Not Found" response Given new "DeleteTable" request @@ -119,3 +143,27 @@ Feature: Reference Tables And body with value {"data": {"attributes": {"description": "this is a cloud table generated via a cloud bucket sync", "file_metadata": {"access_details": {"aws_detail": {"aws_account_id": "test-account-id", "aws_bucket_name": "test-bucket", "file_path": "test_rt.csv"}}, "sync_enabled": true}, "schema": {"fields": [{"name": "id", "type": "INT32"}, {"name": "name", "type": "STRING"}], "primary_keys": ["id"]}, "sync_enabled": false, "tags": ["test_tag"]}, "type": "reference_table"}} When the request is sent Then the response status is 200 OK + + @generated @skip @team:DataDog/redapl-experiences + Scenario: Upsert rows returns "Bad Request" response + Given new "UpsertRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/redapl-experiences + Scenario: Upsert rows returns "Not Found" response + Given new "UpsertRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + When the request is sent + Then the response status is 404 Not Found + + @generated @skip @team:DataDog/redapl-experiences + Scenario: Upsert rows returns "Rows created or updated successfully" response + Given new "UpsertRows" request + And request contains "id" parameter from "REPLACE.ME" + And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + When the request is 
sent + Then the response status is 200 Rows created or updated successfully diff --git a/tests/scenarios/features/v2/undo.json b/tests/scenarios/features/v2/undo.json index 3799e64e6..35580ed3f 100644 --- a/tests/scenarios/features/v2/undo.json +++ b/tests/scenarios/features/v2/undo.json @@ -3089,12 +3089,26 @@ "type": "idempotent" } }, + "DeleteRows": { + "tag": "Reference Tables", + "undo": { + "type": "idempotent" + } + }, "GetRowsByID": { "tag": "Reference Tables", "undo": { "type": "safe" } }, + "UpsertRows": { + "tag": "Reference Tables", + "undo": { + "operationId": "DeleteRows", + "parameters": [], + "type": "unsafe" + } + }, "CreateReferenceTableUpload": { "tag": "Reference Tables", "undo": { diff --git a/tests/scenarios/function_mappings.rs b/tests/scenarios/function_mappings.rs index 4f9cae93a..f4a25f931 100644 --- a/tests/scenarios/function_mappings.rs +++ b/tests/scenarios/function_mappings.rs @@ -3848,9 +3848,15 @@ pub fn collect_function_calls(world: &mut DatadogWorld) { "v2.UpdateReferenceTable".into(), test_v2_update_reference_table, ); + world + .function_mappings + .insert("v2.DeleteRows".into(), test_v2_delete_rows); world .function_mappings .insert("v2.GetRowsByID".into(), test_v2_get_rows_by_id); + world + .function_mappings + .insert("v2.UpsertRows".into(), test_v2_upsert_rows); world.function_mappings.insert( "v2.CreateReferenceTableUpload".into(), test_v2_create_reference_table_upload, @@ -29254,6 +29260,32 @@ fn test_v2_update_reference_table(world: &mut DatadogWorld, _parameters: &HashMa world.response.code = response.status.as_u16(); } +fn test_v2_delete_rows(world: &mut DatadogWorld, _parameters: &HashMap<String, Value>) { + let api = world + .api_instances + .v2_api_reference_tables + .as_ref() + .expect("api instance not found"); + let id = serde_json::from_value(_parameters.get("id").unwrap().clone()).unwrap(); + let body = serde_json::from_value(_parameters.get("body").unwrap().clone()).unwrap(); + let response = match
block_on(api.delete_rows_with_http_info(id, body)) { + Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + fn test_v2_get_rows_by_id(world: &mut DatadogWorld, _parameters: &HashMap<String, Value>) { let api = world .api_instances @@ -29280,6 +29312,32 @@ fn test_v2_get_rows_by_id(world: &mut DatadogWorld, _parameters: &HashMap<Strin +fn test_v2_upsert_rows(world: &mut DatadogWorld, _parameters: &HashMap<String, Value>) { + let api = world + .api_instances + .v2_api_reference_tables + .as_ref() + .expect("api instance not found"); + let id = serde_json::from_value(_parameters.get("id").unwrap().clone()).unwrap(); + let body = serde_json::from_value(_parameters.get("body").unwrap().clone()).unwrap(); + let response = match block_on(api.upsert_rows_with_http_info(id, body)) { + Ok(response) => response, + Err(error) => { + return match error { + Error::ResponseError(e) => { + world.response.code = e.status.as_u16(); + if let Some(entity) = e.entity { + world.response.object = serde_json::to_value(entity).unwrap(); + } + } + _ => panic!("error parsing response: {error}"), + }; + } + }; + world.response.object = serde_json::to_value(response.entity).unwrap(); + world.response.code = response.status.as_u16(); +} + fn test_v2_create_reference_table_upload( world: &mut DatadogWorld, _parameters: &HashMap<String, Value>,