diff --git a/common/src/api/external/mod.rs b/common/src/api/external/mod.rs index 60325218f1c..da8a5078324 100644 --- a/common/src/api/external/mod.rs +++ b/common/src/api/external/mod.rs @@ -977,6 +977,8 @@ pub enum ResourceType { AffinityGroupMember, AntiAffinityGroup, AntiAffinityGroupMember, + Alert, + AlertReceiver, AllowList, BackgroundTask, BgpConfig, @@ -1040,8 +1042,6 @@ pub enum ResourceType { Probe, ProbeNetworkInterface, LldpLinkConfig, - WebhookEvent, - WebhookReceiver, WebhookSecret, } diff --git a/dev-tools/omdb/src/bin/omdb/db.rs b/dev-tools/omdb/src/bin/omdb/db.rs index 28a194aed62..e8b3d1e58cf 100644 --- a/dev-tools/omdb/src/bin/omdb/db.rs +++ b/dev-tools/omdb/src/bin/omdb/db.rs @@ -23,6 +23,7 @@ use crate::helpers::CONNECTION_OPTIONS_HEADING; use crate::helpers::DATABASE_OPTIONS_HEADING; use crate::helpers::const_max_len; use crate::helpers::display_option_blank; +use alert::AlertArgs; use anyhow::Context; use anyhow::anyhow; use anyhow::bail; @@ -169,11 +170,9 @@ use std::sync::Arc; use strum::IntoEnumIterator; use tabled::Tabled; use uuid::Uuid; -use webhook::WebhookArgs; -use webhook::cmd_db_webhook; +mod alert; mod saga; -mod webhook; const NO_ACTIVE_PROPOLIS_MSG: &str = ""; const NOT_ON_SLED_MSG: &str = ""; @@ -387,8 +386,8 @@ enum DbCommands { Vmms(VmmListArgs), /// Print information about the oximeter collector. Oximeter(OximeterArgs), - /// Print information about webhooks - Webhook(WebhookArgs), + /// Print information about alerts + Alert(AlertArgs), /// Commands for querying and interacting with pools Zpool(ZpoolArgs), } @@ -1467,7 +1466,7 @@ impl DbArgs { command: OximeterCommands::ListProducers }) => cmd_db_oximeter_list_producers(&datastore, fetch_opts).await, - DbCommands::Webhook(args) => cmd_db_webhook(&opctx, &datastore, &fetch_opts, &args).await, + DbCommands::Alert(args) => alert::cmd_db_alert(&opctx, &datastore, &fetch_opts, &args).await, DbCommands::Zpool(ZpoolArgs { command: ZpoolCommands::List(args) }) => cmd_db_zpool_list(&opctx, &datastore, &args).await, diff --git a/dev-tools/omdb/src/bin/omdb/db/webhook.rs b/dev-tools/omdb/src/bin/omdb/db/alert.rs similarity index 77% rename from dev-tools/omdb/src/bin/omdb/db/webhook.rs rename to dev-tools/omdb/src/bin/omdb/db/alert.rs index fe45c08b9a4..f9810c7fb0e 100644 --- a/dev-tools/omdb/src/bin/omdb/db/webhook.rs +++ b/dev-tools/omdb/src/bin/omdb/db/alert.rs @@ -2,7 +2,7 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. -//! `omdb db webhook` subcommands +//! 
`omdb db alert` subcommands use super::DbFetchOptions; use super::check_limit; @@ -22,100 +22,162 @@ use diesel::ExpressionMethods; use diesel::OptionalExtension; use diesel::expression::SelectableHelper; use diesel::query_dsl::QueryDsl; +use nexus_db_model::Alert; +use nexus_db_model::AlertClass; +use nexus_db_model::AlertReceiver; use nexus_db_model::WebhookDelivery; -use nexus_db_model::WebhookEvent; -use nexus_db_model::WebhookEventClass; -use nexus_db_model::WebhookReceiver; use nexus_db_queries::context::OpContext; use nexus_db_queries::db; use nexus_db_queries::db::DataStore; +use nexus_db_schema::schema::alert::dsl as alert_dsl; use nexus_db_schema::schema::webhook_delivery::dsl as delivery_dsl; use nexus_db_schema::schema::webhook_delivery_attempt::dsl as attempt_dsl; -use nexus_db_schema::schema::webhook_event::dsl as event_dsl; use nexus_types::identity::Resource; use omicron_common::api::external::DataPageParams; use omicron_common::api::external::NameOrId; use omicron_common::api::external::http_pagination::PaginatedBy; +use omicron_uuid_kinds::AlertUuid; use omicron_uuid_kinds::GenericUuid; -use omicron_uuid_kinds::WebhookEventUuid; use tabled::Tabled; use uuid::Uuid; #[derive(Debug, Args, Clone)] -pub(super) struct WebhookArgs { +pub(super) struct AlertArgs { #[command(subcommand)] command: Commands, } #[derive(Debug, Subcommand, Clone)] enum Commands { - /// Get information on webhook receivers + /// List alerts + #[clap(alias = "ls")] + List(AlertListArgs), + + /// Show details on an alert + #[clap(alias = "show")] + Info(AlertInfoArgs), + + /// Commands relating to webhook alerts. + Webhook(WebhookArgs), +} + +#[derive(Debug, Args, Clone)] +struct AlertListArgs { + /// If set, include alert JSON payloads in the output. + /// + /// Note that this results in very wide output. + #[clap(long, short)] + payload: bool, + + /// Include only alerts created before this timestamp + #[clap(long, short)] + before: Option>, + + /// Include only alerts created after this timestamp + #[clap(long, short)] + after: Option>, + + /// Include only alerts fully dispatched before this timestamp + #[clap(long)] + dispatched_before: Option>, + + /// Include only alerts fully dispatched after this timestamp + #[clap(long)] + dispatched_after: Option>, + + /// If `true`, include only alerts that have been fully dispatched. + /// If `false`, include only alerts that have not been fully dispatched. + /// + /// If this argument is not provided, both dispatched and un-dispatched + /// events are included. 
+ #[clap(long, short)] + dispatched: Option, +} + +#[derive(Debug, Args, Clone)] +struct AlertInfoArgs { + /// The ID of the alert to show + id: AlertUuid, +} + +#[derive(Debug, Args, Clone)] +struct WebhookArgs { + #[clap(subcommand)] + command: WebhookCommands, +} + +#[derive(Debug, Subcommand, Clone)] +enum WebhookCommands { + /// Get information on webhook alert receivers #[clap(alias = "rx")] - Receiver { - #[command(subcommand)] - command: RxCommands, - }, - /// Get information on webhook events - Event { - #[command(subcommand)] - command: EventCommands, - }, - /// Get information on webhook delivieries - Delivery { - #[command(subcommand)] - command: DeliveryCommands, - }, + Receiver(WebhookRxArgs), + + /// Get information on webhook alert deliveries + Delivery(WebhookDeliveryArgs), +} + +#[derive(Debug, Args, Clone)] +struct WebhookRxArgs { + #[clap(subcommand)] + command: WebhookRxCommands, } #[derive(Debug, Subcommand, Clone)] -enum RxCommands { - /// List webhook receivers +enum WebhookRxCommands { + /// List webhook alert receivers #[clap(alias = "ls")] - List(RxListArgs), + List(WebhookRxListArgs), + /// Get details on a webhook alert receiver #[clap(alias = "show")] - Info(RxInfoArgs), + Info(WebhookRxInfoArgs), } #[derive(Debug, Args, Clone)] -struct RxInfoArgs { +struct WebhookRxInfoArgs { receiver: NameOrId, } #[derive(Debug, Args, Clone)] -struct RxListArgs { +struct WebhookRxListArgs { #[clap(long, short = 'a')] start_at: Option, } +#[derive(Debug, Args, Clone)] +struct WebhookDeliveryArgs { + #[clap(subcommand)] + command: WebhookDeliveryCommands, +} + #[derive(Debug, Subcommand, Clone)] -enum DeliveryCommands { +enum WebhookDeliveryCommands { /// List webhook deliveries #[clap(alias = "ls")] - List(DeliveryListArgs), + List(WebhookDeliveryListArgs), /// Show details on a webhook delivery, including its payload and attempt history. #[clap(alias = "show")] - Info(DeliveryInfoArgs), + Info(WebhookDeliveryInfoArgs), } #[derive(Debug, Args, Clone)] -struct DeliveryListArgs { +struct WebhookDeliveryListArgs { /// If present, show only deliveries to this receiver. #[clap(long, short, alias = "rx")] receiver: Option, /// If present, select only deliveries for the given event. #[clap(long, short)] - event: Option, + event: Option, /// If present, select only deliveries in the provided state(s) #[clap(long = "state", short)] - states: Vec, + states: Vec, /// If present, select only deliveries with the provided trigger(s) #[clap(long = "trigger", short)] - triggers: Vec, + triggers: Vec, /// Include only delivery entries created before this timestamp #[clap(long, short)] @@ -127,89 +189,52 @@ struct DeliveryListArgs { } #[derive(Debug, Args, Clone)] -struct DeliveryInfoArgs { +struct WebhookDeliveryInfoArgs { /// The ID of the delivery to show. delivery_id: Uuid, } -#[derive(Debug, Subcommand, Clone)] -enum EventCommands { - /// List webhook events - #[clap(alias = "ls")] - List(EventListArgs), - - /// Show details on a webhook event - #[clap(alias = "show")] - Info(EventInfoArgs), -} - -#[derive(Debug, Args, Clone)] -struct EventListArgs { - /// If set, include event JSON payloads in the output. - /// - /// Note that this results in very wide output. 
- #[clap(long, short)] - payload: bool, - - /// Include only events created before this timestamp - #[clap(long, short)] - before: Option>, - - /// Include only events created after this timestamp - #[clap(long, short)] - after: Option>, - - /// Include only events fully dispatched before this timestamp - #[clap(long)] - dispatched_before: Option>, - - /// Include only events fully dispatched after this timestamp - #[clap(long)] - dispatched_after: Option>, - - /// If `true`, include only events that have been fully dispatched. - /// If `false`, include only events that have not been fully dispatched. - /// - /// If this argument is not provided, both dispatched and un-dispatched - /// events are included. - #[clap(long, short)] - dispatched: Option, -} - -#[derive(Debug, Args, Clone)] -struct EventInfoArgs { - /// The ID of the event to show - event_id: WebhookEventUuid, -} - -pub(super) async fn cmd_db_webhook( +pub(super) async fn cmd_db_alert( opctx: &OpContext, datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &WebhookArgs, + args: &AlertArgs, ) -> anyhow::Result<()> { match &args.command { - Commands::Receiver { command: RxCommands::List(args) } => { - cmd_db_webhook_rx_list(opctx, datastore, fetch_opts, args).await - } - Commands::Receiver { command: RxCommands::Info(args) } => { - cmd_db_webhook_rx_info(datastore, fetch_opts, args).await + Commands::Info(args) => { + cmd_db_alert_info(datastore, fetch_opts, args).await } - Commands::Delivery { command: DeliveryCommands::List(args) } => { - cmd_db_webhook_delivery_list(datastore, fetch_opts, args).await + Commands::List(args) => { + cmd_db_alert_list(datastore, fetch_opts, args).await } - Commands::Delivery { command: DeliveryCommands::Info(args) } => { - cmd_db_webhook_delivery_info(datastore, fetch_opts, args).await - } - Commands::Event { command: EventCommands::Info(args) } => { - cmd_db_webhook_event_info(datastore, fetch_opts, args).await - } - Commands::Event { command: EventCommands::List(args) } => { - cmd_db_webhook_event_list(datastore, fetch_opts, args).await + Commands::Webhook(args) => { + cmd_db_webhook(opctx, datastore, fetch_opts, args).await } } } +async fn cmd_db_webhook( + opctx: &OpContext, + datastore: &DataStore, + fetch_opts: &DbFetchOptions, + args: &WebhookArgs, +) -> anyhow::Result<()> { + match &args.command { + WebhookCommands::Receiver(WebhookRxArgs { + command: WebhookRxCommands::Info(args), + }) => cmd_db_webhook_rx_info(datastore, fetch_opts, args).await, + WebhookCommands::Receiver(WebhookRxArgs { + command: WebhookRxCommands::List(args), + }) => cmd_db_webhook_rx_list(opctx, datastore, fetch_opts, args).await, + WebhookCommands::Delivery(WebhookDeliveryArgs { + command: WebhookDeliveryCommands::List(args), + }) => cmd_db_webhook_delivery_list(datastore, fetch_opts, args).await, + WebhookCommands::Delivery(WebhookDeliveryArgs { + command: WebhookDeliveryCommands::Info(args), + }) => cmd_db_webhook_delivery_info(datastore, fetch_opts, args).await, + } +} + const ID: &'static str = "ID"; const TIME_CREATED: &'static str = "created at"; const TIME_DELETED: &'static str = "deleted at"; @@ -219,7 +244,7 @@ async fn cmd_db_webhook_rx_list( opctx: &OpContext, datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &RxListArgs, + args: &WebhookRxListArgs, ) -> anyhow::Result<()> { let ctx = || { if let Some(starting_at) = args.start_at { @@ -233,7 +258,7 @@ async fn cmd_db_webhook_rx_list( ..first_page(fetch_opts.fetch_limit) }; let rxs = datastore - .webhook_rx_list(opctx, 
&PaginatedBy::Id(pagparams)) + .alert_rx_list(opctx, &PaginatedBy::Id(pagparams)) .await .with_context(ctx)?; @@ -279,10 +304,10 @@ async fn cmd_db_webhook_rx_list( async fn cmd_db_webhook_rx_info( datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &RxInfoArgs, + args: &WebhookRxInfoArgs, ) -> anyhow::Result<()> { - use nexus_db_schema::schema::webhook_rx_event_glob::dsl as glob_dsl; - use nexus_db_schema::schema::webhook_rx_subscription::dsl as subscription_dsl; + use nexus_db_schema::schema::alert_glob::dsl as glob_dsl; + use nexus_db_schema::schema::alert_subscription::dsl as subscription_dsl; use nexus_db_schema::schema::webhook_secret::dsl as secret_dsl; let conn = datastore.pool_connection_for_tests().await?; @@ -320,9 +345,9 @@ async fn cmd_db_webhook_rx_info( GLOB_EXACT, ]); - let WebhookReceiver { + let AlertReceiver { identity: - nexus_db_model::WebhookReceiverIdentity { + nexus_db_model::AlertReceiverIdentity { id, name, description, @@ -382,7 +407,7 @@ async fn cmd_db_webhook_rx_info( time_modified: _, time_created, }, - webhook_receiver_id: _, + alert_receiver_id: _, secret: _, time_deleted, }| SecretRow { @@ -404,17 +429,17 @@ async fn cmd_db_webhook_rx_info( println!("\n{:=<80}", "== SUBSCRIPTIONS "); println!(" {GEN:>WIDTH$}: {}", subscription_gen.0); - let exact = subscription_dsl::webhook_rx_subscription + let exact = subscription_dsl::alert_subscription .filter(subscription_dsl::rx_id.eq(id.into_untyped_uuid())) .filter(subscription_dsl::glob.is_null()) - .select(subscription_dsl::event_class) - .load_async::(&*conn) + .select(subscription_dsl::alert_class) + .load_async::(&*conn) .await; match exact { Ok(exact) => { println!(" {EXACT:>WIDTH$}: {}", exact.len()); - for event_class in exact { - println!(" - {event_class}"); + for alert_class in exact { + println!(" - {alert_class}"); } } Err(e) => { @@ -422,18 +447,18 @@ async fn cmd_db_webhook_rx_info( } } - let globs = glob_dsl::webhook_rx_event_glob + let globs = glob_dsl::alert_glob .filter(glob_dsl::rx_id.eq(id.into_untyped_uuid())) - .select(db::model::WebhookRxEventGlob::as_select()) - .load_async::(&*conn) + .select(db::model::AlertRxGlob::as_select()) + .load_async::(&*conn) .await; match globs { Ok(globs) => { println!(" {GLOBS:>WIDTH$}: {}", globs.len()); for glob in globs { - let db::model::WebhookRxEventGlob { + let db::model::AlertRxGlob { rx_id: _, - glob: db::model::WebhookGlob { glob, regex }, + glob: db::model::AlertGlob { glob, regex }, time_created, schema_version, } = glob; @@ -448,17 +473,17 @@ async fn cmd_db_webhook_rx_info( } println!(" {GLOB_REGEX:>WIDTH$}: {regex}"); - let exact = subscription_dsl::webhook_rx_subscription + let exact = subscription_dsl::alert_subscription .filter(subscription_dsl::rx_id.eq(id.into_untyped_uuid())) .filter(subscription_dsl::glob.eq(glob)) - .select(subscription_dsl::event_class) - .load_async::(&*conn) + .select(subscription_dsl::alert_class) + .load_async::(&*conn) .await; match exact { Ok(exact) => { println!(" {GLOB_EXACT:>WIDTH$}: {}", exact.len()); - for event_class in exact { - println!(" - {event_class}") + for alert_class in exact { + println!(" - {alert_class}") } } Err(e) => eprintln!( @@ -478,10 +503,16 @@ async fn cmd_db_webhook_rx_info( async fn cmd_db_webhook_delivery_list( datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &DeliveryListArgs, + args: &WebhookDeliveryListArgs, ) -> anyhow::Result<()> { - let DeliveryListArgs { before, after, receiver, states, triggers, event } = - args; + let WebhookDeliveryListArgs { + before, + 
after, + receiver, + states, + triggers, + event, + } = args; let conn = datastore.pool_connection_for_tests().await?; let mut query = delivery_dsl::webhook_delivery .limit(fetch_opts.fetch_limit.get().into()) @@ -522,7 +553,7 @@ async fn cmd_db_webhook_delivery_list( } if let Some(id) = event { - query = query.filter(delivery_dsl::event_id.eq(id.into_untyped_uuid())); + query = query.filter(delivery_dsl::alert_id.eq(id.into_untyped_uuid())); } let ctx = || "listing webhook deliveries"; @@ -540,7 +571,7 @@ async fn cmd_db_webhook_delivery_list( struct WithEventId { #[tabled(inline)] inner: T, - event_id: Uuid, + alert_id: Uuid, } impl<'d, T> From<&'d WebhookDelivery> for WithEventId @@ -548,7 +579,7 @@ async fn cmd_db_webhook_delivery_list( T: From<&'d WebhookDelivery> + Tabled, { fn from(d: &'d WebhookDelivery) -> Self { - Self { event_id: d.event_id.into_untyped_uuid(), inner: T::from(d) } + Self { alert_id: d.alert_id.into_untyped_uuid(), inner: T::from(d) } } } @@ -584,8 +615,8 @@ async fn cmd_db_webhook_delivery_list( #[tabled(rename_all = "SCREAMING_SNAKE_CASE")] struct DeliveryRow { id: Uuid, - trigger: nexus_db_model::WebhookDeliveryTrigger, - state: nexus_db_model::WebhookDeliveryState, + trigger: nexus_db_model::AlertDeliveryTrigger, + state: nexus_db_model::AlertDeliveryState, attempts: u8, #[tabled(display_with = "datetime_rfc3339_concise")] time_created: DateTime, @@ -608,7 +639,7 @@ impl From<&'_ WebhookDelivery> for DeliveryRow { // event and receiver UUIDs are toggled on and off based on // whether or not we are filtering by receiver and event, so // ignore them here. - event_id: _, + alert_id: _, rx_id: _, attempts, state, @@ -644,24 +675,24 @@ where async fn lookup_webhook_rx( datastore: &DataStore, name_or_id: &NameOrId, -) -> anyhow::Result> { - use nexus_db_schema::schema::webhook_receiver::dsl; +) -> anyhow::Result> { + use nexus_db_schema::schema::alert_receiver::dsl; let conn = datastore.pool_connection_for_tests().await?; match name_or_id { NameOrId::Id(id) => { - dsl::webhook_receiver + dsl::alert_receiver .filter(dsl::id.eq(*id)) .limit(1) - .select(WebhookReceiver::as_select()) + .select(AlertReceiver::as_select()) .get_result_async(&*conn) .await } NameOrId::Name(ref name) => { - dsl::webhook_receiver + dsl::alert_receiver .filter(dsl::name.eq(name.to_string())) .limit(1) - .select(WebhookReceiver::as_select()) + .select(AlertReceiver::as_select()) .get_result_async(&*conn) .await } @@ -673,11 +704,11 @@ async fn lookup_webhook_rx( async fn cmd_db_webhook_delivery_info( datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &DeliveryInfoArgs, + args: &WebhookDeliveryInfoArgs, ) -> anyhow::Result<()> { use db::model::WebhookDeliveryAttempt; - let DeliveryInfoArgs { delivery_id } = args; + let WebhookDeliveryInfoArgs { delivery_id } = args; let conn = datastore.pool_connection_for_tests().await?; let delivery = delivery_dsl::webhook_delivery .filter(delivery_dsl::id.eq(*delivery_id)) @@ -716,7 +747,7 @@ async fn cmd_db_webhook_delivery_info( let WebhookDelivery { id, - event_id, + alert_id, rx_id, triggered_by, attempts, @@ -728,7 +759,7 @@ async fn cmd_db_webhook_delivery_info( } = delivery; println!("\n{:=<80}", "== DELIVERY "); println!(" {ID:>WIDTH$}: {id}"); - println!(" {EVENT_ID:>WIDTH$}: {event_id}"); + println!(" {EVENT_ID:>WIDTH$}: {alert_id}"); println!(" {RECEIVER_ID:>WIDTH$}: {rx_id}"); println!(" {STATE:>WIDTH$}: {state}"); println!(" {TRIGGER:>WIDTH$}: {triggered_by}"); @@ -831,12 +862,12 @@ async fn cmd_db_webhook_delivery_info( Ok(()) } 
-async fn cmd_db_webhook_event_list( +async fn cmd_db_alert_list( datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &EventListArgs, + args: &AlertListArgs, ) -> anyhow::Result<()> { - let EventListArgs { + let AlertListArgs { payload, before, after, @@ -863,46 +894,46 @@ async fn cmd_db_webhook_event_list( let conn = datastore.pool_connection_for_tests().await?; - let mut query = event_dsl::webhook_event + let mut query = alert_dsl::alert .limit(fetch_opts.fetch_limit.get().into()) - .order_by(event_dsl::time_created.asc()) - .select(WebhookEvent::as_select()) + .order_by(alert_dsl::time_created.asc()) + .select(Alert::as_select()) .into_boxed(); if let Some(before) = before { - query = query.filter(event_dsl::time_created.lt(*before)); + query = query.filter(alert_dsl::time_created.lt(*before)); } if let Some(after) = after { - query = query.filter(event_dsl::time_created.gt(*after)); + query = query.filter(alert_dsl::time_created.gt(*after)); } if let Some(before) = dispatched_before { - query = query.filter(event_dsl::time_dispatched.lt(*before)); + query = query.filter(alert_dsl::time_dispatched.lt(*before)); } if let Some(after) = dispatched_after { - query = query.filter(event_dsl::time_dispatched.gt(*after)); + query = query.filter(alert_dsl::time_dispatched.gt(*after)); } if let Some(dispatched) = dispatched { if *dispatched { - query = query.filter(event_dsl::time_dispatched.is_not_null()); + query = query.filter(alert_dsl::time_dispatched.is_not_null()); } else { - query = query.filter(event_dsl::time_dispatched.is_null()); + query = query.filter(alert_dsl::time_dispatched.is_null()); } } - let ctx = || "loading webhook events"; - let events = query.load_async(&*conn).await.with_context(ctx)?; + let ctx = || "loading alerts"; + let alerts = query.load_async(&*conn).await.with_context(ctx)?; - check_limit(&events, fetch_opts.fetch_limit, ctx); + check_limit(&alerts, fetch_opts.fetch_limit, ctx); #[derive(Tabled)] #[tabled(rename_all = "SCREAMING_SNAKE_CASE")] - struct EventRow { + struct AlertRow { id: Uuid, - class: WebhookEventClass, + class: AlertClass, #[tabled(display_with = "datetime_rfc3339_concise")] time_created: DateTime, #[tabled(display_with = "datetime_opt_rfc3339_concise")] @@ -910,43 +941,43 @@ async fn cmd_db_webhook_event_list( dispatched: i64, } - impl From<&'_ WebhookEvent> for EventRow { - fn from(event: &'_ WebhookEvent) -> Self { + impl From<&'_ Alert> for AlertRow { + fn from(alert: &'_ Alert) -> Self { Self { - id: event.identity.id.into_untyped_uuid(), - class: event.event_class, - time_created: event.identity.time_created, - time_dispatched: event.time_dispatched, - dispatched: event.num_dispatched, + id: alert.identity.id.into_untyped_uuid(), + class: alert.class, + time_created: alert.identity.time_created, + time_dispatched: alert.time_dispatched, + dispatched: alert.num_dispatched, } } } #[derive(Tabled)] #[tabled(rename_all = "SCREAMING_SNAKE_CASE")] - struct EventRowWithPayload { + struct AlertRowWithPayload { #[tabled(inline)] - row: EventRow, + row: AlertRow, payload: String, } let mut table = if *payload { - let rows = events.iter().map(|event| { - let payload = match serde_json::to_string(&event.event) { + let rows = alerts.iter().map(|alert| { + let payload = match serde_json::to_string(&alert.payload) { Ok(payload) => payload, Err(e) => { eprintln!( "/!\\ failed to serialize payload for {:?}: {e}", - event.identity.id + alert.identity.id ); "".to_string() } }; - EventRowWithPayload { row: event.into(), payload } + AlertRowWithPayload 
{ row: alert.into(), payload } }); tabled::Table::new(rows) } else { - let rows = events.iter().map(EventRow::from); + let rows = alerts.iter().map(AlertRow::from); tabled::Table::new(rows) }; table @@ -957,32 +988,31 @@ async fn cmd_db_webhook_event_list( Ok(()) } -async fn cmd_db_webhook_event_info( +async fn cmd_db_alert_info( datastore: &DataStore, fetch_opts: &DbFetchOptions, - args: &EventInfoArgs, + args: &AlertInfoArgs, ) -> anyhow::Result<()> { - let EventInfoArgs { event_id } = args; + let AlertInfoArgs { id } = args; let conn = datastore.pool_connection_for_tests().await?; - let event = event_dsl::webhook_event - .filter(event_dsl::id.eq(event_id.into_untyped_uuid())) - .select(WebhookEvent::as_select()) + let alert = alert_dsl::alert + .filter(alert_dsl::id.eq(id.into_untyped_uuid())) + .select(Alert::as_select()) .limit(1) .get_result_async(&*conn) .await .optional() - .with_context(|| format!("loading webhook event {event_id}"))? - .ok_or_else(|| anyhow::anyhow!("no webhook event {event_id} exists"))?; + .with_context(|| format!("loading alert {id}"))? + .ok_or_else(|| anyhow::anyhow!("no alert {id} exists"))?; - let WebhookEvent { - identity: - db::model::WebhookEventIdentity { id, time_created, time_modified }, + let Alert { + identity: db::model::AlertIdentity { id, time_created, time_modified }, time_dispatched, - event_class, - event, + class, + payload, num_dispatched, - } = event; + } = alert; const CLASS: &str = "class"; const TIME_DISPATCHED: &str = "fully dispatched at"; @@ -997,9 +1027,9 @@ async fn cmd_db_webhook_event_info( CLASS, ]); - println!("\n{:=<80}", "== EVENT "); + println!("\n{:=<80}", "== ALERT "); println!(" {ID:>WIDTH$}: {id:?}"); - println!(" {CLASS:>WIDTH$}: {event_class}"); + println!(" {CLASS:>WIDTH$}: {class}"); println!(" {TIME_CREATED:>WIDTH$}: {time_created}"); println!(" {TIME_MODIFIED:>WIDTH$}: {time_modified}"); println!(); @@ -1008,12 +1038,12 @@ async fn cmd_db_webhook_event_info( println!(" {TIME_DISPATCHED:>WIDTH$}: {t}") } - println!("\n{:=<80}", "== EVENT PAYLOAD "); - serde_json::to_writer_pretty(std::io::stdout(), &event).with_context( - || format!("failed to serialize event payload: {event:?}"), + println!("\n{:=<80}", "== ALERT PAYLOAD "); + serde_json::to_writer_pretty(std::io::stdout(), &payload).with_context( + || format!("failed to serialize alert payload: {payload:?}"), )?; - let ctx = || format!("listing deliveries for event {event_id:?}"); + let ctx = || format!("listing deliveries for alert {id:?}"); let deliveries = delivery_dsl::webhook_delivery .limit(fetch_opts.fetch_limit.get().into()) .order_by(delivery_dsl::time_created.desc()) @@ -1035,7 +1065,7 @@ async fn cmd_db_webhook_event_info( println!("{table}") } else if num_dispatched > 0 { println!( - "/!\\ WEIRD: event claims to have {num_dispatched} deliveries \ + "/!\\ WEIRD: alert claims to have {num_dispatched} deliveries \ dispatched, but no delivery records were found" ) } diff --git a/dev-tools/omdb/src/bin/omdb/nexus.rs b/dev-tools/omdb/src/bin/omdb/nexus.rs index 63d60ddb5cb..154714d992a 100644 --- a/dev-tools/omdb/src/bin/omdb/nexus.rs +++ b/dev-tools/omdb/src/bin/omdb/nexus.rs @@ -1126,8 +1126,8 @@ fn print_task_details(bgtask: &BackgroundTask, details: &serde_json::Value) { "tuf_artifact_replication" => { print_task_tuf_artifact_replication(details); } - "webhook_dispatcher" => { - print_task_webhook_dispatcher(details); + "alert_dispatcher" => { + print_task_alert_dispatcher(details); } "webhook_deliverator" => { print_task_webhook_deliverator(details); @@ 
-2429,19 +2429,18 @@ fn print_task_tuf_artifact_replication(details: &serde_json::Value) { } } -fn print_task_webhook_dispatcher(details: &serde_json::Value) { - use nexus_types::internal_api::background::WebhookDispatched; - use nexus_types::internal_api::background::WebhookDispatcherStatus; - use nexus_types::internal_api::background::WebhookGlobStatus; +fn print_task_alert_dispatcher(details: &serde_json::Value) { + use nexus_types::internal_api::background::AlertDispatched; + use nexus_types::internal_api::background::AlertDispatcherStatus; + use nexus_types::internal_api::background::AlertGlobStatus; - let WebhookDispatcherStatus { + let AlertDispatcherStatus { globs_reprocessed, glob_version, errors, dispatched, no_receivers, - } = match serde_json::from_value::(details.clone()) - { + } = match serde_json::from_value::(details.clone()) { Err(error) => { eprintln!( "warning: failed to interpret task details: {:?}: {:?}", @@ -2462,8 +2461,8 @@ fn print_task_webhook_dispatcher(details: &serde_json::Value) { } } - const DISPATCHED: &str = "events dispatched:"; - const NO_RECEIVERS: &str = "events with no receivers subscribed:"; + const DISPATCHED: &str = "alerts dispatched:"; + const NO_RECEIVERS: &str = "alerts with no receivers subscribed:"; const OUTDATED_GLOBS: &str = "outdated glob subscriptions:"; const GLOBS_REPROCESSED: &str = "glob subscriptions reprocessed:"; const ALREADY_REPROCESSED: &str = @@ -2491,9 +2490,9 @@ fn print_task_webhook_dispatcher(details: &serde_json::Value) { dispatched: usize, } let table_rows = dispatched.iter().map( - |&WebhookDispatched { event_id, subscribed, dispatched }| { + |&AlertDispatched { alert_id, subscribed, dispatched }| { DispatchedRow { - event: event_id.into_untyped_uuid(), + event: alert_id.into_untyped_uuid(), subscribed, dispatched, } @@ -2526,11 +2525,11 @@ fn print_task_webhook_dispatcher(details: &serde_json::Value) { println!(" receiver {rx_id:?}:"); for (glob, status) in globs { match status { - Ok(WebhookGlobStatus::AlreadyReprocessed) => { + Ok(AlertGlobStatus::AlreadyReprocessed) => { println!(" > {glob:?}: already reprocessed"); already_reprocessed += 1; } - Ok(WebhookGlobStatus::Reprocessed { + Ok(AlertGlobStatus::Reprocessed { created, deleted, prev_version, @@ -2661,7 +2660,7 @@ fn print_task_webhook_deliverator(details: &serde_json::Value) { let table_rows = failed_deliveries.into_iter().map( |WebhookDeliveryFailure { delivery_id, - event_id, + alert_id, attempt, result, response_status, @@ -2670,7 +2669,7 @@ fn print_task_webhook_deliverator(details: &serde_json::Value) { // Turn these into untyped `Uuid`s so that the Display impl // doesn't include the UUID kind in the table. 
delivery: delivery_id.into_untyped_uuid(), - event: event_id.into_untyped_uuid(), + event: alert_id.into_untyped_uuid(), attempt, result, status: response_status, diff --git a/dev-tools/omdb/tests/env.out b/dev-tools/omdb/tests/env.out index 0e6a62d6478..81bfa368cb8 100644 --- a/dev-tools/omdb/tests/env.out +++ b/dev-tools/omdb/tests/env.out @@ -30,6 +30,10 @@ task: "abandoned_vmm_reaper" instances +task: "alert_dispatcher" + dispatches queued alerts to receivers + + task: "bfd_manager" Manages bidirectional fowarding detection (BFD) configuration on rack switches @@ -195,10 +199,6 @@ task: "webhook_deliverator" sends webhook delivery requests -task: "webhook_dispatcher" - dispatches queued webhook events to receivers - - --------------------------------------------- stderr: note: using Nexus URL http://127.0.0.1:REDACTED_PORT @@ -226,6 +226,10 @@ task: "abandoned_vmm_reaper" instances +task: "alert_dispatcher" + dispatches queued alerts to receivers + + task: "bfd_manager" Manages bidirectional fowarding detection (BFD) configuration on rack switches @@ -391,10 +395,6 @@ task: "webhook_deliverator" sends webhook delivery requests -task: "webhook_dispatcher" - dispatches queued webhook events to receivers - - --------------------------------------------- stderr: note: Nexus URL not specified. Will pick one from DNS. @@ -409,6 +409,10 @@ task: "abandoned_vmm_reaper" instances +task: "alert_dispatcher" + dispatches queued alerts to receivers + + task: "bfd_manager" Manages bidirectional fowarding detection (BFD) configuration on rack switches @@ -574,10 +578,6 @@ task: "webhook_deliverator" sends webhook delivery requests -task: "webhook_dispatcher" - dispatches queued webhook events to receivers - - --------------------------------------------- stderr: note: Nexus URL not specified. Will pick one from DNS. 
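For readers following the CLI changes above: the reorganized `omdb db alert` command tree nests the former webhook receiver and delivery commands under a `webhook` subcommand, alongside the new alert-level `list` and `info` commands. A few illustrative invocations, using placeholder values; the flag names and aliases come from the clap definitions earlier in this patch:

    omdb db alert list --payload                           # list alerts, including their JSON payloads (wide output)
    omdb db alert info <ALERT_ID>                           # show one alert (alias: `show`)
    omdb db alert webhook receiver ls                       # list webhook alert receivers (`rx` is an alias for `receiver`)
    omdb db alert webhook delivery list --rx <RECEIVER>     # list deliveries to a single receiver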
diff --git a/dev-tools/omdb/tests/successes.out b/dev-tools/omdb/tests/successes.out index 0ef02465b0a..eef7d76fcc5 100644 --- a/dev-tools/omdb/tests/successes.out +++ b/dev-tools/omdb/tests/successes.out @@ -238,6 +238,10 @@ task: "abandoned_vmm_reaper" instances +task: "alert_dispatcher" + dispatches queued alerts to receivers + + task: "bfd_manager" Manages bidirectional fowarding detection (BFD) configuration on rack switches @@ -403,10 +407,6 @@ task: "webhook_deliverator" sends webhook delivery requests -task: "webhook_dispatcher" - dispatches queued webhook events to receivers - - --------------------------------------------- stderr: note: using Nexus URL http://127.0.0.1:REDACTED_PORT/ @@ -502,6 +502,14 @@ task: "abandoned_vmm_reaper" VMMs already deleted by another Nexus: 0 sled resource reservations deleted: 0 +task: "alert_dispatcher" + configured period: every m + currently executing: no + last completed activation: , triggered by a periodic timer firing + started at (s ago) and ran for ms + alerts dispatched: 0 + alerts with no receivers subscribed: 0 + task: "bfd_manager" configured period: every s currently executing: no @@ -795,14 +803,6 @@ task: "webhook_deliverator" already delivered by another Nexus: 0 in progress by another Nexus: 0 -task: "webhook_dispatcher" - configured period: every m - currently executing: no - last completed activation: , triggered by a periodic timer firing - started at (s ago) and ran for ms - events dispatched: 0 - events with no receivers subscribed: 0 - --------------------------------------------- stderr: note: using Nexus URL http://127.0.0.1:REDACTED_PORT/ @@ -1021,6 +1021,14 @@ task: "abandoned_vmm_reaper" VMMs already deleted by another Nexus: 0 sled resource reservations deleted: 0 +task: "alert_dispatcher" + configured period: every m + currently executing: no + last completed activation: , triggered by a periodic timer firing + started at (s ago) and ran for ms + alerts dispatched: 0 + alerts with no receivers subscribed: 0 + task: "bfd_manager" configured period: every s currently executing: no @@ -1314,14 +1322,6 @@ task: "webhook_deliverator" already delivered by another Nexus: 0 in progress by another Nexus: 0 -task: "webhook_dispatcher" - configured period: every m - currently executing: no - last completed activation: , triggered by a periodic timer firing - started at (s ago) and ran for ms - events dispatched: 0 - events with no receivers subscribed: 0 - --------------------------------------------- stderr: note: using Nexus URL http://127.0.0.1:REDACTED_PORT/ diff --git a/dev-tools/omdb/tests/usage_errors.out b/dev-tools/omdb/tests/usage_errors.out index a74715855e6..68ff3c59272 100644 --- a/dev-tools/omdb/tests/usage_errors.out +++ b/dev-tools/omdb/tests/usage_errors.out @@ -139,7 +139,7 @@ Commands: processes vmms Alias to `omdb db vmm list` oximeter Print information about the oximeter collector - webhook Print information about webhooks + alert Print information about alerts zpool Commands for querying and interacting with pools help Print this message or the help of the given subcommand(s) @@ -196,7 +196,7 @@ Commands: processes vmms Alias to `omdb db vmm list` oximeter Print information about the oximeter collector - webhook Print information about webhooks + alert Print information about alerts zpool Commands for querying and interacting with pools help Print this message or the help of the given subcommand(s) diff --git a/nexus-config/src/nexus_config.rs b/nexus-config/src/nexus_config.rs index 
1364a52fbad..793a24c41d4 100644 --- a/nexus-config/src/nexus_config.rs +++ b/nexus-config/src/nexus_config.rs @@ -436,7 +436,7 @@ pub struct BackgroundTaskConfig { pub read_only_region_replacement_start: ReadOnlyRegionReplacementStartConfig, /// configuration for webhook dispatcher task - pub webhook_dispatcher: WebhookDispatcherConfig, + pub alert_dispatcher: AlertDispatcherConfig, /// configuration for webhook deliverator task pub webhook_deliverator: WebhookDeliveratorConfig, } @@ -765,7 +765,7 @@ pub struct ReadOnlyRegionReplacementStartConfig { #[serde_as] #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct WebhookDispatcherConfig { +pub struct AlertDispatcherConfig { /// period (in seconds) for periodic activations of this background task #[serde_as(as = "DurationSeconds")] pub period_secs: Duration, @@ -1079,7 +1079,7 @@ mod test { tuf_artifact_replication.period_secs = 300 tuf_artifact_replication.min_sled_replication = 3 read_only_region_replacement_start.period_secs = 30 - webhook_dispatcher.period_secs = 42 + alert_dispatcher.period_secs = 42 webhook_deliverator.period_secs = 43 webhook_deliverator.lease_timeout_secs = 44 webhook_deliverator.first_retry_backoff_secs = 45 @@ -1292,7 +1292,7 @@ mod test { ReadOnlyRegionReplacementStartConfig { period_secs: Duration::from_secs(30), }, - webhook_dispatcher: WebhookDispatcherConfig { + alert_dispatcher: AlertDispatcherConfig { period_secs: Duration::from_secs(42), }, webhook_deliverator: WebhookDeliveratorConfig { @@ -1387,7 +1387,7 @@ mod test { tuf_artifact_replication.period_secs = 300 tuf_artifact_replication.min_sled_replication = 3 read_only_region_replacement_start.period_secs = 30 - webhook_dispatcher.period_secs = 42 + alert_dispatcher.period_secs = 42 webhook_deliverator.period_secs = 43 [default_region_allocation_strategy] type = "random" diff --git a/nexus/auth/src/authz/api_resources.rs b/nexus/auth/src/authz/api_resources.rs index 7d9c17bd71c..28b61f770d1 100644 --- a/nexus/auth/src/authz/api_resources.rs +++ b/nexus/auth/src/authz/api_resources.rs @@ -711,23 +711,21 @@ impl AuthorizedResource for TargetReleaseConfig { } } -/// Synthetic resource used for modeling access to the list of webhook event -/// classes. +/// Synthetic resource used for modeling access to the list of alert classes. #[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub struct WebhookEventClassList; -pub const WEBHOOK_EVENT_CLASS_LIST: WebhookEventClassList = - WebhookEventClassList {}; +pub struct AlertClassList; +pub const ALERT_CLASS_LIST: AlertClassList = AlertClassList {}; -impl oso::PolarClass for WebhookEventClassList { +impl oso::PolarClass for AlertClassList { fn get_polar_class_builder() -> oso::ClassBuilder { - // Roles are not directly attached to EventClassList + // Roles are not directly attached to AlertClassList oso::Class::builder() .with_equality_check() .add_attribute_getter("fleet", |_| FLEET) } } -impl AuthorizedResource for WebhookEventClassList { +impl AuthorizedResource for AlertClassList { fn load_roles<'fut>( &'fut self, opctx: &'fut OpContext, @@ -1144,24 +1142,24 @@ authz_resource! { } authz_resource! { - name = "WebhookEvent", + name = "Alert", parent = "Fleet", - primary_key = { uuid_kind = WebhookEventKind }, + primary_key = { uuid_kind = AlertKind }, roles_allowed = false, polar_snippet = FleetChild, } authz_resource! 
{ - name = "WebhookReceiver", + name = "AlertReceiver", parent = "Fleet", - primary_key = { uuid_kind = WebhookReceiverKind }, + primary_key = { uuid_kind = AlertReceiverKind }, roles_allowed = false, polar_snippet = FleetChild, } authz_resource! { name = "WebhookSecret", - parent = "WebhookReceiver", + parent = "AlertReceiver", primary_key = { uuid_kind = WebhookSecretKind }, roles_allowed = false, polar_snippet = Custom, diff --git a/nexus/auth/src/authz/omicron.polar b/nexus/auth/src/authz/omicron.polar index 80abeaa1458..a8d612dc99e 100644 --- a/nexus/auth/src/authz/omicron.polar +++ b/nexus/auth/src/authz/omicron.polar @@ -596,21 +596,21 @@ has_role(USER_INTERNAL_API: AuthenticatedActor, "admin", _silo: Silo); resource WebhookSecret { permissions = [ "read", "modify" ]; - relations = { parent_webhook_receiver: WebhookReceiver }; + relations = { parent_alert_receiver: AlertReceiver }; - "read" if "read" on "parent_webhook_receiver"; - "modify" if "modify" on "parent_webhook_receiver"; + "read" if "read" on "parent_alert_receiver"; + "modify" if "modify" on "parent_alert_receiver"; } -has_relation(rx: WebhookReceiver, "parent_webhook_receiver", secret: WebhookSecret) - if secret.webhook_receiver = rx; +has_relation(rx: AlertReceiver, "parent_alert_receiver", secret: WebhookSecret) + if secret.alert_receiver = rx; -resource WebhookEventClassList { +resource AlertClassList { permissions = [ "list_children" ]; relations = { parent_fleet: Fleet }; "list_children" if "viewer" on "parent_fleet"; } -has_relation(fleet: Fleet, "parent_fleet", collection: WebhookEventClassList) +has_relation(fleet: Fleet, "parent_fleet", collection: AlertClassList) if collection.fleet = fleet; diff --git a/nexus/auth/src/authz/oso_generic.rs b/nexus/auth/src/authz/oso_generic.rs index 3b47fbfdd0a..4d21777fc30 100644 --- a/nexus/auth/src/authz/oso_generic.rs +++ b/nexus/auth/src/authz/oso_generic.rs @@ -115,7 +115,7 @@ pub fn make_omicron_oso(log: &slog::Logger) -> Result { SiloIdentityProviderList::get_polar_class(), SiloUserList::get_polar_class(), TargetReleaseConfig::get_polar_class(), - WebhookEventClassList::get_polar_class(), + AlertClassList::get_polar_class(), ]; for c in classes { oso_builder = oso_builder.register_class(c)?; @@ -164,8 +164,8 @@ pub fn make_omicron_oso(log: &slog::Logger) -> Result { Sled::init(), TufRepo::init(), TufArtifact::init(), - WebhookEvent::init(), - WebhookReceiver::init(), + Alert::init(), + AlertReceiver::init(), WebhookSecret::init(), Zpool::init(), Service::init(), diff --git a/nexus/background-task-interface/src/init.rs b/nexus/background-task-interface/src/init.rs index d27b42e032c..82099f679ad 100644 --- a/nexus/background-task-interface/src/init.rs +++ b/nexus/background-task-interface/src/init.rs @@ -44,7 +44,7 @@ pub struct BackgroundTasks { pub task_region_snapshot_replacement_finish: Activator, pub task_tuf_artifact_replication: Activator, pub task_read_only_region_replacement_start: Activator, - pub task_webhook_dispatcher: Activator, + pub task_alert_dispatcher: Activator, pub task_webhook_deliverator: Activator, // Handles to activate background tasks that do not get used by Nexus diff --git a/nexus/db-lookup/src/lookup.rs b/nexus/db-lookup/src/lookup.rs index 9e2172d191d..a73f09f232e 100644 --- a/nexus/db-lookup/src/lookup.rs +++ b/nexus/db-lookup/src/lookup.rs @@ -26,13 +26,13 @@ use nexus_types::identity::Resource; use omicron_common::api::external::Error; use omicron_common::api::external::InternalContext; use omicron_common::api::external::{LookupResult, 
LookupType, ResourceType}; +use omicron_uuid_kinds::AlertReceiverUuid; +use omicron_uuid_kinds::AlertUuid; use omicron_uuid_kinds::PhysicalDiskUuid; use omicron_uuid_kinds::SupportBundleUuid; use omicron_uuid_kinds::TufArtifactKind; use omicron_uuid_kinds::TufRepoKind; use omicron_uuid_kinds::TypedUuid; -use omicron_uuid_kinds::WebhookEventUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use omicron_uuid_kinds::WebhookSecretUuid; use slog::{error, trace}; use uuid::Uuid; @@ -480,38 +480,38 @@ impl<'a> LookupPath<'a> { SamlIdentityProvider::PrimaryKey(Root { lookup_root: self }, id) } - pub fn webhook_receiver_id<'b>( + pub fn alert_receiver_id<'b>( self, - id: WebhookReceiverUuid, - ) -> WebhookReceiver<'b> + id: AlertReceiverUuid, + ) -> AlertReceiver<'b> where 'a: 'b, { - WebhookReceiver::PrimaryKey(Root { lookup_root: self }, id) + AlertReceiver::PrimaryKey(Root { lookup_root: self }, id) } - /// Select a resource of type [`WebhookReceiver`], identified by its name - pub fn webhook_receiver_name<'b, 'c>( + /// Select a resource of type [`AlertReceiver`], identified by its name + pub fn alert_receiver_name<'b, 'c>( self, name: &'b Name, - ) -> WebhookReceiver<'c> + ) -> AlertReceiver<'c> where 'a: 'c, 'b: 'c, { - WebhookReceiver::Name(Root { lookup_root: self }, name) + AlertReceiver::Name(Root { lookup_root: self }, name) } - /// Select a resource of type [`WebhookReceiver`], identified by its owned name - pub fn webhook_receiver_name_owned<'b, 'c>( + /// Select a resource of type [`AlertReceiver`], identified by its owned name + pub fn alert_receiver_name_owned<'b, 'c>( self, name: Name, - ) -> WebhookReceiver<'c> + ) -> AlertReceiver<'c> where 'a: 'c, 'b: 'c, { - WebhookReceiver::OwnedName(Root { lookup_root: self }, name) + AlertReceiver::OwnedName(Root { lookup_root: self }, name) } /// Select a resource of type [`WebhookSecret`], identified by its UUID. @@ -525,12 +525,12 @@ impl<'a> LookupPath<'a> { WebhookSecret::PrimaryKey(Root { lookup_root: self }, id) } - /// Select a resource of type [`WebhookEvent`], identified by its UUID. - pub fn webhook_event_id<'b>(self, id: WebhookEventUuid) -> WebhookEvent<'b> + /// Select a resource of type [`Alert`], identified by its UUID. + pub fn alert_id<'b>(self, id: AlertUuid) -> Alert<'b> where 'a: 'b, { - WebhookEvent::PrimaryKey(Root { lookup_root: self }, id) + Alert::PrimaryKey(Root { lookup_root: self }, id) } } @@ -908,18 +908,18 @@ lookup_resource! { } lookup_resource! { - name = "WebhookReceiver", + name = "AlertReceiver", ancestors = [], lookup_by_name = true, soft_deletes = true, primary_key_columns = [ - { column_name = "id", uuid_kind = WebhookReceiverKind } + { column_name = "id", uuid_kind = AlertReceiverKind } ] } lookup_resource! { name = "WebhookSecret", - ancestors = ["WebhookReceiver"], + ancestors = ["AlertReceiver"], lookup_by_name = false, soft_deletes = false, primary_key_columns = [ @@ -928,12 +928,12 @@ lookup_resource! { } lookup_resource! { - name = "WebhookEvent", + name = "Alert", ancestors = [], lookup_by_name = false, soft_deletes = false, primary_key_columns = [ - { column_name = "id", uuid_kind = WebhookEventKind } + { column_name = "id", uuid_kind = AlertKind } ] } diff --git a/nexus/db-model/src/webhook_event.rs b/nexus/db-model/src/alert.rs similarity index 52% rename from nexus/db-model/src/webhook_event.rs rename to nexus/db-model/src/alert.rs index 9865567c664..9c4dc24aed3 100644 --- a/nexus/db-model/src/webhook_event.rs +++ b/nexus/db-model/src/alert.rs @@ -2,10 +2,10 @@ // License, v. 2.0. 
If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. -use crate::WebhookEventClass; +use crate::AlertClass; use chrono::{DateTime, Utc}; use db_macros::Asset; -use nexus_db_schema::schema::webhook_event; +use nexus_db_schema::schema::alert; use serde::{Deserialize, Serialize}; /// A webhook event. @@ -20,29 +20,30 @@ use serde::{Deserialize, Serialize}; PartialEq, Asset, )] -#[diesel(table_name = webhook_event)] -#[asset(uuid_kind = WebhookEventKind)] -pub struct WebhookEvent { +#[diesel(table_name = alert)] +#[asset(uuid_kind = AlertKind)] +pub struct Alert { #[diesel(embed)] - pub identity: WebhookEventIdentity, + pub identity: AlertIdentity, - /// The time at which this event was dispatched by creating entries in the + /// The time at which this alert was dispatched by creating entries in the /// `webhook_delivery` table. /// - /// If this is `None`, this event has yet to be dispatched. + /// If this is `None`, this alert has yet to be dispatched. pub time_dispatched: Option>, - /// The class of this event. - pub event_class: WebhookEventClass, + /// The class of this alert. + #[diesel(column_name = alert_class)] + pub class: AlertClass, - /// The event's data payload. - pub event: serde_json::Value, + /// The alert's data payload. + pub payload: serde_json::Value, pub num_dispatched: i64, } -impl WebhookEvent { - /// UUID of the singleton event entry for webhook liveness probes. - pub const PROBE_EVENT_ID: uuid::Uuid = +impl Alert { + /// UUID of the singleton event entry for alert receiver liveness probes. + pub const PROBE_ALERT_ID: uuid::Uuid = uuid::Uuid::from_u128(0x001de000_7768_4000_8000_000000000001); } diff --git a/nexus/db-model/src/webhook_event_class.rs b/nexus/db-model/src/alert_class.rs similarity index 81% rename from nexus/db-model/src/webhook_event_class.rs rename to nexus/db-model/src/alert_class.rs index dc7005ccf76..5f0b2129707 100644 --- a/nexus/db-model/src/webhook_event_class.rs +++ b/nexus/db-model/src/alert_class.rs @@ -9,7 +9,7 @@ use serde::ser::{Serialize, Serializer}; use std::fmt; impl_enum_type!( - WebhookEventClassEnum: + AlertClassEnum: #[derive( Copy, @@ -22,7 +22,7 @@ impl_enum_type!( FromSqlRow, strum::VariantArray, )] - pub enum WebhookEventClass; + pub enum AlertClass; Probe => b"probe" TestFoo => b"test.foo" @@ -32,7 +32,7 @@ impl_enum_type!( TestQuuxBarBaz => b"test.quux.bar.baz" ); -impl WebhookEventClass { +impl AlertClass { pub fn as_str(&self) -> &'static str { // TODO(eliza): it would be really nice if these strings were all // declared a single time, rather than twice (in both `impl_enum_type!` @@ -84,13 +84,13 @@ impl WebhookEventClass { ::VARIANTS; } -impl fmt::Display for WebhookEventClass { +impl fmt::Display for AlertClass { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.as_str()) } } -impl Serialize for WebhookEventClass { +impl Serialize for AlertClass { fn serialize(&self, serializer: S) -> Result where S: Serializer, @@ -99,19 +99,19 @@ impl Serialize for WebhookEventClass { } } -impl<'de> Deserialize<'de> for WebhookEventClass { +impl<'de> Deserialize<'de> for AlertClass { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { <&'de str>::deserialize(deserializer)? 
- .parse::() + .parse::() .map_err(de::Error::custom) } } -impl std::str::FromStr for WebhookEventClass { - type Err = EventClassParseError; +impl std::str::FromStr for AlertClass { + type Err = AlertClassParseError; fn from_str(s: &str) -> Result { for &class in Self::ALL_CLASSES { if s == class.as_str() { @@ -119,12 +119,12 @@ impl std::str::FromStr for WebhookEventClass { } } - Err(EventClassParseError(())) + Err(AlertClassParseError(())) } } -impl From for views::EventClass { - fn from(class: WebhookEventClass) -> Self { +impl From for views::AlertClass { + fn from(class: AlertClass) -> Self { Self { name: class.to_string(), description: class.description().to_string(), @@ -133,12 +133,12 @@ impl From for views::EventClass { } #[derive(Debug, Eq, PartialEq)] -pub struct EventClassParseError(()); +pub struct AlertClassParseError(()); -impl fmt::Display for EventClassParseError { +impl fmt::Display for AlertClassParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expected one of [")?; - let mut variants = WebhookEventClass::ALL_CLASSES.iter(); + let mut variants = AlertClass::ALL_CLASSES.iter(); if let Some(v) = variants.next() { write!(f, "{v}")?; for v in variants { @@ -149,7 +149,7 @@ impl fmt::Display for EventClassParseError { } } -impl std::error::Error for EventClassParseError {} +impl std::error::Error for AlertClassParseError {} #[cfg(test)] mod tests { @@ -157,17 +157,17 @@ mod tests { #[test] fn test_from_str_roundtrips() { - for &variant in WebhookEventClass::ALL_CLASSES { + for &variant in AlertClass::ALL_CLASSES { assert_eq!(Ok(dbg!(variant)), dbg!(variant.to_string().parse())); } } // This is mainly a regression test to ensure that, should anyone add new - // `test.` variants in future, the `WebhookEventClass::is_test()` method + // `test.` variants in future, the `AlertClass::is_test()` method // returns `true` for them. #[test] fn test_is_test() { - let problematic_variants = WebhookEventClass::ALL_CLASSES + let problematic_variants = AlertClass::ALL_CLASSES .iter() .copied() .filter(|variant| { @@ -176,10 +176,10 @@ mod tests { .collect::>(); assert_eq!( problematic_variants, - Vec::::new(), + Vec::::new(), "you have added one or more new `test.*` webhook event class \ variant(s), but you seem to have not updated the \ - `WebhookEventClass::is_test()` method!\nthe problematic \ + `AlertClass::is_test()` method!\nthe problematic \ variant(s) are: {problematic_variants:?}", ); } diff --git a/nexus/db-model/src/webhook_delivery_state.rs b/nexus/db-model/src/alert_delivery_state.rs similarity index 53% rename from nexus/db-model/src/webhook_delivery_state.rs rename to nexus/db-model/src/alert_delivery_state.rs index e8714ecfeae..5c5553820bf 100644 --- a/nexus/db-model/src/webhook_delivery_state.rs +++ b/nexus/db-model/src/alert_delivery_state.rs @@ -10,7 +10,7 @@ use std::fmt; use std::str::FromStr; impl_enum_type!( - WebhookDeliveryStateEnum: + AlertDeliveryStateEnum: #[derive( Copy, @@ -24,7 +24,7 @@ impl_enum_type!( strum::VariantArray, )] #[serde(rename_all = "snake_case")] - pub enum WebhookDeliveryState; + pub enum AlertDeliveryState; Pending => b"pending" Failed => b"failed" @@ -32,36 +32,36 @@ impl_enum_type!( ); -impl fmt::Display for WebhookDeliveryState { +impl fmt::Display for AlertDeliveryState { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // Forward to the canonical implementation in nexus-types. 
- views::WebhookDeliveryState::from(*self).fmt(f) + views::AlertDeliveryState::from(*self).fmt(f) } } -impl From for views::WebhookDeliveryState { - fn from(trigger: WebhookDeliveryState) -> Self { +impl From for views::AlertDeliveryState { + fn from(trigger: AlertDeliveryState) -> Self { match trigger { - WebhookDeliveryState::Pending => Self::Pending, - WebhookDeliveryState::Failed => Self::Failed, - WebhookDeliveryState::Delivered => Self::Delivered, + AlertDeliveryState::Pending => Self::Pending, + AlertDeliveryState::Failed => Self::Failed, + AlertDeliveryState::Delivered => Self::Delivered, } } } -impl From for WebhookDeliveryState { - fn from(trigger: views::WebhookDeliveryState) -> Self { +impl From for AlertDeliveryState { + fn from(trigger: views::AlertDeliveryState) -> Self { match trigger { - views::WebhookDeliveryState::Pending => Self::Pending, - views::WebhookDeliveryState::Failed => Self::Failed, - views::WebhookDeliveryState::Delivered => Self::Delivered, + views::AlertDeliveryState::Pending => Self::Pending, + views::AlertDeliveryState::Failed => Self::Failed, + views::AlertDeliveryState::Delivered => Self::Delivered, } } } -impl FromStr for WebhookDeliveryState { +impl FromStr for AlertDeliveryState { type Err = omicron_common::api::external::Error; fn from_str(s: &str) -> Result { - views::WebhookDeliveryState::from_str(s).map(Into::into) + views::AlertDeliveryState::from_str(s).map(Into::into) } } diff --git a/nexus/db-model/src/webhook_delivery_trigger.rs b/nexus/db-model/src/alert_delivery_trigger.rs similarity index 52% rename from nexus/db-model/src/webhook_delivery_trigger.rs rename to nexus/db-model/src/alert_delivery_trigger.rs index a69a69f50e6..6fd84d2d8d5 100644 --- a/nexus/db-model/src/webhook_delivery_trigger.rs +++ b/nexus/db-model/src/alert_delivery_trigger.rs @@ -10,7 +10,7 @@ use std::fmt; use std::str::FromStr; impl_enum_type!( - WebhookDeliveryTriggerEnum: + AlertDeliveryTriggerEnum: #[derive( Copy, @@ -24,48 +24,48 @@ impl_enum_type!( strum::VariantArray, )] #[serde(rename_all = "snake_case")] - pub enum WebhookDeliveryTrigger; + pub enum AlertDeliveryTrigger; - Event => b"event" + Alert => b"alert" Resend => b"resend" Probe => b"probe" ); -impl WebhookDeliveryTrigger { +impl AlertDeliveryTrigger { pub const ALL: &'static [Self] = ::VARIANTS; } -impl fmt::Display for WebhookDeliveryTrigger { +impl fmt::Display for AlertDeliveryTrigger { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // Forward to the canonical implementation in nexus-types. 
- views::WebhookDeliveryTrigger::from(*self).fmt(f) + views::AlertDeliveryTrigger::from(*self).fmt(f) } } -impl From for views::WebhookDeliveryTrigger { - fn from(trigger: WebhookDeliveryTrigger) -> Self { +impl From for views::AlertDeliveryTrigger { + fn from(trigger: AlertDeliveryTrigger) -> Self { match trigger { - WebhookDeliveryTrigger::Event => Self::Event, - WebhookDeliveryTrigger::Resend => Self::Resend, - WebhookDeliveryTrigger::Probe => Self::Probe, + AlertDeliveryTrigger::Alert => Self::Alert, + AlertDeliveryTrigger::Resend => Self::Resend, + AlertDeliveryTrigger::Probe => Self::Probe, } } } -impl From for WebhookDeliveryTrigger { - fn from(trigger: views::WebhookDeliveryTrigger) -> Self { +impl From for AlertDeliveryTrigger { + fn from(trigger: views::AlertDeliveryTrigger) -> Self { match trigger { - views::WebhookDeliveryTrigger::Event => Self::Event, - views::WebhookDeliveryTrigger::Resend => Self::Resend, - views::WebhookDeliveryTrigger::Probe => Self::Probe, + views::AlertDeliveryTrigger::Alert => Self::Alert, + views::AlertDeliveryTrigger::Resend => Self::Resend, + views::AlertDeliveryTrigger::Probe => Self::Probe, } } } -impl FromStr for WebhookDeliveryTrigger { +impl FromStr for AlertDeliveryTrigger { type Err = omicron_common::api::external::Error; fn from_str(s: &str) -> Result { - views::WebhookDeliveryTrigger::from_str(s).map(Into::into) + views::AlertDeliveryTrigger::from_str(s).map(Into::into) } } diff --git a/nexus/db-model/src/alert_subscription.rs b/nexus/db-model/src/alert_subscription.rs new file mode 100644 index 00000000000..1f1c559d0e4 --- /dev/null +++ b/nexus/db-model/src/alert_subscription.rs @@ -0,0 +1,354 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. + +use crate::AlertClass; +use crate::AlertClassParseError; +use crate::SemverVersion; +use crate::typed_uuid::DbTypedUuid; +use chrono::{DateTime, Utc}; +use nexus_db_schema::schema::{alert_glob, alert_subscription}; +use nexus_types::external_api::shared; +use omicron_common::api::external::Error; +use omicron_uuid_kinds::{AlertReceiverKind, AlertReceiverUuid}; +use serde::{Deserialize, Serialize}; +use std::fmt; +use std::str::FromStr; + +#[derive( + Clone, Debug, Queryable, Selectable, Insertable, Serialize, Deserialize, +)] +#[diesel(table_name = alert_subscription)] +pub struct AlertRxSubscription { + pub rx_id: DbTypedUuid, + #[diesel(column_name = alert_class)] + pub class: AlertClass, + pub glob: Option, + pub time_created: DateTime, +} + +#[derive( + Clone, Debug, Queryable, Selectable, Insertable, Serialize, Deserialize, +)] +#[diesel(table_name = alert_glob)] +pub struct AlertRxGlob { + pub rx_id: DbTypedUuid, + #[diesel(embed)] + pub glob: AlertGlob, + pub time_created: DateTime, + pub schema_version: Option, +} + +impl AlertRxGlob { + pub fn new(rx_id: AlertReceiverUuid, glob: AlertGlob) -> Self { + Self { + rx_id: DbTypedUuid(rx_id), + glob, + time_created: Utc::now(), + // When inserting a new glob, set the schema version to NULL, + // indicating that the glob will need to be processed before alerts + // can be dispatched. 
+ schema_version: None, + } + } +} +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub enum AlertSubscriptionKind { + Glob(AlertGlob), + Exact(AlertClass), +} + +impl AlertSubscriptionKind { + pub fn new(value: String) -> Result { + if value.is_empty() { + return Err(Error::invalid_value( + "alert_class", + "must not be empty", + )); + } + if value.contains(char::is_whitespace) { + return Err(Error::invalid_value( + "alert_class", + format!( + "invalid alert class {value:?}: alert classes do not \ + contain whitespace", + ), + )); + } + + if value.contains('*') { + let regex = AlertGlob::regex_from_glob(&value)?; + return Ok(Self::Glob(AlertGlob { regex, glob: value })); + } + + let class = value.parse().map_err(|e: AlertClassParseError| { + Error::invalid_value("alert_class", e.to_string()) + })?; + + if class == AlertClass::Probe { + return Err(Error::invalid_value( + "alert_class", + "webhook receivers cannot subscribe to probes", + )); + } + + Ok(Self::Exact(class)) + } +} + +impl TryFrom for shared::AlertSubscription { + type Error = Error; + fn try_from(kind: AlertSubscriptionKind) -> Result { + match kind { + AlertSubscriptionKind::Exact(class) => class.as_str().parse(), + AlertSubscriptionKind::Glob(AlertGlob { glob, .. }) => { + glob.try_into() + } + } + .map_err(|e: anyhow::Error| { + // This is an internal error because any subscription string stored + // in the database should already have been validated. + Error::InternalError { internal_message: e.to_string() } + }) + } +} + +impl TryFrom for AlertSubscriptionKind { + type Error = Error; + fn try_from( + subscription: shared::AlertSubscription, + ) -> Result { + Self::new(String::from(subscription)) + } +} + +impl fmt::Display for AlertSubscriptionKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Exact(class) => class.fmt(f), + Self::Glob(glob) => glob.glob.fmt(f), + } + } +} + +#[derive( + Clone, + Debug, + Eq, + PartialEq, + Hash, + Queryable, + Selectable, + Insertable, + Serialize, + Deserialize, +)] +#[diesel(table_name = alert_glob)] +pub struct AlertGlob { + pub glob: String, + pub regex: String, +} + +impl FromStr for AlertGlob { + type Err = Error; + fn from_str(glob: &str) -> Result { + let regex = Self::regex_from_glob(glob)?; + Ok(Self { glob: glob.to_string(), regex }) + } +} + +impl TryFrom for AlertGlob { + type Error = Error; + fn try_from(glob: String) -> Result { + let regex = Self::regex_from_glob(&glob)?; + Ok(Self { glob, regex }) + } +} + +impl AlertGlob { + fn regex_from_glob(glob: &str) -> Result { + let seg2regex = + |segment: &str, regex: &mut String| -> Result<(), Error> { + match segment { + // Match one segment (i.e. any number of segment characters) + "*" => regex.push_str("[^\\.]+"), + // Match any number of segments + "**" => regex.push_str(".+"), + "" => { + return Err(Error::invalid_value( + "alert_class", + format!( + "invalid alert class {glob:?}: dot-delimited \ + alert class segments must not be empty" + ), + )); + } + s if s.contains('*') => { + return Err(Error::invalid_value( + "alert_class", + format!( + "invalid alert class {glob:?}: all segments \ + must be either '*', '**', or any sequence of \ + non-'*' alphanumeric characters", + ), + )); + } + // Match the literal segment. + s => regex.push_str(s), + } + Ok(()) + }; + + // The subscription's regex will always be at least as long as the alert + // class glob, plus start and end anchors. 
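+        //
+        // For example, the glob "foo.*.bar" is assembled into the regex
+        // "^foo\.[^\.]+\.bar$": a '*' segment matches exactly one
+        // dot-delimited segment, while "foo.**.bar" becomes
+        // "^foo\..+\.bar$", where '**' may span multiple segments (see
+        // `GLOB_CASES` in the tests below).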
+ let mut regex = String::with_capacity(glob.len()); + + regex.push('^'); // Start anchor + let mut segments = glob.split('.'); + if let Some(segment) = segments.next() { + seg2regex(segment, &mut regex)?; + for segment in segments { + regex.push_str("\\."); // segment separator + seg2regex(segment, &mut regex)?; + } + } else { + return Err(Error::invalid_value( + "alert_class", + "alert class strings must not be empty", + )); + }; + regex.push('$'); // End anchor + + Ok(regex) + } +} + +impl AlertRxSubscription { + pub fn exact(rx_id: AlertReceiverUuid, class: AlertClass) -> Self { + Self { + rx_id: DbTypedUuid(rx_id), + class, + glob: None, + time_created: Utc::now(), + } + } + + pub fn for_glob(glob: &AlertRxGlob, class: AlertClass) -> Self { + Self { + rx_id: glob.rx_id, + glob: Some(glob.glob.glob.clone()), + class, + time_created: Utc::now(), + } + } +} + +#[cfg(test)] +mod test { + use super::*; + + const GLOB_CASES: &[(&str, &str)] = &[ + ("foo.*.bar", "^foo\\.[^\\.]+\\.bar$"), + ("foo.*", "^foo\\.[^\\.]+$"), + ("*.foo", "^[^\\.]+\\.foo$"), + ("foo.**.bar", "^foo\\..+\\.bar$"), + ("foo.**", "^foo\\..+$"), + ("foo_bar.*.baz", "^foo_bar\\.[^\\.]+\\.baz$"), + ]; + + #[test] + fn test_alert_class_glob_to_regex() { + const NON_GLOB_CASES: &[(&str, &str)] = + &[("foo.bar", "^foo\\.bar$"), ("foo_bar.baz", "^foo_bar\\.baz$")]; + for (class, regex) in GLOB_CASES.iter().chain(NON_GLOB_CASES.iter()) { + let glob = match AlertGlob::from_str(dbg!(class)) { + Ok(glob) => glob, + Err(error) => panic!( + "alert class glob {class:?} should produce the regex + {regex:?}, but instead failed to parse: {error}" + ), + }; + assert_eq!( + dbg!(regex), + dbg!(&glob.regex), + "alert class {class:?} should produce the regex {regex:?}" + ); + } + } + + #[test] + fn test_valid_subscription_parsing() { + const EXACT_CASES: &[&str] = + &["test.foo", "test.foo.bar", "test.foo.baz"]; + for input in EXACT_CASES { + let parsed = AlertSubscriptionKind::new(dbg!(input).to_string()); + + match dbg!(parsed) { + Ok(AlertSubscriptionKind::Exact(exact)) => { + assert_eq!(exact.as_str(), *input) + } + Ok(AlertSubscriptionKind::Glob(glob)) => panic!( + "expected {input:?} to be an exact subscription, but it \ + parsed as glob {glob:?}", + ), + Err(e) => panic!( + "expected {input:?} to be a valid alert class, but it \ + failed to parse: {e}" + ), + } + } + + for (input, _) in GLOB_CASES { + let parsed = AlertSubscriptionKind::new(dbg!(input).to_string()); + + match dbg!(parsed) { + Ok(AlertSubscriptionKind::Exact(exact)) => { + panic!( + "expected {input:?} to be a glob subscription, but it \ + parsed as an exact subscription {exact:?}", + ); + } + Ok(AlertSubscriptionKind::Glob(glob)) => { + match regex::Regex::new(&glob.regex) { + Ok(_) => {} + Err(e) => panic!( + "glob {glob:?} produced an invalid regex: {e}" + ), + } + } + Err(e) => panic!( + "expected {input:?} to be a valid alert class, but it \ + failed to parse: {e}" + ), + } + } + } + + #[test] + fn test_invalid_subscription_parsing() { + const CASES: &[&str] = &[ + "foo..bar", + ".foo.bar", + "", + "..", + "foo.***", + "*****", + "foo.bar*.baz", + "foo*", + "foo bar.baz", + " ", + " .*", + ]; + for input in CASES { + match AlertSubscriptionKind::new(dbg!(input).to_string()) { + Ok(glob) => panic!( + "invalid alert class {input:?} was parsed \ + successfully as {glob:?}" + ), + Err(error) => { + dbg!(error); + } + } + } + } +} diff --git a/nexus/db-model/src/lib.rs b/nexus/db-model/src/lib.rs index 3627e8c395c..e58783512ec 100644 --- a/nexus/db-model/src/lib.rs +++ 
b/nexus/db-model/src/lib.rs @@ -11,6 +11,11 @@ extern crate newtype_derive; mod address_lot; mod affinity; +mod alert; +mod alert_class; +mod alert_delivery_state; +mod alert_delivery_trigger; +mod alert_subscription; mod allow_list; mod bfd; mod bgp; @@ -71,10 +76,6 @@ mod v2p_mapping; mod vmm_state; mod webhook_delivery; mod webhook_delivery_attempt_result; -mod webhook_delivery_state; -mod webhook_delivery_trigger; -mod webhook_event; -mod webhook_event_class; mod webhook_rx; // These actually represent subqueries, not real table. // However, they must be defined in the same crate as our tables @@ -141,6 +142,11 @@ pub use self::macaddr::*; pub use self::unsigned::*; pub use address_lot::*; pub use affinity::*; +pub use alert::*; +pub use alert_class::*; +pub use alert_delivery_state::*; +pub use alert_delivery_trigger::*; +pub use alert_subscription::*; pub use allow_list::*; pub use bfd::*; pub use bgp::*; @@ -245,10 +251,6 @@ pub use vpc_router::*; pub use vpc_subnet::*; pub use webhook_delivery::*; pub use webhook_delivery_attempt_result::*; -pub use webhook_delivery_state::*; -pub use webhook_delivery_trigger::*; -pub use webhook_event::*; -pub use webhook_event_class::*; pub use webhook_rx::*; pub use zpool::*; diff --git a/nexus/db-model/src/schema_versions.rs b/nexus/db-model/src/schema_versions.rs index 1582493e309..23a206039e6 100644 --- a/nexus/db-model/src/schema_versions.rs +++ b/nexus/db-model/src/schema_versions.rs @@ -16,7 +16,7 @@ use std::{collections::BTreeMap, sync::LazyLock}; /// /// This must be updated when you change the database schema. Refer to /// schema/crdb/README.adoc in the root of this repository for details. -pub const SCHEMA_VERSION: Version = Version::new(142, 0, 0); +pub const SCHEMA_VERSION: Version = Version::new(143, 0, 0); /// List of all past database schema versions, in *reverse* order /// @@ -28,6 +28,7 @@ static KNOWN_VERSIONS: LazyLock> = LazyLock::new(|| { // | leaving the first copy as an example for the next person. // v // KnownVersion::new(next_int, "unique-dirname-with-the-sql-files"), + KnownVersion::new(143, "alerts-renamening"), KnownVersion::new(142, "bp-add-remove-mupdate-override"), KnownVersion::new(141, "caboose-sign-value"), KnownVersion::new(140, "instance-intended-state"), diff --git a/nexus/db-model/src/webhook_delivery.rs b/nexus/db-model/src/webhook_delivery.rs index 4cbad4744c9..923eb86405d 100644 --- a/nexus/db-model/src/webhook_delivery.rs +++ b/nexus/db-model/src/webhook_delivery.rs @@ -2,13 +2,13 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. 
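+//! Database model types for webhook delivery records and their per-attempt
+//! results.
+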
+use crate::Alert; +use crate::AlertClass; +use crate::AlertDeliveryState; +use crate::AlertDeliveryTrigger; use crate::SqlU8; use crate::SqlU16; use crate::WebhookDeliveryAttemptResult; -use crate::WebhookDeliveryState; -use crate::WebhookDeliveryTrigger; -use crate::WebhookEvent; -use crate::WebhookEventClass; use crate::serde_time_delta::optional_time_delta; use crate::typed_uuid::DbTypedUuid; use chrono::{DateTime, TimeDelta, Utc}; @@ -16,9 +16,9 @@ use nexus_db_schema::schema::{webhook_delivery, webhook_delivery_attempt}; use nexus_types::external_api::views; use omicron_uuid_kinds::GenericUuid; use omicron_uuid_kinds::{ + AlertKind, AlertReceiverKind, AlertReceiverUuid, AlertUuid, OmicronZoneKind, OmicronZoneUuid, WebhookDeliveryAttemptKind, - WebhookDeliveryKind, WebhookDeliveryUuid, WebhookEventKind, - WebhookEventUuid, WebhookReceiverKind, WebhookReceiverUuid, + WebhookDeliveryKind, WebhookDeliveryUuid, }; use serde::Deserialize; use serde::Serialize; @@ -40,15 +40,16 @@ pub struct WebhookDelivery { pub id: DbTypedUuid, /// ID of the event dispatched to this receiver (foreign key into - /// `webhook_event`). - pub event_id: DbTypedUuid, + /// `alert`). + #[diesel(column_name = alert_id)] + pub alert_id: DbTypedUuid, /// ID of the receiver to which this event is dispatched (foreign key into /// `webhook_rx`). - pub rx_id: DbTypedUuid, + pub rx_id: DbTypedUuid, /// Describes why this delivery was triggered. - pub triggered_by: WebhookDeliveryTrigger, + pub triggered_by: AlertDeliveryTrigger, /// Attempt count pub attempts: SqlU8, @@ -60,7 +61,7 @@ pub struct WebhookDelivery { /// or permanently failed. pub time_completed: Option>, - pub state: WebhookDeliveryState, + pub state: AlertDeliveryState, pub deliverator_id: Option>, @@ -69,26 +70,26 @@ pub struct WebhookDelivery { impl WebhookDelivery { pub fn new( - event_id: &WebhookEventUuid, - rx_id: &WebhookReceiverUuid, - trigger: WebhookDeliveryTrigger, + alert_id: &AlertUuid, + rx_id: &AlertReceiverUuid, + triggered_by: AlertDeliveryTrigger, ) -> Self { Self { id: WebhookDeliveryUuid::new_v4().into(), - event_id: (*event_id).into(), + alert_id: (*alert_id).into(), rx_id: (*rx_id).into(), - triggered_by: trigger, + triggered_by, attempts: SqlU8::new(0), time_created: Utc::now(), time_completed: None, deliverator_id: None, time_leased: None, - state: WebhookDeliveryState::Pending, + state: AlertDeliveryState::Pending, } } pub fn new_probe( - rx_id: &WebhookReceiverUuid, + rx_id: &AlertReceiverUuid, deliverator_id: &OmicronZoneUuid, ) -> Self { Self { @@ -97,13 +98,11 @@ impl WebhookDelivery { // There's a singleton entry in the `webhook_event` table for // probes, so that we can reference a real event ID but need not // create a bunch of duplicate empty events every time a probe is sent. 
- event_id: WebhookEventUuid::from_untyped_uuid( - WebhookEvent::PROBE_EVENT_ID, - ) - .into(), + alert_id: AlertUuid::from_untyped_uuid(Alert::PROBE_ALERT_ID) + .into(), rx_id: (*rx_id).into(), - triggered_by: WebhookDeliveryTrigger::Probe, - state: WebhookDeliveryState::Pending, + triggered_by: AlertDeliveryTrigger::Probe, + state: AlertDeliveryState::Pending, attempts: SqlU8::new(0), time_created: Utc::now(), time_completed: None, @@ -114,28 +113,26 @@ impl WebhookDelivery { pub fn to_api_delivery( &self, - event_class: WebhookEventClass, + alert_class: AlertClass, attempts: &[WebhookDeliveryAttempt], - ) -> views::WebhookDelivery { - let mut view = views::WebhookDelivery { - id: self.id.into_untyped_uuid(), - webhook_id: self.rx_id.into(), - event_class: event_class.as_str().to_owned(), - event_id: self.event_id.into(), - state: self.state.into(), - trigger: self.triggered_by.into(), - attempts: attempts - .iter() - .map(views::WebhookDeliveryAttempt::from) - .collect(), - time_started: self.time_created, - }; + ) -> views::AlertDelivery { + let mut attempts: Vec<_> = + attempts.iter().map(views::WebhookDeliveryAttempt::from).collect(); // Make sure attempts are in order; each attempt entry also includes an // attempt number, which should be used authoritatively to determine the // ordering of attempts, but it seems nice to also sort the list, // because we can... - view.attempts.sort_by_key(|a| a.attempt); - view + attempts.sort_by_key(|a| a.attempt); + views::AlertDelivery { + id: self.id.into_untyped_uuid(), + receiver_id: self.rx_id.into(), + alert_class: alert_class.as_str().to_owned(), + alert_id: self.alert_id.into(), + state: self.state.into(), + trigger: self.triggered_by.into(), + attempts: views::AlertDeliveryAttempts::Webhook(attempts), + time_started: self.time_created, + } } } @@ -160,9 +157,9 @@ pub struct WebhookDeliveryAttempt { /// Attempt number (retry count). pub attempt: SqlU8, - /// ID of the receiver to which this event is dispatched (foreign key into + /// ID of the receiver to which this alert is dispatched (foreign key into /// `webhook_rx`). - pub rx_id: DbTypedUuid, + pub rx_id: DbTypedUuid, pub result: WebhookDeliveryAttemptResult, diff --git a/nexus/db-model/src/webhook_rx.rs b/nexus/db-model/src/webhook_rx.rs index 4973e77d102..508431595fb 100644 --- a/nexus/db-model/src/webhook_rx.rs +++ b/nexus/db-model/src/webhook_rx.rs @@ -2,38 +2,35 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. 
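+//! Database model types for alert receivers, their webhook-specific
+//! configuration, and webhook secrets.
+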
-use crate::EventClassParseError; +use crate::AlertRxGlob; +use crate::AlertRxSubscription; +use crate::AlertSubscriptionKind; use crate::Generation; use crate::Name; -use crate::SemverVersion; -use crate::WebhookEventClass; use crate::collection::DatastoreCollectionConfig; use crate::typed_uuid::DbTypedUuid; use chrono::{DateTime, Utc}; use db_macros::{Asset, Resource}; use nexus_db_schema::schema::{ - webhook_receiver, webhook_rx_event_glob, webhook_rx_subscription, - webhook_secret, + alert_glob, alert_receiver, alert_subscription, webhook_secret, }; use nexus_types::external_api::shared; use nexus_types::external_api::views; use nexus_types::identity::Resource; use omicron_common::api::external::Error; use omicron_uuid_kinds::{ - GenericUuid, WebhookReceiverKind, WebhookReceiverUuid, WebhookSecretUuid, + AlertReceiverKind, AlertReceiverUuid, GenericUuid, WebhookSecretUuid, }; use serde::{Deserialize, Serialize}; -use std::fmt; -use std::str::FromStr; use uuid::Uuid; -/// The full configuration of a webhook receiver, including the -/// [`WebhookReceiver`] itself and its subscriptions and secrets. +/// The full configuration of a webhook alert receiver, including the +/// [`AlertReceiver`], its subscriptions, and secrets. #[derive(Clone, Debug)] pub struct WebhookReceiverConfig { - pub rx: WebhookReceiver, + pub rx: AlertReceiver, pub secrets: Vec, - pub subscriptions: Vec, + pub subscriptions: Vec, } impl TryFrom for views::WebhookReceiver { @@ -44,7 +41,7 @@ impl TryFrom for views::WebhookReceiver { let secrets = secrets.iter().map(views::WebhookSecret::from).collect(); let subscriptions = subscriptions .into_iter() - .map(shared::WebhookSubscription::try_from) + .map(shared::AlertSubscription::try_from) .collect::, _>>()?; let endpoint = rx.endpoint.parse().map_err(|e| Error::InternalError { @@ -57,14 +54,18 @@ impl TryFrom for views::WebhookReceiver { })?; Ok(views::WebhookReceiver { identity: rx.identity(), - endpoint, - secrets, subscriptions, + config: views::WebhookReceiverConfig { secrets, endpoint }, }) } } -/// A row in the `webhook_receiver` table. +/// A row in the `alert_receiver` table. +// XXX(eliza): Note that this presently contains both generic "alert receiver +// stuff" (i.e. the identity and subscription rcgen) *and* +// webhook-receiver-specific stuff (endpoint, secret rcgen). If/when we +// introduce other kinds of alert receivers, we will want to split that out into +// a webhook-specific table. 
#[derive( Clone, Debug, @@ -75,11 +76,11 @@ impl TryFrom for views::WebhookReceiver { Serialize, Deserialize, )] -#[resource(uuid_kind = WebhookReceiverKind)] -#[diesel(table_name = webhook_receiver)] -pub struct WebhookReceiver { +#[resource(uuid_kind = AlertReceiverKind)] +#[diesel(table_name = alert_receiver)] +pub struct AlertReceiver { #[diesel(embed)] - pub identity: WebhookReceiverIdentity, + pub identity: AlertReceiverIdentity, pub endpoint: String, /// child resource generation number for secrets, per RFD 192 @@ -88,30 +89,31 @@ pub struct WebhookReceiver { pub subscription_gen: Generation, } -impl DatastoreCollectionConfig for WebhookReceiver { +impl DatastoreCollectionConfig for AlertReceiver { type CollectionId = Uuid; - type GenerationNumberColumn = webhook_receiver::dsl::secret_gen; - type CollectionTimeDeletedColumn = webhook_receiver::dsl::time_deleted; + type GenerationNumberColumn = alert_receiver::dsl::secret_gen; + type CollectionTimeDeletedColumn = alert_receiver::dsl::time_deleted; type CollectionIdColumn = webhook_secret::dsl::rx_id; } -impl DatastoreCollectionConfig for WebhookReceiver { +impl DatastoreCollectionConfig for AlertReceiver { type CollectionId = Uuid; - type GenerationNumberColumn = webhook_receiver::dsl::subscription_gen; - type CollectionTimeDeletedColumn = webhook_receiver::dsl::time_deleted; - type CollectionIdColumn = webhook_rx_subscription::dsl::rx_id; + type GenerationNumberColumn = alert_receiver::dsl::subscription_gen; + type CollectionTimeDeletedColumn = alert_receiver::dsl::time_deleted; + type CollectionIdColumn = alert_subscription::dsl::rx_id; } -impl DatastoreCollectionConfig for WebhookReceiver { +impl DatastoreCollectionConfig for AlertReceiver { type CollectionId = Uuid; - type GenerationNumberColumn = webhook_receiver::dsl::subscription_gen; - type CollectionTimeDeletedColumn = webhook_receiver::dsl::time_deleted; - type CollectionIdColumn = webhook_rx_event_glob::dsl::rx_id; + type GenerationNumberColumn = alert_receiver::dsl::subscription_gen; + type CollectionTimeDeletedColumn = alert_receiver::dsl::time_deleted; + type CollectionIdColumn = alert_glob::dsl::rx_id; } -/// Describes a set of updates for the [`WebhookReceiver`] model. +/// Describes a set of updates for the [`alert_receiver`] table to update a +/// webhook receiver configuration. 
#[derive(Clone, AsChangeset)] -#[diesel(table_name = webhook_receiver)] +#[diesel(table_name = alert_receiver)] pub struct WebhookReceiverUpdate { pub name: Option, pub description: Option, @@ -135,16 +137,16 @@ pub struct WebhookSecret { #[diesel(embed)] pub identity: WebhookSecretIdentity, #[diesel(column_name = rx_id)] - pub webhook_receiver_id: DbTypedUuid, + pub alert_receiver_id: DbTypedUuid, pub secret: String, pub time_deleted: Option>, } impl WebhookSecret { - pub fn new(rx_id: WebhookReceiverUuid, secret: String) -> Self { + pub fn new(rx_id: AlertReceiverUuid, secret: String) -> Self { Self { identity: WebhookSecretIdentity::new(WebhookSecretUuid::new_v4()), - webhook_receiver_id: rx_id.into(), + alert_receiver_id: rx_id.into(), secret, time_deleted: None, } @@ -165,346 +167,3 @@ impl From for views::WebhookSecret { Self::from(&secret) } } - -#[derive( - Clone, Debug, Queryable, Selectable, Insertable, Serialize, Deserialize, -)] -#[diesel(table_name = webhook_rx_subscription)] -pub struct WebhookRxSubscription { - pub rx_id: DbTypedUuid, - pub event_class: WebhookEventClass, - pub glob: Option, - pub time_created: DateTime, -} - -#[derive( - Clone, Debug, Queryable, Selectable, Insertable, Serialize, Deserialize, -)] -#[diesel(table_name = webhook_rx_event_glob)] -pub struct WebhookRxEventGlob { - pub rx_id: DbTypedUuid, - #[diesel(embed)] - pub glob: WebhookGlob, - pub time_created: DateTime, - pub schema_version: Option, -} - -impl WebhookRxEventGlob { - pub fn new(rx_id: WebhookReceiverUuid, glob: WebhookGlob) -> Self { - Self { - rx_id: DbTypedUuid(rx_id), - glob, - time_created: Utc::now(), - // When inserting a new glob, set the schema version to NULL, - // indicating that the glob will need to be processed before events - // can be dispatched. - schema_version: None, - } - } -} -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub enum WebhookSubscriptionKind { - Glob(WebhookGlob), - Exact(WebhookEventClass), -} - -impl WebhookSubscriptionKind { - pub fn new(value: String) -> Result { - if value.is_empty() { - return Err(Error::invalid_value( - "event_class", - "must not be empty", - )); - } - if value.contains(char::is_whitespace) { - return Err(Error::invalid_value( - "event_class", - format!( - "invalid event class {value:?}: event classes do not \ - contain whitespace", - ), - )); - } - - if value.contains('*') { - let regex = WebhookGlob::regex_from_glob(&value)?; - return Ok(Self::Glob(WebhookGlob { regex, glob: value })); - } - - let class = value.parse().map_err(|e: EventClassParseError| { - Error::invalid_value("event_class", e.to_string()) - })?; - - if class == WebhookEventClass::Probe { - return Err(Error::invalid_value( - "event_class", - "webhook receivers cannot subscribe to probes", - )); - } - - Ok(Self::Exact(class)) - } -} - -impl TryFrom for shared::WebhookSubscription { - type Error = Error; - fn try_from(kind: WebhookSubscriptionKind) -> Result { - match kind { - WebhookSubscriptionKind::Exact(class) => class.as_str().parse(), - WebhookSubscriptionKind::Glob(WebhookGlob { glob, .. }) => { - glob.try_into() - } - } - .map_err(|e: anyhow::Error| { - // This is an internal error because any subscription string stored - // in the database should already have been validated. 
- Error::InternalError { internal_message: e.to_string() } - }) - } -} - -impl TryFrom for WebhookSubscriptionKind { - type Error = Error; - fn try_from( - subscription: shared::WebhookSubscription, - ) -> Result { - Self::new(String::from(subscription)) - } -} - -impl fmt::Display for WebhookSubscriptionKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Exact(class) => class.fmt(f), - Self::Glob(glob) => glob.glob.fmt(f), - } - } -} - -#[derive( - Clone, - Debug, - Eq, - PartialEq, - Hash, - Queryable, - Selectable, - Insertable, - Serialize, - Deserialize, -)] -#[diesel(table_name = webhook_rx_event_glob)] -pub struct WebhookGlob { - pub glob: String, - pub regex: String, -} - -impl FromStr for WebhookGlob { - type Err = Error; - fn from_str(glob: &str) -> Result { - let regex = Self::regex_from_glob(glob)?; - Ok(Self { glob: glob.to_string(), regex }) - } -} - -impl TryFrom for WebhookGlob { - type Error = Error; - fn try_from(glob: String) -> Result { - let regex = Self::regex_from_glob(&glob)?; - Ok(Self { glob, regex }) - } -} - -impl WebhookGlob { - fn regex_from_glob(glob: &str) -> Result { - let seg2regex = - |segment: &str, regex: &mut String| -> Result<(), Error> { - match segment { - // Match one segment (i.e. any number of segment characters) - "*" => regex.push_str("[^\\.]+"), - // Match any number of segments - "**" => regex.push_str(".+"), - "" => { - return Err(Error::invalid_value( - "event_class", - format!( - "invalid event class {glob:?}: dot-delimited \ - event class segments must not be empty" - ), - )); - } - s if s.contains('*') => { - return Err(Error::invalid_value( - "event_class", - format!( - "invalid event class {glob:?}: all segments \ - must be either '*', '**', or any sequence of \ - non-'*' alphanumeric characters", - ), - )); - } - // Match the literal segment. - s => regex.push_str(s), - } - Ok(()) - }; - - // The subscription's regex will always be at least as long as the event - // class glob, plus start and end anchors. 
- let mut regex = String::with_capacity(glob.len()); - - regex.push('^'); // Start anchor - let mut segments = glob.split('.'); - if let Some(segment) = segments.next() { - seg2regex(segment, &mut regex)?; - for segment in segments { - regex.push_str("\\."); // segment separator - seg2regex(segment, &mut regex)?; - } - } else { - return Err(Error::invalid_value( - "event_class", - "event class strings must not be empty", - )); - }; - regex.push('$'); // End anchor - - Ok(regex) - } -} - -impl WebhookRxSubscription { - pub fn exact( - rx_id: WebhookReceiverUuid, - event_class: WebhookEventClass, - ) -> Self { - Self { - rx_id: DbTypedUuid(rx_id), - event_class, - glob: None, - time_created: Utc::now(), - } - } - - pub fn for_glob( - glob: &WebhookRxEventGlob, - event_class: WebhookEventClass, - ) -> Self { - Self { - rx_id: glob.rx_id, - glob: Some(glob.glob.glob.clone()), - event_class, - time_created: Utc::now(), - } - } -} - -#[cfg(test)] -mod test { - use super::*; - - const GLOB_CASES: &[(&str, &str)] = &[ - ("foo.*.bar", "^foo\\.[^\\.]+\\.bar$"), - ("foo.*", "^foo\\.[^\\.]+$"), - ("*.foo", "^[^\\.]+\\.foo$"), - ("foo.**.bar", "^foo\\..+\\.bar$"), - ("foo.**", "^foo\\..+$"), - ("foo_bar.*.baz", "^foo_bar\\.[^\\.]+\\.baz$"), - ]; - - #[test] - fn test_event_class_glob_to_regex() { - const NON_GLOB_CASES: &[(&str, &str)] = - &[("foo.bar", "^foo\\.bar$"), ("foo_bar.baz", "^foo_bar\\.baz$")]; - for (class, regex) in GLOB_CASES.iter().chain(NON_GLOB_CASES.iter()) { - let glob = match WebhookGlob::from_str(dbg!(class)) { - Ok(glob) => glob, - Err(error) => panic!( - "event class glob {class:?} should produce the regex - {regex:?}, but instead failed to parse: {error}" - ), - }; - assert_eq!( - dbg!(regex), - dbg!(&glob.regex), - "event class {class:?} should produce the regex {regex:?}" - ); - } - } - - #[test] - fn test_valid_subscription_parsing() { - const EXACT_CASES: &[&str] = - &["test.foo", "test.foo.bar", "test.foo.baz"]; - for input in EXACT_CASES { - let parsed = WebhookSubscriptionKind::new(dbg!(input).to_string()); - - match dbg!(parsed) { - Ok(WebhookSubscriptionKind::Exact(exact)) => { - assert_eq!(exact.as_str(), *input) - } - Ok(WebhookSubscriptionKind::Glob(glob)) => panic!( - "expected {input:?} to be an exact subscription, but it \ - parsed as glob {glob:?}", - ), - Err(e) => panic!( - "expected {input:?} to be a valid event class, but it \ - failed to parse: {e}" - ), - } - } - - for (input, _) in GLOB_CASES { - let parsed = WebhookSubscriptionKind::new(dbg!(input).to_string()); - - match dbg!(parsed) { - Ok(WebhookSubscriptionKind::Exact(exact)) => { - panic!( - "expected {input:?} to be a glob subscription, but it \ - parsed as an exact subscription {exact:?}", - ); - } - Ok(WebhookSubscriptionKind::Glob(glob)) => { - match regex::Regex::new(&glob.regex) { - Ok(_) => {} - Err(e) => panic!( - "glob {glob:?} produced an invalid regex: {e}" - ), - } - } - Err(e) => panic!( - "expected {input:?} to be a valid event class, but it \ - failed to parse: {e}" - ), - } - } - } - - #[test] - fn test_invalid_subscription_parsing() { - const CASES: &[&str] = &[ - "foo..bar", - ".foo.bar", - "", - "..", - "foo.***", - "*****", - "foo.bar*.baz", - "foo*", - "foo bar.baz", - " ", - " .*", - ]; - for input in CASES { - match WebhookSubscriptionKind::new(dbg!(input).to_string()) { - Ok(glob) => panic!( - "invalid event class {input:?} was parsed \ - successfully as {glob:?}" - ), - Err(error) => { - dbg!(error); - } - } - } - } -} diff --git 
a/nexus/db-queries/src/db/datastore/webhook_event.rs b/nexus/db-queries/src/db/datastore/alert.rs similarity index 53% rename from nexus/db-queries/src/db/datastore/webhook_event.rs rename to nexus/db-queries/src/db/datastore/alert.rs index 1cdf6cde7ea..d0b06c33943 100644 --- a/nexus/db-queries/src/db/datastore/webhook_event.rs +++ b/nexus/db-queries/src/db/datastore/alert.rs @@ -2,95 +2,93 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. -//! [`DataStore`] methods for webhook events and event delivery dispatching. +//! [`DataStore`] methods for alerts and alert delivery dispatching. use super::DataStore; use crate::context::OpContext; -use crate::db::model::WebhookEvent; -use crate::db::model::WebhookEventClass; -use crate::db::model::WebhookEventIdentity; +use crate::db::model::Alert; +use crate::db::model::AlertClass; +use crate::db::model::AlertIdentity; use async_bb8_diesel::AsyncRunQueryDsl; use diesel::prelude::*; use diesel::result::OptionalExtension; use nexus_db_errors::ErrorHandler; use nexus_db_errors::public_error_from_diesel; -use nexus_db_schema::schema::webhook_event::dsl as event_dsl; +use nexus_db_schema::schema::alert::dsl as alert_dsl; use omicron_common::api::external::CreateResult; use omicron_common::api::external::Error; use omicron_common::api::external::UpdateResult; -use omicron_uuid_kinds::{GenericUuid, WebhookEventUuid}; +use omicron_uuid_kinds::{AlertUuid, GenericUuid}; impl DataStore { - pub async fn webhook_event_create( + pub async fn alert_create( &self, opctx: &OpContext, - id: WebhookEventUuid, - event_class: WebhookEventClass, - event: serde_json::Value, - ) -> CreateResult { + id: AlertUuid, + class: AlertClass, + payload: serde_json::Value, + ) -> CreateResult { let conn = self.pool_connection_authorized(&opctx).await?; - diesel::insert_into(event_dsl::webhook_event) - .values(WebhookEvent { - identity: WebhookEventIdentity::new(id), + diesel::insert_into(alert_dsl::alert) + .values(Alert { + identity: AlertIdentity::new(id), time_dispatched: None, - event_class, - event, + class, + payload, num_dispatched: 0, }) - .returning(WebhookEvent::as_returning()) + .returning(Alert::as_returning()) .get_result_async(&*conn) .await .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server)) } - pub async fn webhook_event_select_next_for_dispatch( + pub async fn alert_select_next_for_dispatch( &self, opctx: &OpContext, - ) -> Result, Error> { + ) -> Result, Error> { let conn = self.pool_connection_authorized(&opctx).await?; - event_dsl::webhook_event - .filter(event_dsl::time_dispatched.is_null()) - .order_by(event_dsl::time_created.asc()) - .select(WebhookEvent::as_select()) + alert_dsl::alert + .filter(alert_dsl::time_dispatched.is_null()) + .order_by(alert_dsl::time_created.asc()) + .select(Alert::as_select()) .first_async(&*conn) .await .optional() .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server)) } - pub async fn webhook_event_mark_dispatched( + pub async fn alert_mark_dispatched( &self, opctx: &OpContext, - event_id: &WebhookEventUuid, + alert_id: &AlertUuid, subscribed: usize, ) -> UpdateResult { let subscribed = i64::try_from(subscribed).map_err(|_| { - // that is way too many webhook receivers! - Error::internal_error( - "webhook event subscribed count exceeds i64::MAX", - ) + // that is way too many alert receivers! 
+ Error::internal_error("alert subscribed count exceeds i64::MAX") })?; let conn = self.pool_connection_authorized(&opctx).await?; - diesel::update(event_dsl::webhook_event) - .filter(event_dsl::id.eq(event_id.into_untyped_uuid())) + diesel::update(alert_dsl::alert) + .filter(alert_dsl::id.eq(alert_id.into_untyped_uuid())) .filter( - // Update the event record if one of the following is true: + // Update the alert record if one of the following is true: // - The `time_dispatched`` field has not already been set, or // - `time_dispatched` IS set, but `num_dispatched` is less than // the number of deliveries we believe has been dispatched. // This may be the case if a webhook receiver which is - // subscribed to this event was added concurrently with - // another Nexus' dispatching the event, and we dispatched the - // event to that receiver but the other Nexus did not. In that + // subscribed to this alert was added concurrently with + // another Nexus' dispatching the alert, and we dispatched the + // alert to that receiver but the other Nexus did not. In that // case, we would like to update the record to indicate the // correct number of subscribers. - event_dsl::time_dispatched + alert_dsl::time_dispatched .is_null() - .or(event_dsl::num_dispatched.le(subscribed)), + .or(alert_dsl::num_dispatched.le(subscribed)), ) .set(( - event_dsl::time_dispatched.eq(diesel::dsl::now), - event_dsl::num_dispatched.eq(subscribed), + alert_dsl::time_dispatched.eq(diesel::dsl::now), + alert_dsl::num_dispatched.eq(subscribed), )) .execute_async(&*conn) .await diff --git a/nexus/db-queries/src/db/datastore/webhook_rx.rs b/nexus/db-queries/src/db/datastore/alert_rx.rs similarity index 80% rename from nexus/db-queries/src/db/datastore/webhook_rx.rs rename to nexus/db-queries/src/db/datastore/alert_rx.rs index 6f0fae5c827..b30d80d76cc 100644 --- a/nexus/db-queries/src/db/datastore/webhook_rx.rs +++ b/nexus/db-queries/src/db/datastore/alert_rx.rs @@ -12,19 +12,19 @@ use crate::db::collection_insert::AsyncInsertError; use crate::db::collection_insert::DatastoreCollection; use crate::db::datastore::RunnableQuery; use crate::db::datastore::SQL_BATCH_SIZE; +use crate::db::model::AlertClass; +use crate::db::model::AlertGlob; +use crate::db::model::AlertReceiver; +use crate::db::model::AlertReceiverIdentity; +use crate::db::model::AlertRxGlob; +use crate::db::model::AlertRxSubscription; +use crate::db::model::AlertSubscriptionKind; use crate::db::model::Generation; use crate::db::model::Name; use crate::db::model::SCHEMA_VERSION; use crate::db::model::SemverVersion; -use crate::db::model::WebhookEventClass; -use crate::db::model::WebhookGlob; -use crate::db::model::WebhookReceiver; use crate::db::model::WebhookReceiverConfig; -use crate::db::model::WebhookReceiverIdentity; -use crate::db::model::WebhookRxEventGlob; -use crate::db::model::WebhookRxSubscription; use crate::db::model::WebhookSecret; -use crate::db::model::WebhookSubscriptionKind; use crate::db::pagination::Paginator; use crate::db::pagination::paginated; use crate::db::pagination::paginated_multicolumn; @@ -37,16 +37,16 @@ use nexus_db_errors::OptionalError; use nexus_db_errors::TransactionError; use nexus_db_errors::public_error_from_diesel; use nexus_db_lookup::DbConnection; +use nexus_db_schema::schema::alert::dsl as alert_dsl; +use nexus_db_schema::schema::alert_glob::dsl as glob_dsl; +use nexus_db_schema::schema::alert_receiver::dsl as rx_dsl; +use nexus_db_schema::schema::alert_subscription::dsl as subscription_dsl; use 
nexus_db_schema::schema::webhook_delivery::dsl as delivery_dsl; use nexus_db_schema::schema::webhook_delivery_attempt::dsl as delivery_attempt_dsl; -use nexus_db_schema::schema::webhook_event::dsl as event_dsl; -use nexus_db_schema::schema::webhook_receiver::dsl as rx_dsl; -use nexus_db_schema::schema::webhook_rx_event_glob::dsl as glob_dsl; -use nexus_db_schema::schema::webhook_rx_subscription::dsl as subscription_dsl; use nexus_db_schema::schema::webhook_secret::dsl as secret_dsl; use nexus_types::external_api::params; use nexus_types::identity::Resource; -use nexus_types::internal_api::background::WebhookGlobStatus; +use nexus_types::internal_api::background::AlertGlobStatus; use omicron_common::api::external::CreateResult; use omicron_common::api::external::DataPageParams; use omicron_common::api::external::DeleteResult; @@ -55,8 +55,8 @@ use omicron_common::api::external::ListResultVec; use omicron_common::api::external::ResourceType; use omicron_common::api::external::UpdateResult; use omicron_common::api::external::http_pagination::PaginatedBy; +use omicron_uuid_kinds::AlertReceiverUuid; use omicron_uuid_kinds::GenericUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use ref_cast::RefCast; use uuid::Uuid; @@ -80,7 +80,7 @@ impl DataStore { let subscriptions = subscriptions .into_iter() - .map(WebhookSubscriptionKind::try_from) + .map(AlertSubscriptionKind::try_from) .collect::, _>>()?; let err = OptionalError::new(); let (rx, secrets) = self @@ -90,12 +90,9 @@ impl DataStore { // transaction fails because of a UUID collision. // // this probably won't happen, but, ya know... - let id = WebhookReceiverUuid::new_v4(); - let receiver = WebhookReceiver { - identity: WebhookReceiverIdentity::new( - id, - identity.clone(), - ), + let id = AlertReceiverUuid::new_v4(); + let receiver = AlertReceiver { + identity: AlertReceiverIdentity::new(id, identity.clone()), endpoint: endpoint.to_string(), secret_gen: Generation::new(), subscription_gen: Generation::new(), @@ -105,9 +102,9 @@ impl DataStore { let err = err.clone(); let name = identity.name.clone(); async move { - let rx = diesel::insert_into(rx_dsl::webhook_receiver) + let rx = diesel::insert_into(rx_dsl::alert_receiver) .values(receiver) - .returning(WebhookReceiver::as_returning()) + .returning(AlertReceiver::as_returning()) .get_result_async(&conn) .await .map_err(|e| { @@ -115,7 +112,7 @@ impl DataStore { public_error_from_diesel( e, ErrorHandler::Conflict( - ResourceType::WebhookReceiver, + ResourceType::AlertReceiver, name.as_str(), ), ) @@ -165,7 +162,7 @@ impl DataStore { public_error_from_diesel( e, ErrorHandler::Conflict( - ResourceType::WebhookReceiver, + ResourceType::AlertReceiver, identity.name.as_str(), ), ) @@ -176,8 +173,8 @@ impl DataStore { pub async fn webhook_rx_config_fetch( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, - ) -> Result<(Vec, Vec), Error> { + authz_rx: &authz::AlertReceiver, + ) -> Result<(Vec, Vec), Error> { opctx.authorize(authz::Action::ListChildren, authz_rx).await?; self.rx_config_fetch_on_conn( authz_rx.id(), @@ -188,9 +185,9 @@ impl DataStore { async fn rx_config_fetch_on_conn( &self, - rx_id: WebhookReceiverUuid, + rx_id: AlertReceiverUuid, conn: &async_bb8_diesel::Connection, - ) -> Result<(Vec, Vec), Error> { + ) -> Result<(Vec, Vec), Error> { let subscriptions = self.rx_subscription_list_on_conn(rx_id, &conn).await?; let secrets = self.rx_secret_list_on_conn(rx_id, &conn).await?; @@ -200,8 +197,8 @@ impl DataStore { pub async fn webhook_rx_delete( &self, opctx: 
&OpContext, - authz_rx: &authz::WebhookReceiver, - db_rx: &WebhookReceiver, + authz_rx: &authz::AlertReceiver, + db_rx: &AlertReceiver, ) -> DeleteResult { opctx.authorize(authz::Action::Delete, authz_rx).await?; let rx_id = authz_rx.id().into_untyped_uuid(); @@ -235,7 +232,7 @@ impl DataStore { // Delete subscriptions and globs. let exact_subscriptions_deleted = diesel::delete( - subscription_dsl::webhook_rx_subscription, + subscription_dsl::alert_subscription, ) .filter(subscription_dsl::rx_id.eq(rx_id)) .execute_async(&conn) @@ -250,7 +247,7 @@ impl DataStore { })?; let globs_deleted = - diesel::delete(glob_dsl::webhook_rx_event_glob) + diesel::delete(glob_dsl::alert_glob) .filter(glob_dsl::rx_id.eq(rx_id)) .execute_async(&conn) .await @@ -297,7 +294,7 @@ impl DataStore { })?; // Finally, mark the webhook receiver record as deleted, // provided that none of its children were modified in the interim. - let deleted = diesel::update(rx_dsl::webhook_receiver) + let deleted = diesel::update(rx_dsl::alert_receiver) .filter(rx_dsl::id.eq(rx_id)) .filter(rx_dsl::time_deleted.is_null()) .filter(rx_dsl::subscription_gen.eq(db_rx.subscription_gen)) @@ -344,9 +341,9 @@ impl DataStore { pub async fn webhook_rx_update( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, + authz_rx: &authz::AlertReceiver, params: params::WebhookReceiverUpdate, - ) -> UpdateResult { + ) -> UpdateResult { opctx.authorize(authz::Action::Modify, authz_rx).await?; let conn = self.pool_connection_authorized(opctx).await?; @@ -357,7 +354,7 @@ impl DataStore { endpoint: params.endpoint.as_ref().map(ToString::to_string), time_modified: chrono::Utc::now(), }; - let updated = diesel::update(rx_dsl::webhook_receiver) + let updated = diesel::update(rx_dsl::alert_receiver) .filter(rx_dsl::id.eq(rx_id)) .filter(rx_dsl::time_deleted.is_null()) .set(update) @@ -373,7 +370,7 @@ impl DataStore { Ok(updated.found) } - pub async fn webhook_rx_list( + pub async fn alert_rx_list( &self, opctx: &OpContext, pagparams: &PaginatedBy<'_>, @@ -388,20 +385,20 @@ impl DataStore { // // This is a bit unfortunate, and it would be nicer to do this with // JOINs, but it's a bit hairy as the subscriptions come from both the - // `webhook_rx_subscription` and `webhook_rx_glob` tables... + // `alert_subscription` and `webhook_rx_glob` tables... 
let receivers = match pagparams { PaginatedBy::Id(pagparams) => { - paginated(rx_dsl::webhook_receiver, rx_dsl::id, &pagparams) + paginated(rx_dsl::alert_receiver, rx_dsl::id, &pagparams) } PaginatedBy::Name(pagparams) => paginated( - rx_dsl::webhook_receiver, + rx_dsl::alert_receiver, rx_dsl::name, &pagparams.map_name(|n| Name::ref_cast(n)), ), } .filter(rx_dsl::time_deleted.is_null()) - .select(WebhookReceiver::as_select()) + .select(AlertReceiver::as_select()) .load_async(&*conn) .await .map_err(|e| { @@ -430,11 +427,11 @@ impl DataStore { // Subscriptions // - pub async fn webhook_rx_is_subscribed_to_event( + pub async fn alert_rx_is_subscribed_to_alert( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, - authz_event: &authz::WebhookEvent, + authz_rx: &authz::AlertReceiver, + authz_event: &authz::Alert, ) -> Result { opctx.authorize(authz::Action::Read, authz_rx).await?; @@ -458,30 +455,28 @@ impl DataStore { for glob in batch { slog::debug!( opctx.log, - "reprocessing webhook glob subscription to checking if \ + "reprocessing alert glob subscription to check if \ receiver is subscribed to event"; "rx_id" => ?rx_id, "glob" => ?glob.glob.glob, "prior_version" => ?glob.schema_version, "current_version" => %SCHEMA_VERSION, ); - self.webhook_glob_reprocess(opctx, &glob).await.map_err( - |e| { - e.internal_context(format!( - "failed to reprocess glob {glob:?}" - )) - }, - )?; + self.alert_glob_reprocess(opctx, &glob).await.map_err(|e| { + e.internal_context(format!( + "failed to reprocess glob {glob:?}" + )) + })?; } } - let event_class = event_dsl::webhook_event - .filter(event_dsl::id.eq(authz_event.id().into_untyped_uuid())) - .select(event_dsl::event_class) + let alert_class = alert_dsl::alert + .filter(alert_dsl::id.eq(authz_event.id().into_untyped_uuid())) + .select(alert_dsl::alert_class) .single_value(); - subscription_dsl::webhook_rx_subscription + subscription_dsl::alert_subscription .filter(subscription_dsl::rx_id.eq(rx_id.into_untyped_uuid())) - .filter(subscription_dsl::event_class.nullable().eq(event_class)) + .filter(subscription_dsl::alert_class.nullable().eq(alert_class)) .select(subscription_dsl::rx_id) .first_async::(&*conn) .await @@ -491,11 +486,11 @@ impl DataStore { } /// Don't forget to like and subscribe! 
- pub async fn webhook_rx_subscription_add( + pub async fn alert_subscription_add( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, - subscription: WebhookSubscriptionKind, + authz_rx: &authz::AlertReceiver, + subscription: AlertSubscriptionKind, ) -> CreateResult<()> { opctx.authorize(authz::Action::Modify, authz_rx).await?; self.rx_add_subscription_on_conn( @@ -515,11 +510,11 @@ impl DataStore { }) } - pub async fn webhook_rx_subscription_remove( + pub async fn alert_subscription_remove( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, - subscription: WebhookSubscriptionKind, + authz_rx: &authz::AlertReceiver, + subscription: AlertSubscriptionKind, ) -> DeleteResult { opctx.authorize(authz::Action::Modify, authz_rx).await?; let rx_id = authz_rx.id().into_untyped_uuid(); @@ -541,9 +536,9 @@ impl DataStore { "unexpected database error: {error:#}" )), }; - const LOG_MSG: &str = "unsubscribed webhook receiver"; + const LOG_MSG: &str = "unsubscribed alert receiver"; match subscription { - WebhookSubscriptionKind::Glob(ref glob) => { + AlertSubscriptionKind::Glob(ref glob) => { // Deleting a glob subscription is performed in a transaction in // order to ensure that the glob is only deleted if its exact // subscriptions could also be deleted. @@ -553,13 +548,13 @@ impl DataStore { let glob = glob.glob.clone(); async move { let n_exact = diesel::delete( - subscription_dsl::webhook_rx_subscription, + subscription_dsl::alert_subscription, ) .filter(subscription_dsl::rx_id.eq(rx_id)) .filter(subscription_dsl::glob.eq(glob.clone())) .execute_async(&conn) .await?; - diesel::delete(glob_dsl::webhook_rx_event_glob) + diesel::delete(glob_dsl::alert_glob) .filter(glob_dsl::rx_id.eq(rx_id)) .filter(glob_dsl::glob.eq(glob)) .execute_async(&conn) @@ -577,10 +572,10 @@ impl DataStore { "exact_subscriptions_deleted" => n_exact, ); } - WebhookSubscriptionKind::Exact(class) => { - diesel::delete(subscription_dsl::webhook_rx_subscription) + AlertSubscriptionKind::Exact(class) => { + diesel::delete(subscription_dsl::alert_subscription) .filter(subscription_dsl::rx_id.eq(rx_id)) - .filter(subscription_dsl::event_class.eq(class)) + .filter(subscription_dsl::alert_class.eq(class)) .execute_async(&*conn) .await .map_err(error_handler)?; @@ -588,7 +583,7 @@ impl DataStore { &opctx.log, "{LOG_MSG}"; "rx_id" => %rx_id, - "subscription_event_class" => %class, + "subscription_alert_class" => %class, ); } } @@ -598,30 +593,30 @@ impl DataStore { async fn rx_subscription_list_on_conn( &self, - rx_id: WebhookReceiverUuid, + rx_id: AlertReceiverUuid, conn: &async_bb8_diesel::Connection, - ) -> ListResultVec { + ) -> ListResultVec { // TODO(eliza): rather than performing two separate queries, this could // perhaps be expressed using a SQL `union`, with an added "label" // column to distinguish between globs and exact subscriptions, but this // is a bit more complex, and would require raw SQL... // First, get all the exact subscriptions that aren't from globs. 
- let exact = subscription_dsl::webhook_rx_subscription + let exact = subscription_dsl::alert_subscription .filter(subscription_dsl::rx_id.eq(rx_id.into_untyped_uuid())) .filter(subscription_dsl::glob.is_null()) - .select(subscription_dsl::event_class) - .load_async::(conn) + .select(subscription_dsl::alert_class) + .load_async::(conn) .await .map_err(|e| { public_error_from_diesel(e, ErrorHandler::Server) .internal_context("failed to list exact subscriptions") })?; // Then, get the globs - let globs = glob_dsl::webhook_rx_event_glob + let globs = glob_dsl::alert_glob .filter(glob_dsl::rx_id.eq(rx_id.into_untyped_uuid())) - .select(WebhookGlob::as_select()) - .load_async::(conn) + .select(AlertGlob::as_select()) + .load_async::(conn) .await .map_err(|e| { public_error_from_diesel(e, ErrorHandler::Server) @@ -629,8 +624,8 @@ impl DataStore { })?; let subscriptions = exact .into_iter() - .map(WebhookSubscriptionKind::Exact) - .chain(globs.into_iter().map(WebhookSubscriptionKind::Glob)) + .map(AlertSubscriptionKind::Exact) + .chain(globs.into_iter().map(AlertSubscriptionKind::Glob)) .collect::>(); Ok(subscriptions) } @@ -638,17 +633,17 @@ impl DataStore { async fn rx_add_subscription_on_conn( &self, opctx: &OpContext, - rx_id: WebhookReceiverUuid, - subscription: WebhookSubscriptionKind, + rx_id: AlertReceiverUuid, + subscription: AlertSubscriptionKind, conn: &async_bb8_diesel::Connection, ) -> Result<(), TransactionError> { match subscription { - WebhookSubscriptionKind::Glob(glob) => { - let glob = WebhookRxEventGlob::new(rx_id, glob); - let result: Option = - WebhookReceiver::insert_resource( + AlertSubscriptionKind::Glob(glob) => { + let glob = AlertRxGlob::new(rx_id, glob); + let result: Option = + AlertReceiver::insert_resource( rx_id.into_untyped_uuid(), - diesel::insert_into(glob_dsl::webhook_rx_event_glob) + diesel::insert_into(glob_dsl::alert_glob) .values(glob) // If there's already a subscription to this glob, // that's fine... 
@@ -659,23 +654,23 @@ impl DataStore { .map_err(async_insert_error_to_txn(rx_id))?; slog::debug!( &opctx.log, - "added glob subscription to webhook receiver"; + "added glob subscription to alert receiver"; "rx_id" => ?rx_id, "subscription" => ?result, ); } - WebhookSubscriptionKind::Exact(event_class) => { - let subscription = WebhookRxSubscription { + AlertSubscriptionKind::Exact(class) => { + let subscription = AlertRxSubscription { rx_id: rx_id.into(), - event_class, + class, glob: None, time_created: chrono::Utc::now(), }; - let result: Option = - WebhookReceiver::insert_resource( + let result: Option = + AlertReceiver::insert_resource( rx_id.into_untyped_uuid(), diesel::insert_into( - subscription_dsl::webhook_rx_subscription, + subscription_dsl::alert_subscription, ) .values(subscription) // If there's already a subscription to this event @@ -687,7 +682,7 @@ impl DataStore { .map_err(async_insert_error_to_txn(rx_id))?; slog::debug!( &opctx.log, - "added exact subscription to webhook receiver"; + "added exact subscription to alert receiver"; "rx_id" => ?rx_id, "subscription" => ?result, ); @@ -698,13 +693,13 @@ impl DataStore { async fn add_exact_subscription_batch_on_conn( &self, - rx_id: WebhookReceiverUuid, - subscriptions: Vec, + rx_id: AlertReceiverUuid, + subscriptions: Vec, conn: &async_bb8_diesel::Connection, - ) -> Result, TransactionError> { - >::insert_resource( + ) -> Result, TransactionError> { + >::insert_resource( rx_id.into_untyped_uuid(), - diesel::insert_into(subscription_dsl::webhook_rx_subscription) + diesel::insert_into(subscription_dsl::alert_subscription) .values(subscriptions) .on_conflict_do_nothing() ).insert_and_get_results_async(conn) @@ -715,14 +710,14 @@ impl DataStore { async fn glob_generate_exact_subs( &self, opctx: &OpContext, - glob: &WebhookRxEventGlob, + glob: &AlertRxGlob, conn: &async_bb8_diesel::Connection, ) -> Result> { let regex = match regex::Regex::new(&glob.glob.regex) { Ok(r) => r, Err(error) => { const MSG: &str = - "webhook glob subscription regex was not a valid regex"; + "alert glob subscription regex was not a valid regex"; slog::error!( &opctx.log, "{MSG}"; @@ -735,27 +730,27 @@ impl DataStore { )); } }; - let subscriptions = WebhookEventClass::ALL_CLASSES + let subscriptions = AlertClass::ALL_CLASSES .iter() .filter_map(|class| { if regex.is_match(class.as_str()) { slog::debug!( &opctx.log, - "webhook glob matches event class"; + "alert glob matches event class"; "rx_id" => ?glob.rx_id, "glob" => ?glob.glob.glob, "regex" => ?regex, - "event_class" => %class, + "alert_class" => %class, ); - Some(WebhookRxSubscription::for_glob(&glob, *class)) + Some(AlertRxSubscription::for_glob(&glob, *class)) } else { slog::trace!( &opctx.log, - "webhook glob does not match event class"; + "alert glob does not match event class"; "rx_id" => ?glob.rx_id, "glob" => ?glob.glob.glob, "regex" => ?regex, - "event_class" => %class, + "alert_class" => %class, ); None } @@ -781,33 +776,33 @@ impl DataStore { } /// List all webhook receivers whose event class subscription globs match - /// the provided `event_class`. - pub async fn webhook_rx_list_subscribed_to_event( + /// the provided `alert_class`. 
+ pub async fn alert_rx_list_subscribed_to_event( &self, opctx: &OpContext, - event_class: WebhookEventClass, - ) -> Result, Error> { + alert_class: AlertClass, + ) -> Result, Error> { let conn = self.pool_connection_authorized(opctx).await?; - Self::rx_list_subscribed_query(event_class) - .load_async::<(WebhookReceiver, WebhookRxSubscription)>(&*conn) + Self::rx_list_subscribed_query(alert_class) + .load_async::<(AlertReceiver, AlertRxSubscription)>(&*conn) .await .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server)) } fn rx_list_subscribed_query( - event_class: WebhookEventClass, - ) -> impl RunnableQuery<(WebhookReceiver, WebhookRxSubscription)> { - subscription_dsl::webhook_rx_subscription - .filter(subscription_dsl::event_class.eq(event_class)) + alert_class: AlertClass, + ) -> impl RunnableQuery<(AlertReceiver, AlertRxSubscription)> { + subscription_dsl::alert_subscription + .filter(subscription_dsl::alert_class.eq(alert_class)) .order_by(subscription_dsl::rx_id.asc()) .inner_join( - rx_dsl::webhook_receiver + rx_dsl::alert_receiver .on(subscription_dsl::rx_id.eq(rx_dsl::id)), ) .filter(rx_dsl::time_deleted.is_null()) .select(( - WebhookReceiver::as_select(), - WebhookRxSubscription::as_select(), + AlertReceiver::as_select(), + AlertRxSubscription::as_select(), )) } @@ -815,7 +810,7 @@ impl DataStore { // Glob reprocessing // - /// List webhook glob subscriptions for which new exact subscriptions have + /// List alert glob subscriptions for which new exact subscriptions have /// to be generated. /// /// This includes glob subscriptions that were just created and have no @@ -823,23 +818,23 @@ impl DataStore { /// schema version. /// /// Such subscriptions will need to be reprocessed (by the - /// [`DataStore::webhook_glob_reprocess`] function), as event classes + /// [`DataStore::alert_glob_reprocess`] function), as event classes /// matching those globs may have been added in a later schema version. 
- pub async fn webhook_glob_list_reprocessable( + pub async fn alert_glob_list_reprocessable( &self, opctx: &OpContext, pagparams: &DataPageParams<'_, (Uuid, String)>, - ) -> ListResultVec { + ) -> ListResultVec { let conn = self.pool_connection_authorized(opctx).await?; self.rx_list_reprocessable_globs_on_conn(None, pagparams, &conn).await } async fn rx_list_reprocessable_globs_on_conn( &self, - rx_id: Option, + rx_id: Option, pagparams: &DataPageParams<'_, (Uuid, String)>, conn: &async_bb8_diesel::Connection, - ) -> ListResultVec { + ) -> ListResultVec { let (current_version, target_version) = self.database_schema_version().await.map_err(|e| { e.internal_context("couldn't load db schema version") @@ -854,7 +849,7 @@ impl DataStore { if let Some(target) = target_version { return Err(Error::InternalError { internal_message: format!( - "webhook glob reprocessing must wait until the migration \ + "alert glob reprocessing must wait until the migration \ from {current_version} to {target} has completed", ), }); @@ -877,7 +872,7 @@ impl DataStore { if current_version != SCHEMA_VERSION { return Err(Error::InternalError { internal_message: format!( - "cannot reprocess webhook globs, as our schema version \ + "cannot reprocess alert globs, as our schema version \ ({SCHEMA_VERSION}) doess not match the current version \ ({current_version})", ), @@ -885,7 +880,7 @@ impl DataStore { } let query = paginated_multicolumn( - glob_dsl::webhook_rx_event_glob, + glob_dsl::alert_glob, (glob_dsl::rx_id, glob_dsl::glob), pagparams, ) @@ -896,7 +891,7 @@ impl DataStore { .filter(glob_dsl::schema_version.is_null().or( glob_dsl::schema_version.ne(SemverVersion::from(SCHEMA_VERSION)), )) - .select(WebhookRxEventGlob::as_select()); + .select(AlertRxGlob::as_select()); // If we were asked for globs belonging to a specific receiver, add a // WHERE clause to filter on the receiver's UUID. We just use a match // rather than boxing the query since this is the only dynamically @@ -924,11 +919,11 @@ impl DataStore { /// dispatcher must ensure that all glob subscriptions are up-to-date before /// dispatching events, as a receiver with outdated globs may have a glob /// matching a new event class but no corresponding exact subscription yet. 
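+    ///
+    /// For example, a receiver subscribed to the glob `foo.**` only gains an
+    /// exact subscription to a newly added `foo.bar.baz` alert class once its
+    /// glob has been reprocessed against the current schema version.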
- pub async fn webhook_glob_reprocess( + pub async fn alert_glob_reprocess( &self, opctx: &OpContext, - glob: &WebhookRxEventGlob, - ) -> Result { + glob: &AlertRxGlob, + ) -> Result { let conn = self.pool_connection_authorized(opctx).await?; self.glob_reprocess_on_conn(opctx, glob, &conn).await } @@ -936,9 +931,9 @@ impl DataStore { async fn glob_reprocess_on_conn( &self, opctx: &OpContext, - glob: &WebhookRxEventGlob, + glob: &AlertRxGlob, conn: &async_bb8_diesel::Connection, - ) -> Result { + ) -> Result { slog::trace!( opctx.log, "reprocessing outdated webhook glob"; @@ -954,13 +949,15 @@ impl DataStore { let glob = glob.clone(); let err = err.clone(); async move { - let deleted = diesel::delete( - subscription_dsl::webhook_rx_subscription, - ) - .filter(subscription_dsl::glob.eq(glob.glob.glob.clone())) - .filter(subscription_dsl::rx_id.eq(glob.rx_id)) - .execute_async(&conn) - .await?; + let deleted = + diesel::delete(subscription_dsl::alert_subscription) + .filter( + subscription_dsl::glob + .eq(glob.glob.glob.clone()), + ) + .filter(subscription_dsl::rx_id.eq(glob.rx_id)) + .execute_async(&conn) + .await?; let created = self .glob_generate_exact_subs(opctx, &glob, &conn) .await @@ -970,17 +967,15 @@ impl DataStore { } TransactionError::Database(e) => e, })?; - let update = - diesel::update(glob_dsl::webhook_rx_event_glob) - .filter( - glob_dsl::rx_id - .eq(glob.rx_id.into_untyped_uuid()), - ) - .filter(glob_dsl::glob.eq(glob.glob.glob.clone())) - .set( - glob_dsl::schema_version - .eq(SemverVersion::from(SCHEMA_VERSION)), - ); + let update = diesel::update(glob_dsl::alert_glob) + .filter( + glob_dsl::rx_id.eq(glob.rx_id.into_untyped_uuid()), + ) + .filter(glob_dsl::glob.eq(glob.glob.glob.clone())) + .set( + glob_dsl::schema_version + .eq(SemverVersion::from(SCHEMA_VERSION)), + ); let did_update = match glob.schema_version { Some(ref version) => { update @@ -1004,7 +999,7 @@ impl DataStore { // it has been deleted. 
Err(diesel::result::Error::NotFound) | Ok(0) => { return Err(err.bail(Ok( - WebhookGlobStatus::AlreadyReprocessed, + AlertGlobStatus::AlreadyReprocessed, ))); } Err(e) => return Err(e), @@ -1013,7 +1008,7 @@ impl DataStore { } } - Ok(WebhookGlobStatus::Reprocessed { + Ok(AlertGlobStatus::Reprocessed { created, deleted, prev_version: glob @@ -1033,14 +1028,14 @@ impl DataStore { })?; match status { - WebhookGlobStatus::Reprocessed { + AlertGlobStatus::Reprocessed { created, deleted, ref prev_version, } => { slog::debug!( opctx.log, - "reprocessed outdated webhook glob"; + "reprocessed outdated alert glob subscription"; "rx_id" => ?glob.rx_id, "glob" => ?glob.glob.glob, "prev_version" => ?prev_version, @@ -1049,10 +1044,11 @@ impl DataStore { "subscriptions_deleted" => ?deleted, ); } - WebhookGlobStatus::AlreadyReprocessed => { + AlertGlobStatus::AlreadyReprocessed => { slog::trace!( opctx.log, - "outdated webhook glob was either already reprocessed or deleted"; + "outdated alert glob was either already reprocessed or\ + deleted"; "rx_id" => ?glob.rx_id, "glob" => ?glob.glob.glob, "prev_version" => ?glob.schema_version, @@ -1071,7 +1067,7 @@ impl DataStore { pub async fn webhook_rx_secret_list( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, + authz_rx: &authz::AlertReceiver, ) -> ListResultVec { opctx.authorize(authz::Action::ListChildren, authz_rx).await?; let conn = self.pool_connection_authorized(&opctx).await?; @@ -1080,7 +1076,7 @@ impl DataStore { async fn rx_secret_list_on_conn( &self, - rx_id: WebhookReceiverUuid, + rx_id: AlertReceiverUuid, conn: &async_bb8_diesel::Connection, ) -> ListResultVec { secret_dsl::webhook_secret @@ -1098,7 +1094,7 @@ impl DataStore { pub async fn webhook_rx_secret_create( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, + authz_rx: &authz::AlertReceiver, secret: WebhookSecret, ) -> CreateResult { opctx.authorize(authz::Action::CreateChild, authz_rx).await?; @@ -1118,7 +1114,7 @@ impl DataStore { pub async fn webhook_rx_secret_delete( &self, opctx: &OpContext, - authz_rx: &authz::WebhookReceiver, + authz_rx: &authz::AlertReceiver, authz_secret: &authz::WebhookSecret, ) -> DeleteResult { opctx.authorize(authz::Action::Delete, authz_secret).await?; @@ -1141,8 +1137,8 @@ impl DataStore { secret: WebhookSecret, conn: &async_bb8_diesel::Connection, ) -> Result> { - let rx_id = secret.webhook_receiver_id; - let secret: WebhookSecret = WebhookReceiver::insert_resource( + let rx_id = secret.alert_receiver_id; + let secret: WebhookSecret = AlertReceiver::insert_resource( rx_id.into_untyped_uuid(), diesel::insert_into(secret_dsl::webhook_secret).values(secret), ) @@ -1154,12 +1150,12 @@ impl DataStore { } fn async_insert_error_to_txn( - rx_id: WebhookReceiverUuid, + rx_id: AlertReceiverUuid, ) -> impl FnOnce(AsyncInsertError) -> TransactionError { move |e| match e { AsyncInsertError::CollectionNotFound => { TransactionError::CustomError(Error::not_found_by_id( - ResourceType::WebhookReceiver, + ResourceType::AlertReceiver, &rx_id.into_untyped_uuid(), )) } @@ -1176,7 +1172,7 @@ mod test { use nexus_db_lookup::LookupPath; use omicron_common::api::external::IdentityMetadataCreateParams; use omicron_test_utils::dev; - use omicron_uuid_kinds::WebhookEventUuid; + use omicron_uuid_kinds::AlertUuid; async fn create_receiver( datastore: &DataStore, @@ -1208,26 +1204,22 @@ mod test { async fn create_event( datastore: &DataStore, opctx: &OpContext, - event_class: WebhookEventClass, - ) -> (authz::WebhookEvent, crate::db::model::WebhookEvent) { - 
let id = WebhookEventUuid::new_v4(); + alert_class: AlertClass, + ) -> (authz::Alert, crate::db::model::Alert) { + let id = AlertUuid::new_v4(); datastore - .webhook_event_create(opctx, id, event_class, serde_json::json!({})) + .alert_create(opctx, id, alert_class, serde_json::json!({})) .await .expect("cant create ye event"); - LookupPath::new(opctx, datastore) - .webhook_event_id(id) - .fetch() - .await - .expect( + LookupPath::new(opctx, datastore).alert_id(id).fetch().await.expect( "cant get ye event (i just created it, so this is extra weird?)", ) } #[tokio::test] - async fn test_event_class_globs() { + async fn test_alert_class_globs() { // Test setup - let logctx = dev::test_setup_log("test_event_class_globs"); + let logctx = dev::test_setup_log("test_alert_class_globs"); let db = TestDatabase::new_with_datastore(&logctx.log).await; let (opctx, datastore) = (db.opctx(), db.datastore()); let mut all_rxs: Vec = Vec::new(); @@ -1308,7 +1300,7 @@ mod test { let mut paginator = Paginator::new(SQL_BATCH_SIZE); while let Some(p) = paginator.next() { let batch = datastore - .webhook_glob_list_reprocessable(opctx, &p.current_pagparams()) + .alert_glob_list_reprocessable(opctx, &p.current_pagparams()) .await .unwrap(); paginator = p.found_batch(&batch, &|glob| { @@ -1316,27 +1308,27 @@ mod test { }); for glob in batch { datastore - .webhook_glob_reprocess(opctx, dbg!(&glob)) + .alert_glob_reprocess(opctx, dbg!(&glob)) .await .unwrap(); } } - async fn check_event( + async fn check_alert( datastore: &DataStore, opctx: &OpContext, all_rxs: &Vec, - event_class: WebhookEventClass, + alert_class: AlertClass, matches: &[&WebhookReceiverConfig], ) { let subscribed = datastore - .webhook_rx_list_subscribed_to_event(opctx, event_class) + .alert_rx_list_subscribed_to_event(opctx, alert_class) .await .unwrap() .into_iter() .map(|(rx, subscription)| { eprintln!( - "receiver is subscribed to event {event_class}:\n\t\ + "receiver is subscribed to event {alert_class}:\n\t\ rx: {} ({})\n\tsubscription: {subscription:?}", rx.identity.name, rx.identity.id, ); @@ -1347,7 +1339,7 @@ mod test { for WebhookReceiverConfig { rx, subscriptions, .. } in matches { assert!( subscribed.contains(&rx.identity), - "expected {rx:?} to be subscribed to {event_class}\n\ + "expected {rx:?} to be subscribed to {alert_class}\n\ subscriptions: {subscriptions:?}" ); } @@ -1362,33 +1354,33 @@ mod test { for WebhookReceiverConfig { rx, subscriptions, .. 
} in not_matches { assert!( !subscribed.contains(&rx.identity), - "expected {rx:?} to not be subscribed to {event_class}\n\ + "expected {rx:?} to not be subscribed to {alert_class}\n\ subscriptions: {subscriptions:?}" ); } } - check_event( + check_alert( datastore, opctx, &all_rxs, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, &[&test_star, &test_starstar], ) .await; - check_event( + check_alert( datastore, opctx, &all_rxs, - WebhookEventClass::TestFooBar, + AlertClass::TestFooBar, &[&test_starstar, &test_foo_star], ) .await; - check_event( + check_alert( datastore, opctx, &all_rxs, - WebhookEventClass::TestFooBaz, + AlertClass::TestFooBaz, &[ &test_starstar, &test_foo_star, @@ -1397,19 +1389,19 @@ mod test { ], ) .await; - check_event( + check_alert( datastore, opctx, &all_rxs, - WebhookEventClass::TestQuuxBar, + AlertClass::TestQuuxBar, &[&test_starstar, &test_quux_star, &test_quux_starstar], ) .await; - check_event( + check_alert( datastore, opctx, &all_rxs, - WebhookEventClass::TestQuuxBarBaz, + AlertClass::TestQuuxBarBaz, &[&test_starstar, &test_quux_starstar, &test_starstar_baz], ) .await; @@ -1420,14 +1412,13 @@ mod test { } #[tokio::test] - async fn explain_event_class_glob() { - let logctx = dev::test_setup_log("explain_event_class_glob"); + async fn explain_alert_class_glob() { + let logctx = dev::test_setup_log("explain_alert_class_glob"); let db = TestDatabase::new_with_pool(&logctx.log).await; let pool = db.pool(); let conn = pool.claim().await.unwrap(); - let query = - DataStore::rx_list_subscribed_query(WebhookEventClass::TestFooBar); + let query = DataStore::rx_list_subscribed_query(AlertClass::TestFooBar); let explanation = query .explain_async(&conn) .await @@ -1453,35 +1444,30 @@ mod test { .await; let (authz_rx, _) = LookupPath::new(opctx, datastore) - .webhook_receiver_id(rx.rx.id()) + .alert_receiver_id(rx.rx.id()) .fetch() .await .expect("cant get ye receiver"); let (authz_foo, _) = - create_event(datastore, opctx, WebhookEventClass::TestFoo).await; + create_event(datastore, opctx, AlertClass::TestFoo).await; let (authz_foo_bar, _) = - create_event(datastore, opctx, WebhookEventClass::TestFooBar).await; + create_event(datastore, opctx, AlertClass::TestFooBar).await; let (authz_quux_bar, _) = - create_event(datastore, opctx, WebhookEventClass::TestQuuxBar) - .await; + create_event(datastore, opctx, AlertClass::TestQuuxBar).await; let is_subscribed_foo = datastore - .webhook_rx_is_subscribed_to_event(opctx, &authz_rx, &authz_foo) + .alert_rx_is_subscribed_to_alert(opctx, &authz_rx, &authz_foo) .await; assert_eq!(is_subscribed_foo, Ok(false)); let is_subscribed_foo_bar = datastore - .webhook_rx_is_subscribed_to_event(opctx, &authz_rx, &authz_foo_bar) + .alert_rx_is_subscribed_to_alert(opctx, &authz_rx, &authz_foo_bar) .await; assert_eq!(is_subscribed_foo_bar, Ok(true)); let is_subscribed_quux_bar = datastore - .webhook_rx_is_subscribed_to_event( - opctx, - &authz_rx, - &authz_quux_bar, - ) + .alert_rx_is_subscribed_to_alert(opctx, &authz_rx, &authz_quux_bar) .await; assert_eq!(is_subscribed_quux_bar, Ok(true)); diff --git a/nexus/db-queries/src/db/datastore/mod.rs b/nexus/db-queries/src/db/datastore/mod.rs index 5a2acb6aff5..123ee6c97e9 100644 --- a/nexus/db-queries/src/db/datastore/mod.rs +++ b/nexus/db-queries/src/db/datastore/mod.rs @@ -46,6 +46,8 @@ use std::sync::Arc; mod address_lot; mod affinity; +mod alert; +mod alert_rx; mod allow_list; mod auth; mod bfd; @@ -110,8 +112,6 @@ mod volume; mod volume_repair; mod vpc; pub mod webhook_delivery; -mod 
webhook_event; -mod webhook_rx; mod zpool; pub use address_lot::AddressLotCreateResult; diff --git a/nexus/db-queries/src/db/datastore/saga.rs b/nexus/db-queries/src/db/datastore/saga.rs index 54e4224eabe..fd6ee5e390a 100644 --- a/nexus/db-queries/src/db/datastore/saga.rs +++ b/nexus/db-queries/src/db/datastore/saga.rs @@ -494,7 +494,7 @@ mod test { #[tokio::test] async fn test_update_state_idempotent() { // Test setup - let logctx = dev::test_setup_log("test_create_event_idempotent"); + let logctx = dev::test_setup_log("test_update_state_idempotent"); let db = TestDatabase::new_with_datastore(&logctx.log).await; let datastore = db.datastore(); let node_cx = SagaTestContext::new(SecId(Uuid::new_v4())); diff --git a/nexus/db-queries/src/db/datastore/webhook_delivery.rs b/nexus/db-queries/src/db/datastore/webhook_delivery.rs index e4593810e70..0f80ffba626 100644 --- a/nexus/db-queries/src/db/datastore/webhook_delivery.rs +++ b/nexus/db-queries/src/db/datastore/webhook_delivery.rs @@ -8,13 +8,13 @@ use super::DataStore; use crate::context::OpContext; use crate::db::IncompleteOnConflictExt; use crate::db::datastore::RunnableQuery; +use crate::db::model::Alert; +use crate::db::model::AlertClass; +use crate::db::model::AlertDeliveryState; +use crate::db::model::AlertDeliveryTrigger; use crate::db::model::WebhookDelivery; use crate::db::model::WebhookDeliveryAttempt; use crate::db::model::WebhookDeliveryAttemptResult; -use crate::db::model::WebhookDeliveryState; -use crate::db::model::WebhookDeliveryTrigger; -use crate::db::model::WebhookEvent; -use crate::db::model::WebhookEventClass; use crate::db::pagination::paginated_multicolumn; use crate::db::update_and_check::UpdateAndCheck; use crate::db::update_and_check::UpdateAndQueryResult; @@ -26,16 +26,16 @@ use diesel::prelude::*; use nexus_db_errors::ErrorHandler; use nexus_db_errors::public_error_from_diesel; use nexus_db_schema::schema; +use nexus_db_schema::schema::alert::dsl as alert_dsl; use nexus_db_schema::schema::webhook_delivery::dsl; use nexus_db_schema::schema::webhook_delivery_attempt::dsl as attempt_dsl; -use nexus_db_schema::schema::webhook_event::dsl as event_dsl; use omicron_common::api::external::CreateResult; use omicron_common::api::external::DataPageParams; use omicron_common::api::external::Error; use omicron_common::api::external::ListResultVec; +use omicron_uuid_kinds::AlertReceiverUuid; use omicron_uuid_kinds::GenericUuid; use omicron_uuid_kinds::OmicronZoneUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use uuid::Uuid; #[derive(Debug, Clone, Eq, PartialEq)] @@ -53,11 +53,11 @@ pub struct DeliveryConfig { } /// A record from the [`WebhookDelivery`] table along with the event class and -/// data of the corresponding [`WebhookEvent`] record. +/// data of the corresponding [`Alert`] record. #[derive(Debug, Clone)] pub struct DeliveryAndEvent { pub delivery: WebhookDelivery, - pub event_class: WebhookEventClass, + pub alert_class: AlertClass, pub event: serde_json::Value, } @@ -70,14 +70,7 @@ impl DataStore { let conn = self.pool_connection_authorized(opctx).await?; diesel::insert_into(dsl::webhook_delivery) .values(deliveries) - // N.B. that this is intended to ignore conflicts on the - // "one_webhook_event_dispatch_per_rx" index, but ON CONFLICT ... DO - // NOTHING can't be used with the names of indices, only actual - // UNIQUE CONSTRAINTs. 
So we just do a blanket ON CONFLICT DO - // NOTHING, which is fine, becausse the only other uniqueness - // constraint is the UUID primary key, and we kind of assume UUID - // collisions don't happen. Oh well. - .on_conflict((dsl::event_id, dsl::rx_id)) + .on_conflict((dsl::alert_id, dsl::rx_id)) .as_partial_index() .do_nothing() .execute_async(&*conn) @@ -90,8 +83,8 @@ impl DataStore { pub async fn webhook_rx_list_resendable_events( &self, opctx: &OpContext, - rx_id: &WebhookReceiverUuid, - ) -> ListResultVec { + rx_id: &AlertReceiverUuid, + ) -> ListResultVec { Self::rx_list_resendable_events_query(*rx_id) .load_async(&*self.pool_connection_authorized(opctx).await?) .await @@ -99,37 +92,37 @@ impl DataStore { } fn rx_list_resendable_events_query( - rx_id: WebhookReceiverUuid, - ) -> impl RunnableQuery { + rx_id: AlertReceiverUuid, + ) -> impl RunnableQuery { use diesel::dsl::*; let (delivery, also_delivery) = diesel::alias!( schema::webhook_delivery as delivery, - schema::webhook_delivery as also_delivey + schema::webhook_delivery as also_delivery ); - event_dsl::webhook_event - .filter(event_dsl::event_class.ne(WebhookEventClass::Probe)) + alert_dsl::alert + .filter(alert_dsl::alert_class.ne(AlertClass::Probe)) .inner_join( - delivery.on(delivery.field(dsl::event_id).eq(event_dsl::id)), + delivery.on(delivery.field(dsl::alert_id).eq(alert_dsl::id)), ) .filter(delivery.field(dsl::rx_id).eq(rx_id.into_untyped_uuid())) .filter(not(exists( also_delivery .select(also_delivery.field(dsl::id)) .filter( - also_delivery.field(dsl::event_id).eq(event_dsl::id), + also_delivery.field(dsl::alert_id).eq(alert_dsl::id), ) .filter( also_delivery .field(dsl::state) - .ne(WebhookDeliveryState::Failed), + .ne(AlertDeliveryState::Failed), ) .filter( also_delivery .field(dsl::triggered_by) - .ne(WebhookDeliveryTrigger::Probe), + .ne(AlertDeliveryTrigger::Probe), ), ))) - .select(WebhookEvent::as_select()) + .select(Alert::as_select()) // the inner join means we may return the same event multiple times, // so only return distinct events. .distinct() @@ -138,15 +131,12 @@ impl DataStore { pub async fn webhook_rx_delivery_list( &self, opctx: &OpContext, - rx_id: &WebhookReceiverUuid, - triggers: &'static [WebhookDeliveryTrigger], - only_states: Vec, + rx_id: &AlertReceiverUuid, + triggers: &'static [AlertDeliveryTrigger], + only_states: Vec, pagparams: &DataPageParams<'_, (DateTime, Uuid)>, - ) -> ListResultVec<( - WebhookDelivery, - WebhookEventClass, - Vec, - )> { + ) -> ListResultVec<(WebhookDelivery, AlertClass, Vec)> + { let conn = self.pool_connection_authorized(opctx).await?; // Paginate the query, ordered by delivery UUID. let mut query = paginated_multicolumn( @@ -164,16 +154,14 @@ impl DataStore { // Join with the event table on the delivery's event ID, // so that we can grab the event class of the event that initiated // this delivery. 
- .inner_join( - event_dsl::webhook_event.on(dsl::event_id.eq(event_dsl::id)), - ); + .inner_join(alert_dsl::alert.on(dsl::alert_id.eq(alert_dsl::id))); if !only_states.is_empty() { query = query.filter(dsl::state.eq_any(only_states)); } let deliveries = query - .select((WebhookDelivery::as_select(), event_dsl::event_class)) - .load_async::<(WebhookDelivery, WebhookEventClass)>(&*conn) + .select((WebhookDelivery::as_select(), alert_dsl::alert_class)) + .load_async::<(WebhookDelivery, AlertClass)>(&*conn) .await .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server))?; @@ -201,7 +189,7 @@ impl DataStore { pub async fn webhook_rx_delivery_list_ready( &self, opctx: &OpContext, - rx_id: &WebhookReceiverUuid, + rx_id: &AlertReceiverUuid, cfg: &DeliveryConfig, ) -> Result + 'static, Error> { @@ -212,12 +200,12 @@ impl DataStore { // Filter out deliveries triggered by probe requests, as those are // executed synchronously by the probe endpoint, rather than by the // webhook deliverator. - .filter(dsl::triggered_by.ne(WebhookDeliveryTrigger::Probe)) + .filter(dsl::triggered_by.ne(AlertDeliveryTrigger::Probe)) // Only select deliveries that are still in progress. .filter( dsl::time_completed .is_null() - .and(dsl::state.eq(WebhookDeliveryState::Pending)), + .and(dsl::state.eq(AlertDeliveryState::Pending)), ) .filter(dsl::rx_id.eq(rx_id.into_untyped_uuid())) .filter((dsl::deliverator_id.is_null()).or( @@ -246,19 +234,17 @@ impl DataStore { .order_by(dsl::time_created.asc()) // Join with the `webhook_event` table to get the event class, which // is necessary to construct delivery requests. - .inner_join( - event_dsl::webhook_event.on(event_dsl::id.eq(dsl::event_id)), - ) + .inner_join(alert_dsl::alert.on(alert_dsl::id.eq(dsl::alert_id))) .select(( WebhookDelivery::as_select(), - event_dsl::event_class, - event_dsl::event, + alert_dsl::alert_class, + alert_dsl::payload, )) .load_async(&*conn) .await .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server))?; - Ok(rows.into_iter().map(|(delivery, event_class, event)| { - DeliveryAndEvent { delivery, event_class, event } + Ok(rows.into_iter().map(|(delivery, alert_class, event)| { + DeliveryAndEvent { delivery, alert_class, event } })) } @@ -278,7 +264,7 @@ impl DataStore { .filter( dsl::time_completed .is_null() - .and(dsl::state.eq(WebhookDeliveryState::Pending)), + .and(dsl::state.eq(AlertDeliveryState::Pending)), ) .filter(dsl::id.eq(id)) .filter(dsl::deliverator_id.is_null().or( @@ -328,7 +314,7 @@ impl DataStore { opctx.log, "couldn't start delivery attempt: {MSG}"; "delivery_id" => %id, - "event_id" => %delivery.event_id, + "alert_id" => %delivery.alert_id, "nexus_id" => %nexus_id, "found_deliverator_id" => ?found.deliverator_id, "found_time_leased" => ?found.time_leased, @@ -356,18 +342,18 @@ impl DataStore { let new_state = if attempt.result == WebhookDeliveryAttemptResult::Succeeded { // The delivery has completed successfully. - WebhookDeliveryState::Delivered + AlertDeliveryState::Delivered } else if *attempt.attempt >= MAX_ATTEMPTS { // The delivery attempt failed, and we are out of retries. This // delivery has failed permanently. - WebhookDeliveryState::Failed + AlertDeliveryState::Failed } else { // This delivery attempt failed, but we still have retries // remaining, so the delivery remains pending. 
- WebhookDeliveryState::Pending + AlertDeliveryState::Pending }; let (completed, new_nexus_id) = - if new_state != WebhookDeliveryState::Pending { + if new_state != AlertDeliveryState::Pending { // If the delivery has succeeded or failed permanently, set the // "time_completed" timestamp to mark it as finished. Also, leave // the delivering Nexus ID in place to maintain a record of who @@ -399,7 +385,7 @@ impl DataStore { .filter( dsl::time_completed .is_null() - .and(dsl::state.eq(WebhookDeliveryState::Pending)), + .and(dsl::state.eq(AlertDeliveryState::Pending)), ) .set(( dsl::state.eq(new_state), @@ -419,7 +405,7 @@ impl DataStore { } if found.time_completed.is_some() - || found.state != WebhookDeliveryState::Pending + || found.state != AlertDeliveryState::Pending { return Err(Error::conflict( "delivery was already marked as completed", @@ -443,7 +429,7 @@ impl DataStore { opctx.log, "{MSG}"; "delivery_id" => %delivery.id, - "event_id" => %delivery.event_id, + "alert_id" => %delivery.alert_id, "nexus_id" => %nexus_id, "found_deliverator_id" => ?found.deliverator_id, "found_time_leased" => ?found.time_leased, @@ -460,14 +446,13 @@ impl DataStore { mod test { use super::*; use crate::db::explain::ExplainableAsync; - use crate::db::model::WebhookDeliveryTrigger; use crate::db::pagination::Paginator; use crate::db::pub_test_utils::TestDatabase; use crate::db::raw_query_builder::expectorate_query_contents; use nexus_types::external_api::params; use omicron_common::api::external::IdentityMetadataCreateParams; use omicron_test_utils::dev; - use omicron_uuid_kinds::WebhookEventUuid; + use omicron_uuid_kinds::AlertUuid; #[tokio::test] async fn test_dispatched_deliveries_are_unique_per_rx() { @@ -497,12 +482,12 @@ mod test { .await .unwrap(); let rx_id = rx.rx.identity.id.into(); - let event_id = WebhookEventUuid::new_v4(); + let alert_id = AlertUuid::new_v4(); datastore - .webhook_event_create( + .alert_create( &opctx, - event_id, - WebhookEventClass::TestFoo, + alert_id, + AlertClass::TestFoo, serde_json::json!({ "answer": 42, }), @@ -511,9 +496,9 @@ mod test { .expect("can't create ye event"); let dispatch1 = WebhookDelivery::new( - &event_id, + &alert_id, &rx_id, - WebhookDeliveryTrigger::Event, + AlertDeliveryTrigger::Alert, ); let inserted = datastore .webhook_delivery_create_batch(&opctx, vec![dispatch1.clone()]) @@ -522,9 +507,9 @@ mod test { assert_eq!(inserted, 1, "first dispatched delivery should be created"); let dispatch2 = WebhookDelivery::new( - &event_id, + &alert_id, &rx_id, - WebhookDeliveryTrigger::Event, + AlertDeliveryTrigger::Alert, ); let inserted = datastore .webhook_delivery_create_batch(opctx, vec![dispatch2.clone()]) @@ -536,9 +521,9 @@ mod test { ); let resend1 = WebhookDelivery::new( - &event_id, + &alert_id, &rx_id, - WebhookDeliveryTrigger::Resend, + AlertDeliveryTrigger::Resend, ); let inserted = datastore .webhook_delivery_create_batch(opctx, vec![resend1.clone()]) @@ -550,9 +535,9 @@ mod test { ); let resend2 = WebhookDelivery::new( - &event_id, + &alert_id, &rx_id, - WebhookDeliveryTrigger::Resend, + AlertDeliveryTrigger::Resend, ); let inserted = datastore .webhook_delivery_create_batch(opctx, vec![resend2.clone()]) @@ -571,7 +556,7 @@ mod test { .webhook_rx_delivery_list( &opctx, &rx_id, - WebhookDeliveryTrigger::ALL, + AlertDeliveryTrigger::ALL, Vec::new(), &p.current_pagparams(), ) @@ -596,7 +581,7 @@ mod test { #[tokio::test] async fn expectorate_rx_list_resendable() { let query = DataStore::rx_list_resendable_events_query( - WebhookReceiverUuid::nil(), + 
AlertReceiverUuid::nil(), ); expectorate_query_contents( @@ -614,7 +599,7 @@ mod test { let conn = pool.claim().await.unwrap(); let query = DataStore::rx_list_resendable_events_query( - WebhookReceiverUuid::nil(), + AlertReceiverUuid::nil(), ); let explanation = query .explain_async(&conn) diff --git a/nexus/db-queries/src/policy_test/resource_builder.rs b/nexus/db-queries/src/policy_test/resource_builder.rs index 88e7b34d7a6..cc0c0c06932 100644 --- a/nexus/db-queries/src/policy_test/resource_builder.rs +++ b/nexus/db-queries/src/policy_test/resource_builder.rs @@ -278,8 +278,8 @@ impl_dyn_authorized_resource_for_resource!(authz::TufArtifact); impl_dyn_authorized_resource_for_resource!(authz::TufRepo); impl_dyn_authorized_resource_for_resource!(authz::Vpc); impl_dyn_authorized_resource_for_resource!(authz::VpcSubnet); -impl_dyn_authorized_resource_for_resource!(authz::WebhookEvent); -impl_dyn_authorized_resource_for_resource!(authz::WebhookReceiver); +impl_dyn_authorized_resource_for_resource!(authz::Alert); +impl_dyn_authorized_resource_for_resource!(authz::AlertReceiver); impl_dyn_authorized_resource_for_resource!(authz::WebhookSecret); impl_dyn_authorized_resource_for_resource!(authz::Zpool); @@ -291,7 +291,7 @@ impl_dyn_authorized_resource_for_global!(authz::DnsConfig); impl_dyn_authorized_resource_for_global!(authz::IpPoolList); impl_dyn_authorized_resource_for_global!(authz::Inventory); impl_dyn_authorized_resource_for_global!(authz::TargetReleaseConfig); -impl_dyn_authorized_resource_for_global!(authz::WebhookEventClassList); +impl_dyn_authorized_resource_for_global!(authz::AlertClassList); impl DynAuthorizedResource for authz::SiloCertificateList { fn do_authorize<'a, 'b>( diff --git a/nexus/db-queries/src/policy_test/resources.rs b/nexus/db-queries/src/policy_test/resources.rs index 6853288ff09..14da469ccd1 100644 --- a/nexus/db-queries/src/policy_test/resources.rs +++ b/nexus/db-queries/src/policy_test/resources.rs @@ -74,7 +74,7 @@ pub async fn make_resources( builder.new_resource(authz::INVENTORY); builder.new_resource(authz::IP_POOL_LIST); builder.new_resource(authz::TARGET_RELEASE_CONFIG); - builder.new_resource(authz::WEBHOOK_EVENT_CLASS_LIST); + builder.new_resource(authz::ALERT_CLASS_LIST); // Silo/organization/project hierarchy make_silo(&mut builder, "silo1", main_silo_id, true).await; @@ -172,12 +172,12 @@ pub async fn make_resources( LookupType::ById(loopback_address_id.into_untyped_uuid()), )); - let webhook_event_id = + let webhook_alert_id = "31cb17da-4164-4cbf-b9a3-b3e4a687c08b".parse().unwrap(); - builder.new_resource(authz::WebhookEvent::new( + builder.new_resource(authz::Alert::new( authz::FLEET, - webhook_event_id, - LookupType::ById(webhook_event_id.into_untyped_uuid()), + webhook_alert_id, + LookupType::ById(webhook_alert_id.into_untyped_uuid()), )); make_webhook_rx(&mut builder).await; @@ -403,9 +403,9 @@ async fn make_project( /// very miniscule hierarchy (a secret). 
async fn make_webhook_rx(builder: &mut ResourceBuilder<'_>) { let rx_name = "webhooked-on-phonics"; - let webhook_rx = authz::WebhookReceiver::new( + let webhook_rx = authz::AlertReceiver::new( authz::FLEET, - omicron_uuid_kinds::WebhookReceiverUuid::new_v4(), + omicron_uuid_kinds::AlertReceiverUuid::new_v4(), LookupType::ByName(rx_name.to_string()), ); builder.new_resource(webhook_rx.clone()); diff --git a/nexus/db-queries/tests/output/authz-roles.out b/nexus/db-queries/tests/output/authz-roles.out index e83cacbe3a9..6d95baf280a 100644 --- a/nexus/db-queries/tests/output/authz-roles.out +++ b/nexus/db-queries/tests/output/authz-roles.out @@ -124,7 +124,7 @@ resource: authz::TargetReleaseConfig silo1-proj1-viewer ✘ ✘ ✘ ✘ ✘ ✘ ✘ ✘ unauthenticated ! ! ! ! ! ! ! ! -resource: authz::WebhookEventClassList +resource: authz::AlertClassList USER Q R LC RP M MP CC D fleet-admin ✘ ✘ ✔ ✘ ✘ ✘ ✘ ✘ @@ -1258,7 +1258,7 @@ resource: LoopbackAddress id "9efbf1b1-16f9-45ab-864a-f7ebe501ae5b" silo1-proj1-viewer ✘ ✘ ✘ ✘ ✘ ✘ ✘ ✘ unauthenticated ! ! ! ! ! ! ! ! -resource: WebhookEvent id "31cb17da-4164-4cbf-b9a3-b3e4a687c08b" +resource: Alert id "31cb17da-4164-4cbf-b9a3-b3e4a687c08b" USER Q R LC RP M MP CC D fleet-admin ✘ ✔ ✔ ✔ ✔ ✔ ✔ ✔ @@ -1272,7 +1272,7 @@ resource: WebhookEvent id "31cb17da-4164-4cbf-b9a3-b3e4a687c08b" silo1-proj1-viewer ✘ ✘ ✘ ✘ ✘ ✘ ✘ ✘ unauthenticated ! ! ! ! ! ! ! ! -resource: WebhookReceiver "webhooked-on-phonics" +resource: AlertReceiver "webhooked-on-phonics" USER Q R LC RP M MP CC D fleet-admin ✘ ✔ ✔ ✔ ✔ ✔ ✔ ✔ diff --git a/nexus/db-queries/tests/output/webhook_rx_list_resendable_events.sql b/nexus/db-queries/tests/output/webhook_rx_list_resendable_events.sql index f39e1afeb09..4c0795a750b 100644 --- a/nexus/db-queries/tests/output/webhook_rx_list_resendable_events.sql +++ b/nexus/db-queries/tests/output/webhook_rx_list_resendable_events.sql @@ -1,25 +1,25 @@ SELECT DISTINCT - webhook_event.id, - webhook_event.time_created, - webhook_event.time_modified, - webhook_event.time_dispatched, - webhook_event.event_class, - webhook_event.event, - webhook_event.num_dispatched + alert.id, + alert.time_created, + alert.time_modified, + alert.time_dispatched, + alert.alert_class, + alert.payload, + alert.num_dispatched FROM - webhook_event INNER JOIN webhook_delivery AS delivery ON delivery.event_id = webhook_event.id + alert INNER JOIN webhook_delivery AS delivery ON delivery.alert_id = alert.id WHERE - (webhook_event.event_class != $1 AND delivery.rx_id = $2) + (alert.alert_class != $1 AND delivery.rx_id = $2) AND NOT ( EXISTS( SELECT - also_delivey.id + also_delivery.id FROM - webhook_delivery AS also_delivey + webhook_delivery AS also_delivery WHERE - (also_delivey.event_id = webhook_event.id AND also_delivey.state != $3) - AND also_delivey.triggered_by != $4 + (also_delivery.alert_id = alert.id AND also_delivery.state != $3) + AND also_delivery.triggered_by != $4 ) ) diff --git a/nexus/db-schema/src/enums.rs b/nexus/db-schema/src/enums.rs index 79d312c8b64..917705d9e7e 100644 --- a/nexus/db-schema/src/enums.rs +++ b/nexus/db-schema/src/enums.rs @@ -22,6 +22,9 @@ define_enums! { // Please keep this list in alphabetical order. AddressLotKindEnum => "address_lot_kind", AffinityPolicyEnum => "affinity_policy", + AlertClassEnum => "alert_class", + AlertDeliveryTriggerEnum => "alert_delivery_trigger", + AlertDeliveryStateEnum => "alert_delivery_state", AuthenticationModeEnum => "authentication_mode", BfdModeEnum => "bfd_mode", BlockSizeEnum => "block_size", @@ -85,9 +88,6 @@ define_enums! 
{ VpcFirewallRuleProtocolEnum => "vpc_firewall_rule_protocol", VpcFirewallRuleStatusEnum => "vpc_firewall_rule_status", VpcRouterKindEnum => "vpc_router_kind", - WebhookEventClassEnum => "webhook_event_class", WebhookDeliveryAttemptResultEnum => "webhook_delivery_attempt_result", - WebhookDeliveryTriggerEnum => "webhook_delivery_trigger", - WebhookDeliveryStateEnum => "webhook_delivery_state", ZoneTypeEnum => "zone_type", } diff --git a/nexus/db-schema/src/schema.rs b/nexus/db-schema/src/schema.rs index 89d92585ce0..1fd05de42fe 100644 --- a/nexus/db-schema/src/schema.rs +++ b/nexus/db-schema/src/schema.rs @@ -2205,7 +2205,7 @@ table! { } table! { - webhook_receiver (id) { + alert_receiver (id) { id -> Uuid, name -> Text, description -> Text, @@ -2230,16 +2230,16 @@ table! { } table! { - webhook_rx_subscription (rx_id, event_class) { + alert_subscription (rx_id, alert_class) { rx_id -> Uuid, - event_class -> crate::enums::WebhookEventClassEnum, + alert_class -> crate::enums::AlertClassEnum, glob -> Nullable, time_created -> Timestamptz, } } table! { - webhook_rx_event_glob (rx_id, glob) { + alert_glob (rx_id, glob) { rx_id -> Uuid, glob -> Text, regex -> Text, @@ -2249,23 +2249,23 @@ table! { } allow_tables_to_appear_in_same_query!( - webhook_receiver, + alert_receiver, webhook_secret, - webhook_rx_subscription, - webhook_rx_event_glob, - webhook_event, + alert_subscription, + alert_glob, + alert, ); -joinable!(webhook_rx_subscription -> webhook_receiver (rx_id)); -joinable!(webhook_secret -> webhook_receiver (rx_id)); -joinable!(webhook_rx_event_glob -> webhook_receiver (rx_id)); +joinable!(alert_subscription -> alert_receiver (rx_id)); +joinable!(webhook_secret -> alert_receiver (rx_id)); +joinable!(alert_glob -> alert_receiver (rx_id)); table! { - webhook_event (id) { + alert (id) { id -> Uuid, time_created -> Timestamptz, time_modified -> Timestamptz, - event_class -> crate::enums::WebhookEventClassEnum, - event -> Jsonb, + alert_class -> crate::enums::AlertClassEnum, + payload -> Jsonb, time_dispatched -> Nullable, num_dispatched -> Int8, } @@ -2274,23 +2274,23 @@ table! { table! { webhook_delivery (id) { id -> Uuid, - event_id -> Uuid, + alert_id -> Uuid, rx_id -> Uuid, - triggered_by -> crate::enums::WebhookDeliveryTriggerEnum, + triggered_by -> crate::enums::AlertDeliveryTriggerEnum, attempts -> Int2, time_created -> Timestamptz, time_completed -> Nullable, - state -> crate::enums::WebhookDeliveryStateEnum, + state -> crate::enums::AlertDeliveryStateEnum, deliverator_id -> Nullable, time_leased -> Nullable, } } -allow_tables_to_appear_in_same_query!(webhook_receiver, webhook_delivery); -joinable!(webhook_delivery -> webhook_receiver (rx_id)); -allow_tables_to_appear_in_same_query!(webhook_delivery, webhook_event); -allow_tables_to_appear_in_same_query!(webhook_delivery_attempt, webhook_event); -joinable!(webhook_delivery -> webhook_event (event_id)); +allow_tables_to_appear_in_same_query!(alert_receiver, webhook_delivery); +joinable!(webhook_delivery -> alert_receiver (rx_id)); +allow_tables_to_appear_in_same_query!(webhook_delivery, alert); +allow_tables_to_appear_in_same_query!(webhook_delivery_attempt, alert); +joinable!(webhook_delivery -> alert (alert_id)); table! 
{ webhook_delivery_attempt (id) { diff --git a/nexus/examples/config-second.toml b/nexus/examples/config-second.toml index 5387793ec81..8f64d5f0859 100644 --- a/nexus/examples/config-second.toml +++ b/nexus/examples/config-second.toml @@ -145,7 +145,7 @@ tuf_artifact_replication.period_secs = 300 tuf_artifact_replication.min_sled_replication = 1 # In general, the webhook dispatcher will be activated when events are queued, # so we don't need to periodically activate it *that* frequently. -webhook_dispatcher.period_secs = 60 +alert_dispatcher.period_secs = 60 webhook_deliverator.period_secs = 60 read_only_region_replacement_start.period_secs = 30 diff --git a/nexus/examples/config.toml b/nexus/examples/config.toml index e5b4d564f55..d2684608add 100644 --- a/nexus/examples/config.toml +++ b/nexus/examples/config.toml @@ -131,7 +131,7 @@ tuf_artifact_replication.period_secs = 300 tuf_artifact_replication.min_sled_replication = 1 # In general, the webhook dispatcher will be activated when events are queued, # so we don't need to periodically activate it *that* frequently. -webhook_dispatcher.period_secs = 60 +alert_dispatcher.period_secs = 60 webhook_deliverator.period_secs = 60 read_only_region_replacement_start.period_secs = 30 diff --git a/nexus/external-api/output/nexus_tags.txt b/nexus/external-api/output/nexus_tags.txt index 448c554c968..b2359eb8fed 100644 --- a/nexus/external-api/output/nexus_tags.txt +++ b/nexus/external-api/output/nexus_tags.txt @@ -160,6 +160,23 @@ snapshot_delete DELETE /v1/snapshots/{snapshot} snapshot_list GET /v1/snapshots snapshot_view GET /v1/snapshots/{snapshot} +API operations found with tag "system/alerts" +OPERATION ID METHOD URL PATH +alert_class_list GET /v1/alert-classes +alert_delivery_list GET /v1/alert-receivers/{receiver}/deliveries +alert_delivery_resend POST /v1/alerts/{alert_id}/resend +alert_receiver_delete DELETE /v1/alert-receivers/{receiver} +alert_receiver_list GET /v1/alert-receivers +alert_receiver_probe POST /v1/alert-receivers/{receiver}/probe +alert_receiver_subscription_add POST /v1/alert-receivers/{receiver}/subscriptions +alert_receiver_subscription_remove DELETE /v1/alert-receivers/{receiver}/subscriptions/{subscription} +alert_receiver_view GET /v1/alert-receivers/{receiver} +webhook_receiver_create POST /v1/webhook-receivers +webhook_receiver_update PUT /v1/webhook-receivers/{receiver} +webhook_secrets_add POST /v1/webhook-secrets +webhook_secrets_delete DELETE /v1/webhook-secrets/{secret_id} +webhook_secrets_list GET /v1/webhook-secrets + API operations found with tag "system/hardware" OPERATION ID METHOD URL PATH networking_switch_port_apply_settings POST /v1/system/hardware/switch-port/{port}/settings @@ -268,23 +285,6 @@ API operations found with tag "system/status" OPERATION ID METHOD URL PATH ping GET /v1/ping -API operations found with tag "system/webhooks" -OPERATION ID METHOD URL PATH -webhook_delivery_list GET /v1/webhooks/deliveries -webhook_delivery_resend POST /v1/webhooks/deliveries/{event_id}/resend -webhook_event_class_list GET /v1/webhooks/event-classes -webhook_receiver_create POST /v1/webhooks/receivers -webhook_receiver_delete DELETE /v1/webhooks/receivers/{receiver} -webhook_receiver_list GET /v1/webhooks/receivers -webhook_receiver_probe POST /v1/webhooks/receivers/{receiver}/probe -webhook_receiver_subscription_add POST /v1/webhooks/receivers/{receiver}/subscriptions -webhook_receiver_subscription_remove DELETE /v1/webhooks/receivers/{receiver}/subscriptions/{subscription} -webhook_receiver_update PUT 
/v1/webhooks/receivers/{receiver} -webhook_receiver_view GET /v1/webhooks/receivers/{receiver} -webhook_secrets_add POST /v1/webhooks/secrets -webhook_secrets_delete DELETE /v1/webhooks/secrets/{secret_id} -webhook_secrets_list GET /v1/webhooks/secrets - API operations found with tag "vpcs" OPERATION ID METHOD URL PATH internet_gateway_create POST /v1/internet-gateways diff --git a/nexus/external-api/src/lib.rs b/nexus/external-api/src/lib.rs index fb4a6da27e8..cabbe7abfec 100644 --- a/nexus/external-api/src/lib.rs +++ b/nexus/external-api/src/lib.rs @@ -168,10 +168,10 @@ const PUT_UPDATE_REPOSITORY_MAX_BYTES: usize = 4 * GIB; url = "http://docs.oxide.computer/api/vpcs" } }, - "system/webhooks" = { - description = "Webhooks deliver notifications for audit log events and fault management alerts.", + "system/alerts" = { + description = "Alerts deliver notifications for events that occur on the Oxide rack", external_docs = { - url = "http://docs.oxide.computer/api/webhooks" + url = "http://docs.oxide.computer/api/alerts" } }, "system/probes" = { @@ -3542,197 +3542,205 @@ pub trait NexusExternalApi { params: TypedBody, ) -> Result, HttpError>; - // Webhooks + // Alerts - /// List webhook event classes + /// List alert classes #[endpoint { method = GET, - path = "/v1/webhooks/event-classes", - tags = ["system/webhooks"], + path = "/v1/alert-classes", + tags = ["system/alerts"], }] - async fn webhook_event_class_list( + async fn alert_class_list( rqctx: RequestContext, pag_params: Query< - PaginationParams, + PaginationParams, >, - filter: Query, - ) -> Result>, HttpError>; + filter: Query, + ) -> Result>, HttpError>; - /// List webhook receivers + /// List alert receivers #[endpoint { method = GET, - path = "/v1/webhooks/receivers", - tags = ["system/webhooks"], + path = "/v1/alert-receivers", + tags = ["system/alerts"], }] - async fn webhook_receiver_list( + async fn alert_receiver_list( rqctx: RequestContext, query_params: Query, - ) -> Result>, HttpError>; + ) -> Result>, HttpError>; - /// Fetch webhook receiver + /// Fetch alert receiver #[endpoint { method = GET, - path = "/v1/webhooks/receivers/{receiver}", - tags = ["system/webhooks"], + path = "/v1/alert-receivers/{receiver}", + tags = ["system/alerts"], }] - async fn webhook_receiver_view( + async fn alert_receiver_view( rqctx: RequestContext, - path_params: Path, - ) -> Result, HttpError>; + path_params: Path, + ) -> Result, HttpError>; - /// Create webhook receiver + /// Delete alert receiver #[endpoint { - method = POST, - path = "/v1/webhooks/receivers", - tags = ["system/webhooks"], + method = DELETE, + path = "/v1/alert-receivers/{receiver}", + tags = ["system/alerts"], }] - async fn webhook_receiver_create( + async fn alert_receiver_delete( rqctx: RequestContext, - params: TypedBody, - ) -> Result, HttpError>; + path_params: Path, + ) -> Result; - /// Update webhook receiver - /// - /// Note that receiver secrets are NOT added or removed using this endpoint. - /// Instead, use the `/v1/webhooks/{secrets}/?receiver={receiver}` endpoint - /// to add and remove secrets. 
+ /// Add alert receiver subscription #[endpoint { - method = PUT, - path = "/v1/webhooks/receivers/{receiver}", - tags = ["system/webhooks"], + method = POST, + path = "/v1/alert-receivers/{receiver}/subscriptions", + tags = ["system/alerts"], }] - async fn webhook_receiver_update( + async fn alert_receiver_subscription_add( rqctx: RequestContext, - path_params: Path, - params: TypedBody, - ) -> Result; + path_params: Path, + params: TypedBody, + ) -> Result, HttpError>; - /// Delete webhook receiver + /// Remove alert receiver subscription #[endpoint { method = DELETE, - path = "/v1/webhooks/receivers/{receiver}", - tags = ["system/webhooks"], + path = "/v1/alert-receivers/{receiver}/subscriptions/{subscription}", + tags = ["system/alerts"], }] - async fn webhook_receiver_delete( + async fn alert_receiver_subscription_remove( rqctx: RequestContext, - path_params: Path, + path_params: Path, ) -> Result; - /// Add webhook receiver subscription - #[endpoint { - method = POST, - path = "/v1/webhooks/receivers/{receiver}/subscriptions", - tags = ["system/webhooks"], - }] - async fn webhook_receiver_subscription_add( - rqctx: RequestContext, - path_params: Path, - params: TypedBody, - ) -> Result, HttpError>; - - /// Remove webhook receiver subscription + /// List delivery attempts to alert receiver + /// + /// Optional query parameters to this endpoint may be used to filter + /// deliveries by state. If none of the `failed`, `pending` or `delivered` + /// query parameters are present, all deliveries are returned. If one or + /// more of these parameters are provided, only those which are set to + /// "true" are included in the response. #[endpoint { - method = DELETE, - path = "/v1/webhooks/receivers/{receiver}/subscriptions/{subscription}", - tags = ["system/webhooks"], + method = GET, + path = "/v1/alert-receivers/{receiver}/deliveries", + tags = ["system/alerts"], }] - async fn webhook_receiver_subscription_remove( + async fn alert_delivery_list( rqctx: RequestContext, - path_params: Path, - ) -> Result; + path_params: Path, + state_filter: Query, + pagination: Query, + ) -> Result>, HttpError>; - /// Send liveness probe to webhook receiver + /// Send liveness probe to alert receiver + /// + /// This endpoint synchronously sends a liveness probe to the selected alert + /// receiver. The response message describes the outcome of the probe: + /// either the successful response (as appropriate), or indication of why + /// the probe failed. /// - /// This endpoint synchronously sends a liveness probe request to the - /// selected webhook receiver. The response message describes the outcome of - /// the probe request: either the response from the receiver endpoint, or an - /// indication of why the probe failed. + /// The result of the probe is represented as an `AlertDelivery` model. + /// Details relating to the status of the probe depend on the alert delivery + /// mechanism, and are included in the `AlertDeliveryAttempts` model. For + /// example, webhook receiver liveness probes include the HTTP status code + /// returned by the receiver endpoint. /// /// Note that the response status is `200 OK` as long as a probe request was - /// able to be sent to the receiver endpoint. If the receiver responds with - /// another status code, including an error, this will be indicated by the - /// response body, *not* the status of the response. + /// able to be sent to the receiver endpoint. 
If an HTTP-based receiver, + /// such as a webhook, responds to the another status code, including an + /// error, this will be indicated by the response body, *not* the status of + /// the response. /// /// The `resend` query parameter can be used to request re-delivery of /// failed events if the liveness probe succeeds. If it is set to true and - /// the webhook receiver responds to the probe request with a `2xx` status - /// code, any events for which delivery to this receiver has failed will be - /// queued for re-delivery. + /// the liveness probe succeeds, any alerts for which delivery to this + /// receiver has failed will be queued for re-delivery. + #[endpoint { + method = POST, + path = "/v1/alert-receivers/{receiver}/probe", + tags = ["system/alerts"], + }] + async fn alert_receiver_probe( + rqctx: RequestContext, + path_params: Path, + query_params: Query, + ) -> Result, HttpError>; + + /// Request re-delivery of alert + #[endpoint { + method = POST, + path = "/v1/alerts/{alert_id}/resend", + tags = ["system/alerts"], + }] + async fn alert_delivery_resend( + rqctx: RequestContext, + path_params: Path, + receiver: Query, + ) -> Result, HttpError>; + + // ALERTS: WEBHOOKS + + /// Create webhook receiver #[endpoint { method = POST, - path = "/v1/webhooks/receivers/{receiver}/probe", - tags = ["system/webhooks"], + path = "/v1/webhook-receivers", + tags = ["system/alerts"], }] - async fn webhook_receiver_probe( + async fn webhook_receiver_create( rqctx: RequestContext, - path_params: Path, - query_params: Query, - ) -> Result, HttpError>; + params: TypedBody, + ) -> Result, HttpError>; + + /// Update webhook receiver + /// + /// Note that receiver secrets are NOT added or removed using this endpoint. + /// Instead, use the `/v1/webhooks/{secrets}/?receiver={receiver}` endpoint + /// to add and remove secrets. + #[endpoint { + method = PUT, + path = "/v1/webhook-receivers/{receiver}", + tags = ["system/alerts"], + }] + async fn webhook_receiver_update( + rqctx: RequestContext, + path_params: Path, + params: TypedBody, + ) -> Result; /// List webhook receiver secret IDs #[endpoint { method = GET, - path = "/v1/webhooks/secrets", - tags = ["system/webhooks"], + path = "/v1/webhook-secrets", + tags = ["system/alerts"], }] async fn webhook_secrets_list( rqctx: RequestContext, - query_params: Query, + query_params: Query, ) -> Result, HttpError>; /// Add secret to webhook receiver #[endpoint { method = POST, - path = "/v1/webhooks/secrets", - tags = ["system/webhooks"], + path = "/v1/webhook-secrets", + tags = ["system/alerts"], }] async fn webhook_secrets_add( rqctx: RequestContext, - query_params: Query, + query_params: Query, params: TypedBody, ) -> Result, HttpError>; /// Remove secret from webhook receiver #[endpoint { method = DELETE, - path = "/v1/webhooks/secrets/{secret_id}", - tags = ["system/webhooks"], + path = "/v1/webhook-secrets/{secret_id}", + tags = ["system/alerts"], }] async fn webhook_secrets_delete( rqctx: RequestContext, path_params: Path, ) -> Result; - - /// List delivery attempts to webhook receiver - /// - /// Optional query parameters to this endpoint may be used to filter - /// deliveries by state. If none of the `failed`, `pending` or `delivered` - /// query parameters are present, all deliveries are returned. If one or - /// more of these parameters are provided, only those which are set to - /// "true" are included in the response. 
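// Illustrative only (not part of this change): the request shapes a client
// might construct against the renamed alert endpoints. The `failed`/`pending`/
// `delivered` and `resend` query parameters are described in the doc comments
// above; the `receiver` query-parameter name on the resend endpoint is an
// assumption, and both identifiers are placeholders.
let receiver = "my-webhook-rx";
let alert_id = "00000000-0000-0000-0000-000000000000";
// List only failed and pending deliveries to this receiver.
let deliveries_url = format!(
    "/v1/alert-receivers/{receiver}/deliveries?failed=true&pending=true"
);
// Probe the receiver, re-queueing failed alerts if the probe succeeds.
let probe_url = format!("/v1/alert-receivers/{receiver}/probe?resend=true");
// Request re-delivery of a specific alert to this receiver.
let resend_url = format!("/v1/alerts/{alert_id}/resend?receiver={receiver}");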
-    #[endpoint {
-        method = GET,
-        path = "/v1/webhooks/deliveries",
-        tags = ["system/webhooks"],
-    }]
-    async fn webhook_delivery_list(
-        rqctx: RequestContext,
-        receiver: Query,
-        state_filter: Query,
-        pagination: Query,
-    ) -> Result>, HttpError>;
-
-    /// Request re-delivery of webhook event
-    #[endpoint {
-        method = POST,
-        path = "/v1/webhooks/deliveries/{event_id}/resend",
-        tags = ["system/webhooks"],
-    }]
-    async fn webhook_delivery_resend(
-        rqctx: RequestContext,
-        path_params: Path,
-        receiver: Query,
-    ) -> Result, HttpError>;
 }
 
 /// Perform extra validations on the OpenAPI spec.
diff --git a/nexus/src/app/alert.rs b/nexus/src/app/alert.rs
new file mode 100644
index 00000000000..91f21571723
--- /dev/null
+++ b/nexus/src/app/alert.rs
@@ -0,0 +1,551 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at https://mozilla.org/MPL/2.0/.
+
+//! # Alerts
+//!
+//! ## Dramatis Personae
+//!
+//! There are two central entities in our alerting system:
+//!
+//! + **Alert receivers** represent an external entity to which alerts are
+//! delivered, and the configuration associated with that entity. An alert
+//! receiver [subscribes](#alert-subscriptions) to a set of alert classes to
+//! configure which alerts should be delivered to that receiver. With the
+//! exception of the [`Alert`]s themselves, most other resources in the alert
+//! API are children of the [`AlertReceiver`] API resource.
+//!
+//! Various mechanisms for delivering alerts are represented by "subtypes" of
+//! alert receivers. At present, [webhooks](super::webhook) are the only
+//! such subtype. Different subtypes of alert receivers are created and
+//! modified by separate APIs for that particular type of receiver, as
+//! different configuration options exist based on the receiver type.
+//! However, some operations, such as listing receivers, viewing or
+//! deleting a receiver, adding and removing subscriptions, and resending an
+//! alert, are common to all receiver types.
+//!
+//! + **Alerts** represent events in the system for which alert
+//! notifications are generated and sent to receivers. The control plane
+//! calls the [`Nexus::alert_publish`] method to record a new event
+//! and publish it to receivers.
+//!
+//! Alerts are categorized into [alert classes], as described in RFD
+//! 538. Receivers *subscribe* to these classes, indicating that they wish to
+//! be notified when an event with a particular class occurs.
+//!
+//! Two background tasks implement the reliable persistent workflow of
+//! determining what events should be sent to what receiver, and performing the
+//! actual HTTP requests to send the alert to the receiver:
+//!
+//! + The `alert_dispatcher` task is responsible for *dispatching* alerts to
+//! receivers. For each event that has not yet been dispatched, the task
+//! queries the database for alert receivers that have subscribed to that
+//! alert, and creates a *delivery record* in the appropriate delivery table,
+//! indicating that the alert should be sent to that receiver.
+//!
+//! + The `webhook_deliverator`[^1] task reads webhook delivery records and
+//! sends HTTP requests to the webhook receiver's endpoint for each webhook
+//! delivery that is currently in flight. The deliverator is responsible
+//! for recording the status of each *delivery attempt*. Retries and retry
+//! backoff are the responsibility of the deliverator.
+//!
+//! In the future, other delivery mechanisms may be implemented by their own
+//! deliverator background tasks, similarly to webhooks. Each delivery
+//! mechanism will operate on its own delivery table, as different delivery
+//! mechanisms may need to record different state associated with a delivery.
+//!
+//! ## Alert Subscriptions
+//!
+//! A receiver's subscriptions take one of two forms:
+//!
+//! + **Exact** subscriptions are when a receiver subscribes to a specific alert
+//! class string. These are represented by entries in the
+//! `alert_subscription` table in CockroachDB.
+//!
+//! + **Glob** subscriptions include wildcard segments that may match multiple
+//! values. The globbing syntax is discussed in greater detail in RFD 538.
+//!
+//! We implement glob subscriptions by evaluating the glob against the list of
+//! known alert classes when the glob is *created*, and creating corresponding
+//! exact subscriptions for each alert class that matches the glob. This way,
+//! we need not perform complex pattern matching in the database when
+//! dispatching an alert, and can instead simply query for the existence of a
+//! record in the `alert_subscription` table. Each exact subscription entry
+//! generated by a glob records which glob it came from, which is used when
+//! a receiver's subscriptions change.
+//!
+//! Because the generation of exact subscriptions from globs occurs when the
+//! subscription is created, globs must be *reprocessed* when new alert classes
+//! are added to the system, generating new exact subscriptions for any
+//! newly-added alert classes that match the glob, and potentially removing
+//! subscriptions to any defunct alert classes. This could occur in any software
+//! release where new kinds of events are implemented. Therefore, when glob
+//! subscriptions are created, we record the database schema version as part of
+//! that glob subscription. Because alert classes are represented as a SQL
+//! `enum` type, we know that any change to the alert classes should change the
+//! database schema version as well. This way, we can detect whether a glob's
+//! list of subscriptions is up to date. The `alert_dispatcher` background
+//! task will query the database for any globs which were last reprocessed at
+//! earlier database schema versions and reprocess those globs prior to
+//! attempting to dispatch events to receivers.
+//!
+//! ## Deliveries, Delivery Attempts, and Liveness Probes
+//!
+//! A *delivery* represents the state associated with sending an alert to a
+//! particular receiver.
+//!
+//! At present, the only delivery type that exists is the *webhook
+//! delivery*, which represents the process of sending HTTP request(s)
+//! representing an alert to a receiver. Failed HTTP requests are retried up
+//! to two times, so a delivery may consist of up to three *delivery attempts*.
+//! Each time the `webhook_deliverator` background task is activated, it
+//! searches for deliveries which have not yet succeeded or permanently failed,
+//! which are not presently being delivered by another Nexus, and for which the
+//! backoff period for any prior failed delivery attempts has elapsed. It then
+//! sends an HTTP request to the webhook receiver, and records the result,
+//! creating a new `webhook_delivery_attempt` record and updating the
+//! `webhook_delivery` record.
+//!
+//! Multiple Nexii use an advisory lease mechanism to avoid attempting to
+//! deliver the same event simultaneously, by setting their UUID and a
+//! timestamp on the `webhook_delivery` record. Because webhook delivery is
+//! at-least-once, this lease mechanism is NOT REQUIRED FOR CORRECTNESS IN ANY
+//! WAY, Andrew. :) Instead, it serves only to reduce duplicate work.
+//! Therefore, should a Nexus acquire a lease on a delivery and fail to
+//! complete the delivery attempt within a period of time, another Nexus is
+//! permitted to clobber its lease.
+//!
+//! Deliveries are created either because an alert occurred and a webhook
+//! receiver is subscribed to it, or because we were asked to resend a previous
+//! delivery that failed permanently by exhausting its retry budget. Initial
+//! deliveries are created by activations of the webhook dispatcher background
+//! task. When creating a delivery, the data associated with the alert record
+//! in the `alert` table is processed to produce the data payload that
+//! will actually be sent to the receiver. Data which the receiver's service
+//! account is not authorized to read is filtered out of the payload.[^2]
+//!
+//! Re-delivery of an event can be requested either via the alert resend API
+//! endpoint, or by a *liveness probe* succeeding. Liveness probes are
+//! synthetic delivery requests sent to a webhook receiver to check whether it's
+//! actually able to receive an event. They are triggered via the
+//! [`Nexus::webhook_receiver_probe`] API endpoint. A probe may optionally
+//! request that any events for which all past deliveries have failed be resent
+//! if it succeeds. Delivery records are also created to represent the outcome
+//! of a probe.
+//!
+//! [RFD 538]: https://rfd.shared.oxide.computer/538
+//! [alert classes]: https://rfd.shared.oxide.computer/rfd/538#_event_classes
+//! [`AlertReceiver`]: nexus_db_queries::db::model::AlertReceiver
+//!
+//! [^1]: Read _Snow Crash_, if you haven't already.
+//! [^2]: Presently, all alert receivers have the fleet.viewer role, so
+//! this "filtering" doesn't actually do anything. When webhook receivers
+//! with more restrictive permissions are implemented, please remember to
+//! delete this footnote.
+
+use crate::Nexus;
+use chrono::DateTime;
+use chrono::Utc;
+use nexus_db_lookup::LookupPath;
+use nexus_db_lookup::lookup;
+use nexus_db_queries::authz;
+use nexus_db_queries::context::OpContext;
+use nexus_db_queries::db::model::Alert;
+use nexus_db_queries::db::model::AlertClass;
+use nexus_db_queries::db::model::AlertDeliveryState;
+use nexus_db_queries::db::model::AlertDeliveryTrigger;
+use nexus_db_queries::db::model::WebhookDelivery;
+use nexus_db_queries::db::model::WebhookReceiverConfig;
+use nexus_types::external_api::params;
+use nexus_types::external_api::shared;
+use nexus_types::external_api::views;
+use nexus_types::identity::Asset;
+use omicron_common::api::external::CreateResult;
+use omicron_common::api::external::DataPageParams;
+use omicron_common::api::external::DeleteResult;
+use omicron_common::api::external::Error;
+use omicron_common::api::external::ListResultVec;
+use omicron_common::api::external::LookupResult;
+use omicron_common::api::external::NameOrId;
+use omicron_common::api::external::http_pagination::PaginatedBy;
+use omicron_uuid_kinds::AlertReceiverUuid;
+use omicron_uuid_kinds::AlertUuid;
+use omicron_uuid_kinds::GenericUuid;
+use omicron_uuid_kinds::WebhookDeliveryUuid;
+use uuid::Uuid;
+
+impl Nexus {
+    /// Publish a new alert, with the provided `id`, `alert_class`, and
+    /// JSON data payload.
+ /// + /// If this method returns `Ok`, the event has been durably recorded in + /// CockroachDB. Once the new event record is inserted into the database, + /// the webhook dispatcher background task is activated to dispatch the + /// event to receivers. However, if (for whatever reason) this Nexus fails + /// to do that, the event remains durably in the database to be dispatched + /// and delivered by someone else. + pub async fn alert_publish( + &self, + opctx: &OpContext, + id: AlertUuid, + class: AlertClass, + event: serde_json::Value, + ) -> Result { + let alert = + self.datastore().alert_create(opctx, id, class, event).await?; + slog::debug!( + &opctx.log, + "published alert"; + "alert_id" => ?id, + "alert_class" => %alert.class, + "time_created" => ?alert.identity.time_created, + ); + + // Once the alert has been inserted, activate the dispatcher task to + // ensure its propagated to receivers. + self.background_tasks.task_alert_dispatcher.activate(); + + Ok(alert) + } + + // + // Lookups + // + + pub fn alert_receiver_lookup<'a>( + &'a self, + opctx: &'a OpContext, + rx_selector: params::AlertReceiverSelector, + ) -> LookupResult> { + match rx_selector.receiver { + NameOrId::Id(id) => { + let rx = LookupPath::new(opctx, &self.db_datastore) + .alert_receiver_id(AlertReceiverUuid::from_untyped_uuid( + id, + )); + Ok(rx) + } + NameOrId::Name(name) => { + let rx = LookupPath::new(opctx, &self.db_datastore) + .alert_receiver_name_owned(name.into()); + Ok(rx) + } + } + } + + pub fn alert_lookup<'a>( + &'a self, + opctx: &'a OpContext, + params::AlertSelector { alert_id }: params::AlertSelector, + ) -> LookupResult> { + let event = LookupPath::new(opctx, &self.db_datastore) + .alert_id(AlertUuid::from_untyped_uuid(alert_id)); + Ok(event) + } + + // + // Alert class API + // + pub async fn alert_class_list( + &self, + opctx: &OpContext, + filter: params::AlertClassFilter, + pagparams: DataPageParams<'_, params::AlertClassPage>, + ) -> ListResultVec { + opctx + .authorize(authz::Action::ListChildren, &authz::ALERT_CLASS_LIST) + .await?; + Self::actually_list_alert_classes(filter, pagparams) + } + + // This is factored out to avoid having to make a whole Nexus to test it. + fn actually_list_alert_classes( + params::AlertClassFilter { filter }: params::AlertClassFilter, + pagparams: DataPageParams<'_, params::AlertClassPage>, + ) -> ListResultVec { + use nexus_db_model::AlertSubscriptionKind; + + let regex = if let Some(filter) = filter { + let sub = AlertSubscriptionKind::try_from(filter)?; + let regex_string = match sub { + AlertSubscriptionKind::Exact(class) => class.as_str(), + AlertSubscriptionKind::Glob(ref glob) => glob.regex.as_str(), + }; + let re = regex::Regex::new(regex_string).map_err(|e| { + // This oughtn't happen, provided the code for producing the + // regex for a glob is correct. + Error::InternalError { + internal_message: format!( + "valid alert class globs ({sub:?}) should always \ + produce a valid regex, and yet: {e:?}" + ), + } + })?; + Some(re) + } else { + None + }; + + // If we're resuming a previous scan, figure out where to start. + let start = if let Some(params::AlertClassPage { last_seen }) = + pagparams.marker + { + let start = AlertClass::ALL_CLASSES.iter().enumerate().find_map( + |(idx, class)| { + if class.as_str() == last_seen { Some(idx) } else { None } + }, + ); + match start { + Some(start) => start + 1, + None => return Ok(Vec::new()), + } + } else { + 0 + }; + + // This shouldn't ever happen, but...don't panic I guess. 
+ if start > AlertClass::ALL_CLASSES.len() { + return Ok(Vec::new()); + } + + let result = AlertClass::ALL_CLASSES[start..] + .iter() + .filter_map(|&class| { + // Skip test classes, as they should not be used in the public + // API, except in test builds, where we need them + // for, you know... testing... + if !cfg!(test) && class.is_test() { + return None; + } + if let Some(ref regex) = regex { + if !regex.is_match(class.as_str()) { + return None; + } + } + Some(class.into()) + }) + .take(pagparams.limit.get() as usize) + .collect::>(); + Ok(result) + } + + // + // Receiver configuration API methods + // + + pub async fn alert_receiver_list( + &self, + opctx: &OpContext, + pagparams: &PaginatedBy<'_>, + ) -> ListResultVec { + opctx.authorize(authz::Action::ListChildren, &authz::FLEET).await?; + self.datastore().alert_rx_list(opctx, pagparams).await + } + + pub async fn alert_receiver_config_fetch( + &self, + opctx: &OpContext, + rx: lookup::AlertReceiver<'_>, + ) -> LookupResult { + let (authz_rx, rx) = rx.fetch().await?; + let (subscriptions, secrets) = + self.datastore().webhook_rx_config_fetch(opctx, &authz_rx).await?; + Ok(WebhookReceiverConfig { rx, secrets, subscriptions }) + } + + pub async fn alert_receiver_delivery_list( + &self, + opctx: &OpContext, + rx: lookup::AlertReceiver<'_>, + filter: params::AlertDeliveryStateFilter, + pagparams: &DataPageParams<'_, (DateTime, Uuid)>, + ) -> ListResultVec { + let (authz_rx,) = rx.lookup_for(authz::Action::ListChildren).await?; + let only_states = if filter.include_all() { + Vec::new() + } else { + let mut states = Vec::with_capacity(3); + if filter.include_failed() { + states.push(AlertDeliveryState::Failed); + } + if filter.include_pending() { + states.push(AlertDeliveryState::Pending); + } + if filter.include_delivered() { + states.push(AlertDeliveryState::Delivered); + } + states + }; + let deliveries = self + .datastore() + .webhook_rx_delivery_list( + opctx, + &authz_rx.id(), + // No probes; they could have their own list endpoint later... + &[AlertDeliveryTrigger::Alert, AlertDeliveryTrigger::Resend], + only_states, + pagparams, + ) + .await? 
+ .into_iter() + .map(|(delivery, class, attempts)| { + delivery.to_api_delivery(class, &attempts) + }) + .collect(); + Ok(deliveries) + } + + // + // Receiver subscription API methods + // + + pub async fn alert_receiver_subscription_add( + &self, + opctx: &OpContext, + rx: lookup::AlertReceiver<'_>, + params::AlertSubscriptionCreate { subscription}: params::AlertSubscriptionCreate, + ) -> CreateResult { + let (authz_rx,) = rx.lookup_for(authz::Action::Modify).await?; + let db_subscription = nexus_db_model::AlertSubscriptionKind::try_from( + subscription.clone(), + )?; + let _ = self + .datastore() + .alert_subscription_add(opctx, &authz_rx, db_subscription) + .await?; + Ok(views::AlertSubscriptionCreated { subscription }) + } + + pub async fn alert_receiver_subscription_remove( + &self, + opctx: &OpContext, + rx: lookup::AlertReceiver<'_>, + subscription: shared::AlertSubscription, + ) -> DeleteResult { + let (authz_rx,) = rx.lookup_for(authz::Action::Modify).await?; + let db_subscription = + nexus_db_model::AlertSubscriptionKind::try_from(subscription)?; + let _ = self + .datastore() + .alert_subscription_remove(opctx, &authz_rx, db_subscription) + .await?; + Ok(()) + } + + pub async fn alert_receiver_resend( + &self, + opctx: &OpContext, + rx: lookup::AlertReceiver<'_>, + event: lookup::Alert<'_>, + ) -> CreateResult { + let (authz_rx,) = rx.lookup_for(authz::Action::CreateChild).await?; + let (authz_event, event) = event.fetch().await?; + let datastore = self.datastore(); + + let is_subscribed = datastore + .alert_rx_is_subscribed_to_alert(opctx, &authz_rx, &authz_event) + .await?; + if !is_subscribed { + return Err(Error::invalid_request(format!( + "cannot resend alert: receiver is not subscribed to the '{}' \ + alert class", + event.class, + ))); + } + + let delivery = WebhookDelivery::new( + &event.id(), + &authz_rx.id(), + AlertDeliveryTrigger::Resend, + ); + let delivery_id = delivery.id.into(); + + if let Err(e) = + datastore.webhook_delivery_create_batch(opctx, vec![delivery]).await + { + slog::error!( + &opctx.log, + "failed to create new delivery to resend webhook alert"; + "rx_id" => ?authz_rx.id(), + "alert_id" => ?authz_event.id(), + "alert_class" => %event.class, + "delivery_id" => ?delivery_id, + "error" => %e, + ); + return Err(e); + } + + slog::info!( + &opctx.log, + "resending webhook event"; + "rx_id" => ?authz_rx.id(), + "alert_id" => ?authz_event.id(), + "alert_class" => %event.class, + "delivery_id" => ?delivery_id, + ); + + self.background_tasks.task_webhook_deliverator.activate(); + Ok(delivery_id) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::Nexus; + use std::num::NonZeroU32; + + #[test] + fn test_alert_class_list() { + #[track_caller] + fn list( + filter: Option<&str>, + last_seen: Option<&str>, + limit: u32, + ) -> Vec { + let filter = params::AlertClassFilter { + filter: dbg!(filter).map(|f| f.parse().unwrap()), + }; + let marker = dbg!(last_seen).map(|last_seen| { + params::AlertClassPage { last_seen: last_seen.to_string() } + }); + let result = Nexus::actually_list_alert_classes( + filter, + DataPageParams { + marker: marker.as_ref(), + direction: dropshot::PaginationOrder::Ascending, + limit: NonZeroU32::new(dbg!(limit)).unwrap(), + }, + ); + + // Throw away the description fields + dbg!(result) + .unwrap() + .into_iter() + .map(|view| view.name) + .collect::>() + } + + // Paginated class list, without a glob filter. 
+ let classes = list(None, None, 3); + assert_eq!(classes, &["probe", "test.foo", "test.foo.bar"]); + let classes = list(None, Some("test.foo.bar"), 3); + assert_eq!( + classes, + &["test.foo.baz", "test.quux.bar", "test.quux.bar.baz"] + ); + // Don't assert that a third list will return no more results, since + // more events may be added in the future, and we don't have a filter. + + // Try a filter for only `test.**` events. + let filter = Some("test.**"); + let classes = list(filter, None, 2); + assert_eq!(classes, &["test.foo", "test.foo.bar"]); + let classes = list(filter, Some("test.foo.bar"), 2); + assert_eq!(classes, &["test.foo.baz", "test.quux.bar"]); + let classes = list(filter, Some("test.quux.bar"), 2); + assert_eq!(classes, &["test.quux.bar.baz"]); + let classes = list(filter, Some("test.quux.bar.baz"), 2); + assert_eq!(classes, Vec::::new()); + } +} diff --git a/nexus/src/app/background/init.rs b/nexus/src/app/background/init.rs index 2da3b146a06..b19c6ed5fcb 100644 --- a/nexus/src/app/background/init.rs +++ b/nexus/src/app/background/init.rs @@ -90,6 +90,7 @@ use super::Driver; use super::driver::TaskDefinition; use super::tasks::abandoned_vmm_reaper; +use super::tasks::alert_dispatcher::AlertDispatcher; use super::tasks::bfd; use super::tasks::blueprint_execution; use super::tasks::blueprint_load; @@ -125,7 +126,6 @@ use super::tasks::tuf_artifact_replication; use super::tasks::v2p_mappings::V2PManager; use super::tasks::vpc_routes; use super::tasks::webhook_deliverator; -use super::tasks::webhook_dispatcher::WebhookDispatcher; use crate::Nexus; use crate::app::oximeter::PRODUCER_LEASE_DURATION; use crate::app::saga::StartSaga; @@ -224,7 +224,7 @@ impl BackgroundTasksInitializer { task_region_snapshot_replacement_finish: Activator::new(), task_tuf_artifact_replication: Activator::new(), task_read_only_region_replacement_start: Activator::new(), - task_webhook_dispatcher: Activator::new(), + task_alert_dispatcher: Activator::new(), task_webhook_deliverator: Activator::new(), task_internal_dns_propagation: Activator::new(), @@ -298,7 +298,7 @@ impl BackgroundTasksInitializer { task_region_snapshot_replacement_finish, task_tuf_artifact_replication, task_read_only_region_replacement_start, - task_webhook_dispatcher, + task_alert_dispatcher, task_webhook_deliverator, // Add new background tasks here. Be sure to use this binding in a // call to `Driver::register()` below. 
That's what actually wires @@ -872,16 +872,16 @@ impl BackgroundTasksInitializer { }); driver.register(TaskDefinition { - name: "webhook_dispatcher", - description: "dispatches queued webhook events to receivers", - period: config.webhook_dispatcher.period_secs, - task_impl: Box::new(WebhookDispatcher::new( + name: "alert_dispatcher", + description: "dispatches queued alerts to receivers", + period: config.alert_dispatcher.period_secs, + task_impl: Box::new(AlertDispatcher::new( datastore.clone(), task_webhook_deliverator.clone(), )), opctx: opctx.child(BTreeMap::new()), watchers: vec![], - activator: task_webhook_dispatcher, + activator: task_alert_dispatcher, }); driver.register({ diff --git a/nexus/src/app/background/tasks/webhook_dispatcher.rs b/nexus/src/app/background/tasks/alert_dispatcher.rs similarity index 81% rename from nexus/src/app/background/tasks/webhook_dispatcher.rs rename to nexus/src/app/background/tasks/alert_dispatcher.rs index 17f9f0d5700..ad28a06ed9a 100644 --- a/nexus/src/app/background/tasks/webhook_dispatcher.rs +++ b/nexus/src/app/background/tasks/alert_dispatcher.rs @@ -4,7 +4,7 @@ //! Background task that dispatches queued webhook events to receivers. //! -//! This task reads un-dispatched webhook events from the [`WebhookEvent`] +//! This task reads un-dispatched webhook events from the [`Alert`] //! table, determines which webhook receivers are subscribed to those events, //! and constructs the event payload for those receivers. It then inserts new //! records into the [`WebhookDelivery`] table for those deliveries, which are @@ -18,16 +18,16 @@ //! and how they fit together, refer to the comments in the [`app::webhook`] //! module. //! -//! [`WebhookEvent`]: nexus_db_model::WebhookEvent +//! [`Alert`]: nexus_db_model::Alert //! [`webhook_deliverator`]: super::webhook_deliverator //! 
[`app::webhook`]: crate::app::webhook use crate::app::background::Activator; use crate::app::background::BackgroundTask; use futures::future::BoxFuture; +use nexus_db_model::AlertDeliveryTrigger; use nexus_db_model::SCHEMA_VERSION; use nexus_db_model::WebhookDelivery; -use nexus_db_model::WebhookDeliveryTrigger; use nexus_db_queries::context::OpContext; use nexus_db_queries::db::DataStore; use nexus_db_queries::db::datastore::SQL_BATCH_SIZE; @@ -35,24 +35,24 @@ use nexus_db_queries::db::pagination::Paginator; use nexus_types::identity::Asset; use nexus_types::identity::Resource; use nexus_types::internal_api::background::{ - WebhookDispatched, WebhookDispatcherStatus, WebhookGlobStatus, + AlertDispatched, AlertDispatcherStatus, AlertGlobStatus, }; use omicron_common::api::external::Error; use omicron_uuid_kinds::GenericUuid; use std::sync::Arc; -pub struct WebhookDispatcher { +pub struct AlertDispatcher { datastore: Arc, deliverator: Activator, } -impl BackgroundTask for WebhookDispatcher { +impl BackgroundTask for AlertDispatcher { fn activate<'a>( &'a mut self, opctx: &'a OpContext, ) -> BoxFuture<'a, serde_json::Value> { Box::pin(async move { - let mut status = WebhookDispatcherStatus { + let mut status = AlertDispatcherStatus { globs_reprocessed: Default::default(), glob_version: SCHEMA_VERSION, dispatched: Vec::new(), @@ -113,7 +113,7 @@ impl BackgroundTask for WebhookDispatcher { } } -impl WebhookDispatcher { +impl AlertDispatcher { pub fn new(datastore: Arc, deliverator: Activator) -> Self { Self { datastore, deliverator } } @@ -121,7 +121,7 @@ impl WebhookDispatcher { async fn actually_activate( &mut self, opctx: &OpContext, - status: &mut WebhookDispatcherStatus, + status: &mut AlertDispatcherStatus, ) -> Result<(), Error> { // Before dispatching any events, ensure that all webhook globs are up // to date with the current schema version. This has to be done before @@ -136,7 +136,7 @@ impl WebhookDispatcher { while let Some(p) = paginator.next() { let batch = self .datastore - .webhook_glob_list_reprocessable(opctx, &p.current_pagparams()) + .alert_glob_list_reprocessable(opctx, &p.current_pagparams()) .await .map_err(|e| { e.internal_context("failed to list outdated webhook globs") @@ -154,7 +154,7 @@ impl WebhookDispatcher { for glob in batch { let result = self .datastore - .webhook_glob_reprocess(opctx, &glob) + .alert_glob_reprocess(opctx, &glob) .await .map_err(|e| { globs_failed += 1; @@ -169,10 +169,10 @@ impl WebhookDispatcher { e.to_string() }) .inspect(|status| match status { - WebhookGlobStatus::Reprocessed { .. } => { + AlertGlobStatus::Reprocessed { .. } => { globs_reprocessed += 1 } - WebhookGlobStatus::AlreadyReprocessed => { + AlertGlobStatus::AlreadyReprocessed => { globs_already_reprocessed += 1 } }); @@ -203,38 +203,38 @@ impl WebhookDispatcher { // Select the next event that has yet to be dispatched in order of // creation, until there are none left in need of dispatching. while let Some(event) = - self.datastore.webhook_event_select_next_for_dispatch(opctx).await? + self.datastore.alert_select_next_for_dispatch(opctx).await? { slog::trace!( &opctx.log, "dispatching webhook event..."; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, ); // Okay, we found an event that needs to be dispatched. Next, get - // list the webhook receivers subscribed to this event class and + // list the alert receivers subscribed to this event class and // create delivery records for them. 
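Before the dispatcher can ask which receivers should get an alert (the `alert_rx_list_subscribed_to_event` call just below), any glob subscriptions must already have been materialized into exact per-class rows, which is what the reprocessing loop above guarantees. Conceptually the expansion looks like the sketch below; this is not the datastore implementation, just an illustration of the rule, and it assumes `AlertClass::ALL_CLASSES` and the regex stored for the glob are available as shown elsewhere in this change.

```rust
use nexus_db_model::AlertClass;

// Hedged sketch, not the real implementation: a glob subscription such as
// "test.*.bar" is compiled to a regex when it is created, and expanding (or
// reprocessing) it means matching that regex against every currently known
// alert class, materializing one exact subscription per match. Reprocessing
// re-runs this whenever the schema version recorded on the glob is older
// than the running schema.
fn classes_matching_glob(glob_regex: &regex::Regex) -> Vec<AlertClass> {
    AlertClass::ALL_CLASSES
        .iter()
        .copied()
        .filter(|class| glob_regex.is_match(class.as_str()))
        .collect()
}
```

With the exact rows materialized, the subscription query below only needs to match on the alert's class; no glob evaluation happens at dispatch time.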
let rxs = match self .datastore - .webhook_rx_list_subscribed_to_event(&opctx, event.event_class) + .alert_rx_list_subscribed_to_event(&opctx, event.class) .await { Ok(rxs) => rxs, Err(error) => { const MSG: &str = - "failed to list webhook receivers subscribed to event"; + "failed to list alert receivers subscribed to event"; slog::error!( &opctx.log, "{MSG}"; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, "error" => &error, ); status.errors.push(format!( "{MSG} {} ({}): {error}", event.id(), - event.event_class + event.class )); // We weren't able to find receivers for this event, so // *don't* mark it as dispatched --- it's someone else's @@ -246,24 +246,24 @@ impl WebhookDispatcher { let deliveries: Vec = rxs .into_iter() .map(|(rx, sub)| { - // NOTE: In the future, if we add support for webhook receivers + // NOTE: In the future, if we add support for alert receivers // with roles other than 'fleet.viewer' (as described in // https://rfd.shared.oxide.computer/rfd/538#rbac-filtering), // this might be where we filter the actual dispatched payload // based on the individual receiver's permissions. slog::trace!( &opctx.log, - "webhook receiver is subscribed to event"; + "alert receiver is subscribed to event"; "rx_name" => %rx.name(), "rx_id" => ?rx.id(), - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, "glob" => ?sub.glob, ); WebhookDelivery::new( &event.id(), &rx.id(), - WebhookDeliveryTrigger::Event, + AlertDeliveryTrigger::Alert, ) }) .collect(); @@ -280,8 +280,8 @@ impl WebhookDispatcher { slog::error!( &opctx.log, "failed to insert webhook deliveries"; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, "error" => %error, "num_subscribed" => ?subscribed, ); @@ -289,23 +289,23 @@ impl WebhookDispatcher { "failed to insert {subscribed} webhook deliveries \ for event {} ({}): {error}", event.id(), - event.event_class, + event.class, )); // We weren't able to create deliveries for this event, so // *don't* mark it as dispatched. 
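The `continue` just below is what leaves such an alert queued for a later pass. For clarity, the loop's overall policy can be restated as a small sketch (hypothetical types, not part of the change):

```rust
// Illustration only: an alert is marked dispatched once delivery rows exist
// for every subscribed receiver (possibly zero of them); any failure along
// the way leaves it un-dispatched so that a later dispatcher activation, on
// this Nexus or another, will pick it up again.
#[allow(dead_code)]
enum DispatchOutcome {
    /// Deliveries created (or nobody is subscribed): mark the alert dispatched.
    MarkDispatched { subscribed: usize, dispatched: usize },
    /// Listing receivers or inserting deliveries failed: leave it queued.
    LeaveQueued,
}
```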
continue; } }; - status.dispatched.push(WebhookDispatched { - event_id: event.id(), + status.dispatched.push(AlertDispatched { + alert_id: event.id(), subscribed, dispatched, }); slog::debug!( &opctx.log, "dispatched webhook event"; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, "num_subscribed" => subscribed, "num_dispatched" => dispatched, ); @@ -314,8 +314,8 @@ impl WebhookDispatcher { slog::debug!( &opctx.log, "no webhook receivers subscribed to event"; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, ); status.no_receivers.push(event.id()); 0 @@ -323,14 +323,14 @@ impl WebhookDispatcher { if let Err(error) = self .datastore - .webhook_event_mark_dispatched(&opctx, &event.id(), subscribed) + .alert_mark_dispatched(&opctx, &event.id(), subscribed) .await { slog::error!( &opctx.log, "failed to mark webhook event as dispatched"; - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, "error" => %error, "num_subscribed" => subscribed, ); @@ -338,7 +338,7 @@ impl WebhookDispatcher { "failed to mark webhook event {} ({}) as dispatched: \ {error}", event.id(), - event.event_class, + event.class, )); } } @@ -354,8 +354,8 @@ mod test { use nexus_db_queries::db; use nexus_test_utils_macros::nexus_test; use omicron_common::api::external::IdentityMetadataCreateParams; - use omicron_uuid_kinds::WebhookEventUuid; - use omicron_uuid_kinds::WebhookReceiverUuid; + use omicron_uuid_kinds::AlertReceiverUuid; + use omicron_uuid_kinds::AlertUuid; type ControlPlaneTestContext = nexus_test_utils::ControlPlaneTestContext; @@ -364,9 +364,9 @@ mod test { // dispatching. #[nexus_test(server = crate::Server)] async fn test_glob_reprocessing(cptestctx: &ControlPlaneTestContext) { - use nexus_db_schema::schema::webhook_receiver::dsl as rx_dsl; - use nexus_db_schema::schema::webhook_rx_event_glob::dsl as glob_dsl; - use nexus_db_schema::schema::webhook_rx_subscription::dsl as subscription_dsl; + use nexus_db_schema::schema::alert_glob::dsl as glob_dsl; + use nexus_db_schema::schema::alert_receiver::dsl as rx_dsl; + use nexus_db_schema::schema::alert_subscription::dsl as subscription_dsl; let nexus = &cptestctx.server.server_context().nexus; let datastore = nexus.datastore(); @@ -374,7 +374,7 @@ mod test { cptestctx.logctx.log.clone(), datastore.clone(), ); - let rx_id = WebhookReceiverUuid::new_v4(); + let rx_id = AlertReceiverUuid::new_v4(); let conn = datastore .pool_connection_for_tests() .await @@ -383,9 +383,9 @@ mod test { // Unfortunately, we've gotta hand-create the receiver and its // subscriptions, so that we can create a set of globs that differs from // those generated by the currrent schema. 
- diesel::insert_into(rx_dsl::webhook_receiver) - .values(db::model::WebhookReceiver { - identity: db::model::WebhookReceiverIdentity::new( + diesel::insert_into(rx_dsl::alert_receiver) + .values(db::model::AlertReceiver { + identity: db::model::AlertReceiverIdentity::new( rx_id, IdentityMetadataCreateParams { name: "my-cool-webhook".parse().unwrap(), @@ -403,22 +403,22 @@ mod test { const GLOB_PATTERN: &str = "test.*.bar"; let glob = GLOB_PATTERN - .parse::() + .parse::() .expect("'test.*.bar should be an acceptable glob"); - let mut glob = db::model::WebhookRxEventGlob::new(rx_id, glob); + let mut glob = db::model::AlertRxGlob::new(rx_id, glob); // Just make something up that's obviously outdated... glob.schema_version = Some(db::model::SemverVersion::new(100, 0, 0)); - diesel::insert_into(glob_dsl::webhook_rx_event_glob) + diesel::insert_into(glob_dsl::alert_glob) .values(glob.clone()) .execute_async(&*conn) .await .expect("should insert glob entry"); - diesel::insert_into(subscription_dsl::webhook_rx_subscription) + diesel::insert_into(subscription_dsl::alert_subscription) .values( // Pretend `test.quux.bar` doesn't exist yet - db::model::WebhookRxSubscription::for_glob( + db::model::AlertRxSubscription::for_glob( &glob, - db::model::WebhookEventClass::TestFooBar, + db::model::AlertClass::TestFooBar, ), ) .execute_async(&*conn) @@ -427,7 +427,7 @@ mod test { // Also give the webhook receiver a secret just so everything // looks normalish. let (authz_rx, _) = nexus_db_lookup::LookupPath::new(&opctx, datastore) - .webhook_receiver_id(rx_id) + .alert_receiver_id(rx_id) .fetch() .await .expect("webhook rx should be there"); @@ -443,24 +443,24 @@ mod test { // OKAY GREAT NOW THAT WE DID ALL THAT STUFF let's see if it actually // works... - // N.B. that we are using the `DataStore::webhook_event_create` method - // rather than `Nexus::webhook_event_publish` (the expected entrypoint - // to publishing a webhook event) because `webhook_event_publish` also + // N.B. that we are using the `DataStore::alert_create` method + // rather than `Nexus::alert_publish` (the expected entrypoint + // to publishing a webhook event) because `alert_publish` also // activates the dispatcher task, and for this test, we would like to be // responsible for activating it. - let event_id = WebhookEventUuid::new_v4(); + let alert_id = AlertUuid::new_v4(); datastore - .webhook_event_create( + .alert_create( &opctx, - event_id, - db::model::WebhookEventClass::TestQuuxBar, + alert_id, + db::model::AlertClass::TestQuuxBar, serde_json::json!({"msg": "help im trapped in a webhook event factory"}), ) .await .expect("creating the event should work"); // okay now do the thing - let mut status = WebhookDispatcherStatus { + let mut status = AlertDispatcherStatus { globs_reprocessed: Default::default(), glob_version: SCHEMA_VERSION, dispatched: Vec::new(), @@ -468,7 +468,7 @@ mod test { no_receivers: Vec::new(), }; - let mut task = WebhookDispatcher::new( + let mut task = AlertDispatcher::new( datastore.clone(), nexus.background_tasks.task_webhook_deliverator.clone(), ); @@ -478,9 +478,9 @@ mod test { // The globs should have been reprocessed, creating a subscription to // `test.quux.bar`. 
- let subscriptions = subscription_dsl::webhook_rx_subscription + let subscriptions = subscription_dsl::alert_subscription .filter(subscription_dsl::rx_id.eq(rx_id.into_untyped_uuid())) - .load_async::(&*conn) + .load_async::(&*conn) .await .expect("should be able to get subscriptions") .into_iter() @@ -491,18 +491,18 @@ mod test { sub.glob.as_deref(), Some(GLOB_PATTERN), "found a subscription to {} that was not from our glob: {sub:?}", - sub.event_class, + sub.class, ); - sub.event_class + sub.class }).collect::>(); assert_eq!(subscriptions.len(), 2); assert!( - subscriptions.contains(&db::model::WebhookEventClass::TestFooBar), + subscriptions.contains(&db::model::AlertClass::TestFooBar), "subscription to test.foo.bar should exist; subscriptions: \ {subscriptions:?}", ); assert!( - subscriptions.contains(&db::model::WebhookEventClass::TestQuuxBar), + subscriptions.contains(&db::model::AlertClass::TestQuuxBar), "subscription to test.quux.bar should exist; subscriptions: \ {subscriptions:?}", ); @@ -514,7 +514,7 @@ mod test { assert!( matches!( reprocessed_entry, - Some(Ok(WebhookGlobStatus::Reprocessed { .. })) + Some(Ok(AlertGlobStatus::Reprocessed { .. })) ), "glob status should be 'reprocessed'" ); @@ -534,7 +534,7 @@ mod test { .webhook_rx_delivery_list( &opctx, &rx_id, - &[WebhookDeliveryTrigger::Event], + &[AlertDeliveryTrigger::Alert], Vec::new(), &p.current_pagparams(), ) @@ -546,7 +546,7 @@ mod test { deliveries.extend(batch); } let event = - deliveries.iter().find(|(d, _, _)| d.event_id == event_id.into()); + deliveries.iter().find(|(d, _, _)| d.alert_id == alert_id.into()); assert!( dbg!(event).is_some(), "delivery entry for dispatched event must exist" diff --git a/nexus/src/app/background/tasks/alert_subscription.rs b/nexus/src/app/background/tasks/alert_subscription.rs new file mode 100644 index 00000000000..e69de29bb2d diff --git a/nexus/src/app/background/tasks/mod.rs b/nexus/src/app/background/tasks/mod.rs index 0e06f5e99c2..48d29f18776 100644 --- a/nexus/src/app/background/tasks/mod.rs +++ b/nexus/src/app/background/tasks/mod.rs @@ -5,6 +5,7 @@ //! Implementations of specific background tasks pub mod abandoned_vmm_reaper; +pub mod alert_dispatcher; pub mod bfd; pub mod blueprint_execution; pub mod blueprint_load; @@ -41,4 +42,3 @@ pub mod tuf_artifact_replication; pub mod v2p_mappings; pub mod vpc_routes; pub mod webhook_deliverator; -pub mod webhook_dispatcher; diff --git a/nexus/src/app/background/tasks/webhook_deliverator.rs b/nexus/src/app/background/tasks/webhook_deliverator.rs index 41f202a5d7c..28d9589f3ee 100644 --- a/nexus/src/app/background/tasks/webhook_deliverator.rs +++ b/nexus/src/app/background/tasks/webhook_deliverator.rs @@ -6,7 +6,7 @@ //! active webhook deliveries. //! //! This task reads [`WebhookDelivery`] records from the database (created by the -//! [`webhook_dispatcher`] task) and sends HTTP requests to the receivers for +//! [`alert_dispatcher`] task) and sends HTTP requests to the receivers for //! those records. The deliverator is responsible for recording the status of //! each of these attempts, and for retrying failed attempts as needed. For //! an overview of all the components of the webhook subsystem, their roles, and @@ -27,7 +27,7 @@ //! eventually time out, and other Nexii will attempt that delivery. //! //! [`WebhookDelivery`]: nexus_db_model::WebhookDelivery -//! [`webhook_dispatcher`]: super::webhook_dispatcher +//! [`alert_dispatcher`]: super::alert_dispatcher //! 
[`app::webhook`]: crate::app::webhook use crate::app::background::BackgroundTask; @@ -158,10 +158,7 @@ impl WebhookDeliverator { while let Some(p) = paginator.next() { let rxs = self .datastore - .webhook_rx_list( - &opctx, - &PaginatedBy::Id(p.current_pagparams()), - ) + .alert_rx_list(&opctx, &PaginatedBy::Id(p.current_pagparams())) .await?; paginator = p .found_batch(&rxs, &|WebhookReceiverConfig { rx, .. }| { @@ -230,7 +227,7 @@ impl WebhookDeliverator { ..Default::default() }; - for DeliveryAndEvent { delivery, event_class, event } in deliveries { + for DeliveryAndEvent { delivery, alert_class, event } in deliveries { let attempt = (*delivery.attempts) + 1; let delivery_id = WebhookDeliveryUuid::from(delivery.id); match self @@ -246,8 +243,8 @@ impl WebhookDeliverator { Ok(DeliveryAttemptState::Started) => { slog::trace!(&opctx.log, "webhook event delivery attempt started"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery_id, "attempt" => ?attempt, ); @@ -257,8 +254,8 @@ impl WebhookDeliverator { &opctx.log, "delivery of this webhook event was already completed \ at {time:?}"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery_id, "time_completed" => ?time, ); @@ -270,8 +267,8 @@ impl WebhookDeliverator { &opctx.log, "delivery of this webhook event is in progress by \ another Nexus"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery_id, "nexus_id" => %nexus_id, "time_started" => ?started, @@ -284,8 +281,8 @@ impl WebhookDeliverator { &opctx.log, "unexpected database error error starting webhook \ delivery attempt"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery_id, "error" => %error, ); @@ -298,7 +295,7 @@ impl WebhookDeliverator { // okay, actually do the thing... let delivery_attempt = match client - .send_delivery_request(opctx, &delivery, event_class, &event) + .send_delivery_request(opctx, &delivery, alert_class, &event) .await { Ok(delivery) => delivery, @@ -324,8 +321,8 @@ impl WebhookDeliverator { slog::error!( &opctx.log, "{MSG}"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery_id, "error" => %e, ); @@ -342,7 +339,7 @@ impl WebhookDeliverator { delivery_status.failed_deliveries.push( WebhookDeliveryFailure { delivery_id, - event_id: delivery.event_id.into(), + alert_id: delivery.alert_id.into(), attempt: delivery_attempt.attempt.0 as usize, result: delivery_attempt.result.into(), response_status: delivery_attempt diff --git a/nexus/src/app/mod.rs b/nexus/src/app/mod.rs index 1291f369c73..bbf90e16eb5 100644 --- a/nexus/src/app/mod.rs +++ b/nexus/src/app/mod.rs @@ -53,6 +53,7 @@ use uuid::Uuid; // by resource. mod address_lot; mod affinity; +mod alert; mod allow_list; pub(crate) mod background; mod bfd; diff --git a/nexus/src/app/webhook.rs b/nexus/src/app/webhook.rs index af219c36e4b..55b7661932e 100644 --- a/nexus/src/app/webhook.rs +++ b/nexus/src/app/webhook.rs @@ -2,137 +2,33 @@ // License, v. 2.0. 
If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. -//! Webhooks +//! # Webhooks //! -//! # Webhooks: Theory and Practice +//! Webhooks provide a mechanism for delivering [alerts] by sending an HTTP +//! request representing the alert to an external HTTP server. //! //! [RFD 538] describes the user-facing interface for Oxide rack webhooks. //! However, that RFD does not describe internal implementation details of the //! webhook implementation, the key players, their roles, and interactions. -//! Instead, the implementation of webhooks are discussed here. +//! For details on the internals of alert delivery, including webhooks, see the +//! documenation in the [`alert` module][alerts]. //! -//! ## Dramatis Personae -//! -//! There are two key elements in our webhook design: -//! -//! + **Webhook receivers** are the endpoints external to the rack to which -//! webhook requests are sent. In the context of the control plane, the term -//! "webhook receiver" refers to the configuration and state associated with -//! such an endpoint. Most other entities in the webhook API are chiild -//! resources of the [`WebhookReceiver`] API resource. -//! -//! + **Webhook events** represent events in the system for which webhook -//! notifications are generated and sent to receivers. The control plane -//! calls the [`Nexus::webhook_event_publish`] method to record a new event -//! and publish it to receivers. -//! -//! Events are categorized into [event classes], as described in RFD -//! 538. Receivers *subscribe* to these classes, indicating that they wish to -//! when an event with a particular class occurs. -//! -//! Two background tasks implement the reliable persistent workflow of -//! determining what events should be sent to what receiver, and performing the -//! actual HTTP requests to send the event to the receiver: -//! -//! + The `webhook_dispatcher` task is responsible for *dispatching* events to -//! receivers. For each event that has not yet been dispatched, the task -//! queries the database for webhook receivers that have subscribed to that -//! event, and creates a *delivery record* in the `webhook_delivery` table, -//! indicating that the event should be sent to that receiver. -//! -//! + The `webhook_deliverator`[^1] task reads these delivery records and sends -//! HTTP requests to the receiver endpoint for each delivery that is -//! currently in flight. The deliverator is responsible for recording the -//! status of each *delivery attempt*. Retries and retry backoff are -//! the responsibility of the deliverator. -//! -//! ## Event Subscriptions -//! -//! A receiver's subscriptions take one of two forms: -//! -//! + **Exact** subscriptions are when a receiver subscribes to a specific event -//! class string. These are represented by entries in the -//! `webhook_rx_event_subscription` table in CockroachDB. -//! -//! + **Glob** subscriptions include wildcard segments that may match multiple -//! values. The globbing syntax is discussed in greater detail in RFD 538. -//! -//! We implement glob subscriptions by evaluating the glob against the list of -//! known webhook event classes when the glob is *created*, and creating -//! corresponding exact subscriptions for each event class that matches the -//! glob. This way, we need not perform complex pattern matching in the -//! database when dispatching an event, and can instead simply query for the -//! existence of a record in the `webhook_rx_event_subscription` table. Each -//! 
exact subscription entry generated by a glob records which glob it came -//! from, which is used when a receiver's subscriptions change. -//! -//! Because the generation of exact subscriptions from globs occurs when the -//! subscription is created, globs must be *reprocessed* when new event classes -//! are added to the system, generating new exact subscriptions for any -//! newly-added event classes that match the glob, and potentially removing -//! subscriptions to any defunct event classes This could occur in any software -//! release where new kinds of events are implemented. Therefore, when glob -//! subscriptions are created, we record the database schema version as part of -//! that glob subscription. Because event classes are represented as a SQL -//! `enum` type, we know that any change to the event classes should change the -//! database schema version as well. This way, we can detect whether a glob's -//! list of subscriptions are up to date. The `webhook_dispatcher` background -//! task will query the database for any globs which were last reprocessed at -//! earlier database schema versions and reprocess those globs prior to -//! attempting to dispatch events to receivers. -//! -//! ## Deliveries, Delivery Attempts, and Liveness Probes -//! -//! A *delivery* represents the process of sending HTTP request(s) representing -//! a webhook event to a receiver. Failed HTTP requests are retried up to two -//! times, so a delivery may consist of up to three *delivery attempts*. -//! Each time the `webhook_deliverator` background task is activated, it -//! searches for deliveries which have not yet succeeded or permanently failed, -//! which are not presently being delivered by another Nexus, and for which the -//! backoff period for any prior failed delivery attempts has elapsed. It then -//! sends an HTTP request to the webhook receiver, and records the result, -//! creating a new `webhook_delivery_attempt` record and updating the -//! `webhook_delivery` record. -//! -//! Multiple Nexii use an advisory lease mechanism to avoid attempting to -//! deliver the same event simultaneously, by setting their UUID and a timestamp -//! on the `webhook_delivery` record. Because webhook delivery is -//! at-least-once, this lease mechanism is NOT REQUIRED FOR CORRECTNESS IN ANY -//! WAY, Andrew. :) Instead, it serves only to reduce duplicate work. -//! Therefore, should a Nexus acquire a lease on a delivery and fail to either -//! complete the delivery attempt within a period of time, another Nexus is -//! permitted to clobber its lease. -//! -//! Deliveries are created either because an event occurred and a webhook -//! receiver is subscribed to it, or because we were asked to resend a previous -//! delivery that failed permanently by exhausting its retry budget. Initial -//! deliveries are created by activations of the webhook dispatcher background -//! task. When creating a delivery, the data associated with the event record -//! in the `webhook_event` table is processed to produce the data payload that -//! will actually be sent to the receiver. Data which the receiver's service -//! account is not authorized to read is filtered out of the payload.[^2] -//! -//! Re-delivery of an event can be requested either via the event resend API -//! endpoint, or by a *liveness probe* succeeding. Liveness probes are -//! synthetic delivery requests sent to a webhook receiver to check whether it's -//! actually able to receive an event. They are triggered via the -//! 
[`Nexus::webhook_receiver_probe`] API endpoint. A probe may optionally -//! request that any events for which all past deliveries have failed be resent -//! if it succeeds. Delivery records are also created to represent the outcome -//! of a probe. +//! Generic operations on all types of alert receivers, such as listing +//! receivers and adding/removing subscriptions, are defined in the +//! [`alert` module][alerts]. Operations relating to webhook-specific +//! configurations or concepts, such as managing secrets, sending liveness +//! probes, and creating and updating webhook receiver configuration, are +//! defined here. This module also defines [`ReceiverClient`], which +//! implements the HTTP client for sending webhook requests to webhook +//! receivers. The client implementation is defined here, as it is used by +//! both the API (for probe requests) and the `webhook_deliverator` background +//! task, which performs the delivery of queued alerts. //! +//! [alerts]: super::alert //! [RFD 538]: https://rfd.shared.oxide.computer/538 -//! [event classes]: https://rfd.shared.oxide.computer/rfd/538#_event_classes -//! -//! [^1]: Read _Snow Crash_, if you haven't already. -//! [^2]: Presently, all weebhook receivers have the fleet.viewer role, so -//! this "filtering" doesn't actually do anything. When webhook receivers -//! with more restrictive permissions are implemented, please rememvber to -//! delete this footnote. use crate::Nexus; use anyhow::Context; -use chrono::DateTime; use chrono::TimeDelta; use chrono::Utc; use hmac::{Hmac, Mac}; @@ -142,105 +38,39 @@ use nexus_db_lookup::LookupPath; use nexus_db_lookup::lookup; use nexus_db_queries::authz; use nexus_db_queries::context::OpContext; +use nexus_db_queries::db::model::AlertClass; +use nexus_db_queries::db::model::AlertDeliveryState; +use nexus_db_queries::db::model::AlertDeliveryTrigger; +use nexus_db_queries::db::model::AlertReceiver; use nexus_db_queries::db::model::SqlU8; use nexus_db_queries::db::model::WebhookDelivery; use nexus_db_queries::db::model::WebhookDeliveryAttempt; use nexus_db_queries::db::model::WebhookDeliveryAttemptResult; -use nexus_db_queries::db::model::WebhookDeliveryState; -use nexus_db_queries::db::model::WebhookDeliveryTrigger; -use nexus_db_queries::db::model::WebhookEvent; -use nexus_db_queries::db::model::WebhookEventClass; -use nexus_db_queries::db::model::WebhookReceiver; use nexus_db_queries::db::model::WebhookReceiverConfig; use nexus_db_queries::db::model::WebhookSecret; use nexus_types::external_api::params; -use nexus_types::external_api::shared; use nexus_types::external_api::views; use nexus_types::identity::Asset; use nexus_types::identity::Resource; use omicron_common::api::external::CreateResult; -use omicron_common::api::external::DataPageParams; use omicron_common::api::external::DeleteResult; use omicron_common::api::external::Error; use omicron_common::api::external::ListResultVec; use omicron_common::api::external::LookupResult; -use omicron_common::api::external::NameOrId; use omicron_common::api::external::UpdateResult; -use omicron_common::api::external::http_pagination::PaginatedBy; +use omicron_uuid_kinds::AlertReceiverUuid; +use omicron_uuid_kinds::AlertUuid; use omicron_uuid_kinds::GenericUuid; use omicron_uuid_kinds::OmicronZoneUuid; use omicron_uuid_kinds::WebhookDeliveryAttemptUuid; use omicron_uuid_kinds::WebhookDeliveryUuid; -use omicron_uuid_kinds::WebhookEventUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use omicron_uuid_kinds::WebhookSecretUuid; use sha2::Sha256; 
use std::sync::LazyLock; use std::time::Duration; use std::time::Instant; -use uuid::Uuid; impl Nexus { - /// Publish a new webhook event, with the provided `id`, `event_class`, and - /// JSON data payload. - /// - /// If this method returns `Ok`, the event has been durably recorded in - /// CockroachDB. Once the new event record is inserted into the database, - /// the webhook dispatcher background task is activated to dispatch the - /// event to receivers. However, if (for whatever reason) this Nexus fails - /// to do that, the event remains durably in the database to be dispatched - /// and delivered by someone else. - pub async fn webhook_event_publish( - &self, - opctx: &OpContext, - id: WebhookEventUuid, - event_class: WebhookEventClass, - event: serde_json::Value, - ) -> Result { - let event = self - .datastore() - .webhook_event_create(opctx, id, event_class, event) - .await?; - slog::debug!( - &opctx.log, - "enqueued webhook event"; - "event_id" => ?id, - "event_class" => %event.event_class, - "time_created" => ?event.identity.time_created, - ); - - // Once the event has been inserted, activate the dispatcher task to - // ensure its propagated to receivers. - self.background_tasks.task_webhook_dispatcher.activate(); - - Ok(event) - } - - // - // Lookups - // - - pub fn webhook_receiver_lookup<'a>( - &'a self, - opctx: &'a OpContext, - webhook_selector: params::WebhookReceiverSelector, - ) -> LookupResult> { - match webhook_selector.receiver { - NameOrId::Id(id) => { - let webhook = LookupPath::new(opctx, &self.db_datastore) - .webhook_receiver_id( - WebhookReceiverUuid::from_untyped_uuid(id), - ); - Ok(webhook) - } - NameOrId::Name(name) => { - let webhook = LookupPath::new(opctx, &self.db_datastore) - .webhook_receiver_name_owned(name.into()); - Ok(webhook) - } - } - } - pub fn webhook_secret_lookup<'a>( &'a self, opctx: &'a OpContext, @@ -253,130 +83,6 @@ impl Nexus { Ok(lookup) } - pub fn webhook_event_lookup<'a>( - &'a self, - opctx: &'a OpContext, - params::WebhookEventSelector { event_id }: params::WebhookEventSelector, - ) -> LookupResult> { - let event = LookupPath::new(opctx, &self.db_datastore) - .webhook_event_id(WebhookEventUuid::from_untyped_uuid(event_id)); - Ok(event) - } - - // - // Event class API - // - pub async fn webhook_event_class_list( - &self, - opctx: &OpContext, - filter: params::EventClassFilter, - pagparams: DataPageParams<'_, params::EventClassPage>, - ) -> ListResultVec { - opctx - .authorize( - authz::Action::ListChildren, - &authz::WEBHOOK_EVENT_CLASS_LIST, - ) - .await?; - Self::actually_list_event_classes(filter, pagparams) - } - - // This is factored out to avoid having to make a whole Nexus to test it. - fn actually_list_event_classes( - params::EventClassFilter { filter }: params::EventClassFilter, - pagparams: DataPageParams<'_, params::EventClassPage>, - ) -> ListResultVec { - use nexus_db_model::WebhookSubscriptionKind; - - let regex = if let Some(filter) = filter { - let sub = WebhookSubscriptionKind::try_from(filter)?; - let regex_string = match sub { - WebhookSubscriptionKind::Exact(class) => class.as_str(), - WebhookSubscriptionKind::Glob(ref glob) => glob.regex.as_str(), - }; - let re = regex::Regex::new(regex_string).map_err(|e| { - // This oughtn't happen, provided the code for producing the - // regex for a glob is correct. 
- Error::InternalError { - internal_message: format!( - "valid event class globs ({sub:?}) should always \ - produce a valid regex, and yet: {e:?}" - ), - } - })?; - Some(re) - } else { - None - }; - - // If we're resuming a previous scan, figure out where to start. - let start = if let Some(params::EventClassPage { last_seen }) = - pagparams.marker - { - let start = WebhookEventClass::ALL_CLASSES - .iter() - .enumerate() - .find_map(|(idx, class)| { - if class.as_str() == last_seen { Some(idx) } else { None } - }); - match start { - Some(start) => start + 1, - None => return Ok(Vec::new()), - } - } else { - 0 - }; - - // This shouldn't ever happen, but...don't panic I guess. - if start > WebhookEventClass::ALL_CLASSES.len() { - return Ok(Vec::new()); - } - - let result = WebhookEventClass::ALL_CLASSES[start..] - .iter() - .filter_map(|&class| { - // Skip test classes, as they should not be used in the public - // API, except in test builds, where we need them - // for, you know... testing... - if !cfg!(test) && class.is_test() { - return None; - } - if let Some(ref regex) = regex { - if !regex.is_match(class.as_str()) { - return None; - } - } - Some(class.into()) - }) - .take(pagparams.limit.get() as usize) - .collect::>(); - Ok(result) - } - - // - // Receiver configuration API methods - // - - pub async fn webhook_receiver_list( - &self, - opctx: &OpContext, - pagparams: &PaginatedBy<'_>, - ) -> ListResultVec { - opctx.authorize(authz::Action::ListChildren, &authz::FLEET).await?; - self.datastore().webhook_rx_list(opctx, pagparams).await - } - - pub async fn webhook_receiver_config_fetch( - &self, - opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - ) -> LookupResult { - let (authz_rx, rx) = rx.fetch().await?; - let (subscriptions, secrets) = - self.datastore().webhook_rx_config_fetch(opctx, &authz_rx).await?; - Ok(WebhookReceiverConfig { rx, secrets, subscriptions }) - } - pub async fn webhook_receiver_create( &self, opctx: &OpContext, @@ -388,7 +94,7 @@ impl Nexus { pub async fn webhook_receiver_update( &self, opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, + rx: lookup::AlertReceiver<'_>, params: params::WebhookReceiverUpdate, ) -> UpdateResult<()> { let (authz_rx,) = rx.lookup_for(authz::Action::Modify).await?; @@ -402,50 +108,12 @@ impl Nexus { pub async fn webhook_receiver_delete( &self, opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, + rx: lookup::AlertReceiver<'_>, ) -> DeleteResult { let (authz_rx, db_rx) = rx.fetch_for(authz::Action::Delete).await?; self.datastore().webhook_rx_delete(&opctx, &authz_rx, &db_rx).await } - // - // Receiver subscription API methods - // - - pub async fn webhook_receiver_subscription_add( - &self, - opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - params::WebhookSubscriptionCreate { subscription}: params::WebhookSubscriptionCreate, - ) -> CreateResult { - let (authz_rx,) = rx.lookup_for(authz::Action::Modify).await?; - let db_subscription = - nexus_db_model::WebhookSubscriptionKind::try_from( - subscription.clone(), - )?; - let _ = self - .datastore() - .webhook_rx_subscription_add(opctx, &authz_rx, db_subscription) - .await?; - Ok(views::WebhookSubscriptionCreated { subscription }) - } - - pub async fn webhook_receiver_subscription_remove( - &self, - opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - subscription: shared::WebhookSubscription, - ) -> DeleteResult { - let (authz_rx,) = rx.lookup_for(authz::Action::Modify).await?; - let db_subscription = - nexus_db_model::WebhookSubscriptionKind::try_from(subscription)?; 
- let _ = self - .datastore() - .webhook_rx_subscription_remove(opctx, &authz_rx, db_subscription) - .await?; - Ok(()) - } - // // Receiver secret API methods // @@ -453,7 +121,7 @@ impl Nexus { pub async fn webhook_receiver_secrets_list( &self, opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, + rx: lookup::AlertReceiver<'_>, ) -> ListResultVec { let (authz_rx,) = rx.lookup_for(authz::Action::ListChildren).await?; self.datastore().webhook_rx_secret_list(opctx, &authz_rx).await @@ -462,7 +130,7 @@ impl Nexus { pub async fn webhook_receiver_secret_add( &self, opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, + rx: lookup::AlertReceiver<'_>, secret: String, ) -> Result { let (authz_rx,) = rx.lookup_for(authz::Action::CreateChild).await?; @@ -507,9 +175,9 @@ impl Nexus { pub async fn webhook_receiver_probe( &self, opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - params: params::WebhookProbe, - ) -> Result { + rx: lookup::AlertReceiver<'_>, + params: params::AlertReceiverProbe, + ) -> Result { let (authz_rx, rx) = rx.fetch_for(authz::Action::ListChildren).await?; let rx_id = authz_rx.id(); let datastore = self.datastore(); @@ -523,7 +191,7 @@ impl Nexus { )?; let mut delivery = WebhookDelivery::new_probe(&rx_id, &self.id); - const CLASS: WebhookEventClass = WebhookEventClass::Probe; + const CLASS: AlertClass = AlertClass::Probe; static DATA: LazyLock = LazyLock::new(|| serde_json::json!({})); @@ -550,9 +218,9 @@ impl Nexus { // Update the delivery state based on the result of the probe attempt. // Otherwise, it will still appear "pending", which is obviously wrong. delivery.state = if attempt.result.is_failed() { - WebhookDeliveryState::Failed + AlertDeliveryState::Failed } else { - WebhookDeliveryState::Delivered + AlertDeliveryState::Delivered }; let resends_started = if params.resend @@ -581,13 +249,13 @@ impl Nexus { "rx_id" => ?authz_rx.id(), "rx_name" => %rx.name(), "delivery_id" => ?delivery.id, - "event_id" => ?event.id(), - "event_class" => %event.event_class, + "alert_id" => ?event.id(), + "alert_class" => %event.class, ); WebhookDelivery::new( &event.id(), &rx_id, - WebhookDeliveryTrigger::Resend, + AlertDeliveryTrigger::Resend, ) }) .collect::>(); @@ -634,112 +302,11 @@ impl Nexus { None }; - Ok(views::WebhookProbeResult { + Ok(views::AlertProbeResult { probe: delivery.to_api_delivery(CLASS, &[attempt]), resends_started, }) } - - pub async fn webhook_receiver_event_resend( - &self, - opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - event: lookup::WebhookEvent<'_>, - ) -> CreateResult { - let (authz_rx,) = rx.lookup_for(authz::Action::CreateChild).await?; - let (authz_event, event) = event.fetch().await?; - let datastore = self.datastore(); - - let is_subscribed = datastore - .webhook_rx_is_subscribed_to_event(opctx, &authz_rx, &authz_event) - .await?; - if !is_subscribed { - return Err(Error::invalid_request(format!( - "cannot resend event: receiver is not subscribed to the '{}' \ - event class", - event.event_class, - ))); - } - - let delivery = WebhookDelivery::new( - &event.id(), - &authz_rx.id(), - WebhookDeliveryTrigger::Resend, - ); - let delivery_id = delivery.id.into(); - - if let Err(e) = - datastore.webhook_delivery_create_batch(opctx, vec![delivery]).await - { - slog::error!( - &opctx.log, - "failed to create new delivery to resend webhook event"; - "rx_id" => ?authz_rx.id(), - "event_id" => ?authz_event.id(), - "event_class" => %event.event_class, - "delivery_id" => ?delivery_id, - "error" => %e, - ); - return Err(e); - } - - slog::info!( - 
&opctx.log, - "resending webhook event"; - "rx_id" => ?authz_rx.id(), - "event_id" => ?authz_event.id(), - "event_class" => %event.event_class, - "delivery_id" => ?delivery_id, - ); - - self.background_tasks.task_webhook_deliverator.activate(); - Ok(delivery_id) - } - - pub async fn webhook_receiver_delivery_list( - &self, - opctx: &OpContext, - rx: lookup::WebhookReceiver<'_>, - filter: params::WebhookDeliveryStateFilter, - pagparams: &DataPageParams<'_, (DateTime, Uuid)>, - ) -> ListResultVec { - let (authz_rx,) = rx.lookup_for(authz::Action::ListChildren).await?; - let only_states = if filter.include_all() { - Vec::new() - } else { - let mut states = Vec::with_capacity(3); - if filter.include_failed() { - states.push(WebhookDeliveryState::Failed); - } - if filter.include_pending() { - states.push(WebhookDeliveryState::Pending); - } - if filter.include_delivered() { - states.push(WebhookDeliveryState::Delivered); - } - states - }; - let deliveries = self - .datastore() - .webhook_rx_delivery_list( - opctx, - &authz_rx.id(), - // No probes; they could have their own list endpoint later... - &[ - WebhookDeliveryTrigger::Event, - WebhookDeliveryTrigger::Resend, - ], - only_states, - pagparams, - ) - .await? - .into_iter() - .map(|(delivery, class, attempts)| { - delivery.to_api_delivery(class, &attempts) - }) - .collect(); - Ok(deliveries) - } } /// Construct a [`reqwest::Client`] configured for webhook delivery requests. @@ -773,7 +340,7 @@ pub(super) fn delivery_client( /// API in the liveness probe endpoint. pub(crate) struct ReceiverClient<'a> { client: &'a reqwest::Client, - rx: &'a WebhookReceiver, + rx: &'a AlertReceiver, secrets: Vec<(WebhookSecretUuid, Hmac)>, hdr_rx_id: http::HeaderValue, nexus_id: OmicronZoneUuid, @@ -783,7 +350,7 @@ impl<'a> ReceiverClient<'a> { pub(crate) fn new( client: &'a reqwest::Client, secrets: impl IntoIterator, - rx: &'a WebhookReceiver, + rx: &'a AlertReceiver, nexus_id: OmicronZoneUuid, ) -> Result { let secrets = secrets @@ -808,7 +375,7 @@ impl<'a> ReceiverClient<'a> { &mut self, opctx: &OpContext, delivery: &WebhookDelivery, - event_class: WebhookEventClass, + alert_class: AlertClass, data: &serde_json::Value, ) -> Result { const HDR_DELIVERY_ID: HeaderName = @@ -824,8 +391,8 @@ impl<'a> ReceiverClient<'a> { #[derive(serde::Serialize, Debug)] struct Payload<'a> { - event_class: WebhookEventClass, - event_id: WebhookEventUuid, + alert_class: AlertClass, + alert_id: AlertUuid, data: &'a serde_json::Value, delivery: DeliveryMetadata<'a>, } @@ -833,17 +400,17 @@ impl<'a> ReceiverClient<'a> { #[derive(serde::Serialize, Debug)] struct DeliveryMetadata<'a> { id: WebhookDeliveryUuid, - webhook_id: WebhookReceiverUuid, + webhook_id: AlertReceiverUuid, sent_at: &'a str, - trigger: views::WebhookDeliveryTrigger, + trigger: views::AlertDeliveryTrigger, } // okay, actually do the thing... 
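Given the `Payload` and `DeliveryMetadata` structs defined just above, the JSON body a webhook receiver sees has roughly the shape sketched below. This is illustrative only: every value is a placeholder, and the serialized form of fields such as `trigger` is ultimately determined by the corresponding view types, not by this example.

```rust
// Approximate wire format of a delivery request body, assuming the field
// names serialize as written in `Payload` / `DeliveryMetadata`. IDs,
// timestamp, class, and data are all placeholders.
fn example_delivery_body() -> serde_json::Value {
    serde_json::json!({
        "alert_class": "test.foo.bar",
        "alert_id": "00000000-0000-0000-0000-000000000001",
        "data": { "msg": "example alert data" },
        "delivery": {
            "id": "00000000-0000-0000-0000-000000000002",
            "webhook_id": "00000000-0000-0000-0000-000000000003",
            "sent_at": "1970-01-01T00:00:00+00:00",
            "trigger": "alert"
        }
    })
}
```

The code that follows populates these fields from the actual delivery record, attaches the delivery, alert, and receiver IDs as request headers, and, per the comment further down, computes an HMAC for each of the receiver's secrets and adds it as a signature header before sending the request.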
let time_attempted = Utc::now(); let sent_at = time_attempted.to_rfc3339(); let payload = Payload { - event_class, - event_id: delivery.event_id.into(), + alert_class, + alert_id: delivery.alert_id.into(), data, delivery: DeliveryMetadata { id: delivery.id.into(), @@ -864,8 +431,8 @@ impl<'a> ReceiverClient<'a> { slog::error!( &opctx.log, "webhook {MSG}"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "error" => %e, @@ -890,8 +457,8 @@ impl<'a> ReceiverClient<'a> { .post(&self.rx.endpoint) .header(HDR_RX_ID, self.hdr_rx_id.clone()) .header(HDR_DELIVERY_ID, delivery.id.to_string()) - .header(HDR_EVENT_ID, delivery.event_id.to_string()) - .header(HDR_EVENT_CLASS, event_class.to_string()) + .header(HDR_EVENT_ID, delivery.alert_id.to_string()) + .header(HDR_EVENT_CLASS, alert_class.to_string()) .header(http::header::CONTENT_TYPE, "application/json"); // For each secret assigned to this webhook, calculate the HMAC and add a signature header. @@ -912,8 +479,8 @@ impl<'a> ReceiverClient<'a> { slog::error!( &opctx.log, "{MSG}"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "error" => %e, @@ -934,8 +501,8 @@ impl<'a> ReceiverClient<'a> { slog::error!( &opctx.log, "{MSG}"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "error" => %e, @@ -947,8 +514,8 @@ impl<'a> ReceiverClient<'a> { slog::warn!( &opctx.log, "webhook receiver endpoint returned an HTTP error"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "response_status" => ?status, @@ -971,8 +538,8 @@ impl<'a> ReceiverClient<'a> { slog::warn!( &opctx.log, "webhook delivery request failed"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "error" => %e, @@ -986,8 +553,8 @@ impl<'a> ReceiverClient<'a> { slog::debug!( &opctx.log, "webhook event delivered successfully"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "response_status" => ?status, @@ -998,8 +565,8 @@ impl<'a> ReceiverClient<'a> { slog::warn!( &opctx.log, "webhook receiver endpoint returned an HTTP error"; - "event_id" => %delivery.event_id, - "event_class" => %event_class, + "alert_id" => %delivery.alert_id, + "alert_class" => %alert_class, "delivery_id" => %delivery.id, "delivery_trigger" => %delivery.triggered_by, "response_status" => ?status, @@ -1034,64 +601,3 @@ impl<'a> ReceiverClient<'a> { }) } } - -#[cfg(test)] -mod tests { - use super::*; - use crate::Nexus; - use std::num::NonZeroU32; - - #[test] - fn test_event_class_list() { - #[track_caller] - fn list( - filter: Option<&str>, - last_seen: Option<&str>, - limit: u32, - ) -> Vec { - let filter = params::EventClassFilter { - filter: 
dbg!(filter).map(|f| f.parse().unwrap()), - }; - let marker = dbg!(last_seen).map(|last_seen| { - params::EventClassPage { last_seen: last_seen.to_string() } - }); - let result = Nexus::actually_list_event_classes( - filter, - DataPageParams { - marker: marker.as_ref(), - direction: dropshot::PaginationOrder::Ascending, - limit: NonZeroU32::new(dbg!(limit)).unwrap(), - }, - ); - - // Throw away the description fields - dbg!(result) - .unwrap() - .into_iter() - .map(|view| view.name) - .collect::>() - } - - // Paginated class list, without a glob filter. - let classes = list(None, None, 3); - assert_eq!(classes, &["probe", "test.foo", "test.foo.bar"]); - let classes = list(None, Some("test.foo.bar"), 3); - assert_eq!( - classes, - &["test.foo.baz", "test.quux.bar", "test.quux.bar.baz"] - ); - // Don't assert that a third list will return no more results, since - // more events may be added in the future, and we don't have a filter. - - // Try a filter for only `test.**` events. - let filter = Some("test.**"); - let classes = list(filter, None, 2); - assert_eq!(classes, &["test.foo", "test.foo.bar"]); - let classes = list(filter, Some("test.foo.bar"), 2); - assert_eq!(classes, &["test.foo.baz", "test.quux.bar"]); - let classes = list(filter, Some("test.quux.bar"), 2); - assert_eq!(classes, &["test.quux.bar.baz"]); - let classes = list(filter, Some("test.quux.bar.baz"), 2); - assert_eq!(classes, Vec::::new()); - } -} diff --git a/nexus/src/external_api/http_entrypoints.rs b/nexus/src/external_api/http_entrypoints.rs index fa2e195574c..060327f5e81 100644 --- a/nexus/src/external_api/http_entrypoints.rs +++ b/nexus/src/external_api/http_entrypoints.rs @@ -7798,13 +7798,13 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_event_class_list( + async fn alert_class_list( rqctx: RequestContext, pag_params: Query< - PaginationParams, + PaginationParams, >, - filter: Query, - ) -> Result>, HttpError> { + filter: Query, + ) -> Result>, HttpError> { let apictx = rqctx.context(); let handler = async { let nexus = &apictx.context.nexus; @@ -7822,13 +7822,12 @@ impl NexusExternalApi for NexusExternalApiImpl { direction: PaginationOrder::Ascending, marker, }; - let event_classes = nexus - .webhook_event_class_list(&opctx, filter, pag_params) - .await?; + let alert_classes = + nexus.alert_class_list(&opctx, filter, pag_params).await?; Ok(HttpResponseOk(ResultsPage::new( - event_classes, + alert_classes, &EmptyScanParams {}, - |class: &views::EventClass, _| class.name.clone(), + |class: &views::AlertClass, _| class.name.clone(), )?)) }; apictx @@ -7838,10 +7837,10 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_list( + async fn alert_receiver_list( rqctx: RequestContext, query_params: Query, - ) -> Result>, HttpError> + ) -> Result>, HttpError> { let apictx = rqctx.context(); let handler = async { @@ -7856,10 +7855,13 @@ impl NexusExternalApi for NexusExternalApiImpl { let paginated_by = name_or_id_pagination(&pagparams, scan_params)?; let rxs = nexus - .webhook_receiver_list(&opctx, &paginated_by) + .alert_receiver_list(&opctx, &paginated_by) .await? 
.into_iter() - .map(views::WebhookReceiver::try_from) + .map(|webhook| { + views::WebhookReceiver::try_from(webhook) + .map(views::AlertReceiver::from) + }) .collect::, _>>()?; Ok(HttpResponseOk(ScanByNameOrId::results_page( @@ -7876,10 +7878,10 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_view( + async fn alert_receiver_view( rqctx: RequestContext, - path_params: Path, - ) -> Result, HttpError> { + path_params: Path, + ) -> Result, HttpError> { let apictx = rqctx.context(); let handler = async { let nexus = &apictx.context.nexus; @@ -7887,10 +7889,11 @@ impl NexusExternalApi for NexusExternalApiImpl { let opctx = crate::context::op_context_for_external_api(&rqctx).await?; let webhook_selector = path_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; - let webhook = - nexus.webhook_receiver_config_fetch(&opctx, rx).await?; - Ok(HttpResponseOk(views::WebhookReceiver::try_from(webhook)?)) + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; + let webhook = nexus.alert_receiver_config_fetch(&opctx, rx).await?; + Ok(HttpResponseOk( + views::WebhookReceiver::try_from(webhook)?.into(), + )) }; apictx .context @@ -7923,7 +7926,7 @@ impl NexusExternalApi for NexusExternalApiImpl { async fn webhook_receiver_update( rqctx: RequestContext, - path_params: Path, + path_params: Path, params: TypedBody, ) -> Result { let apictx = rqctx.context(); @@ -7935,7 +7938,7 @@ impl NexusExternalApi for NexusExternalApiImpl { let webhook_selector = path_params.into_inner(); let params = params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; nexus.webhook_receiver_update(&opctx, rx, params).await?; Ok(HttpResponseUpdatedNoContent()) @@ -7947,9 +7950,9 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_delete( + async fn alert_receiver_delete( rqctx: RequestContext, - path_params: Path, + path_params: Path, ) -> Result { let apictx = rqctx.context(); let handler = async { @@ -7959,7 +7962,7 @@ impl NexusExternalApi for NexusExternalApiImpl { crate::context::op_context_for_external_api(&rqctx).await?; let webhook_selector = path_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; nexus.webhook_receiver_delete(&opctx, rx).await?; Ok(HttpResponseDeleted()) @@ -7971,11 +7974,11 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_subscription_add( + async fn alert_receiver_subscription_add( rqctx: RequestContext, - path_params: Path, - params: TypedBody, - ) -> Result, HttpError> + path_params: Path, + params: TypedBody, + ) -> Result, HttpError> { let apictx = rqctx.context(); let handler = async { @@ -7986,10 +7989,10 @@ impl NexusExternalApi for NexusExternalApiImpl { let webhook_selector = path_params.into_inner(); let subscription = params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let subscription = nexus - .webhook_receiver_subscription_add(&opctx, rx, subscription) + .alert_receiver_subscription_add(&opctx, rx, subscription) .await?; Ok(HttpResponseCreated(subscription)) @@ -8001,9 +8004,9 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_subscription_remove( + async fn 
alert_receiver_subscription_remove( rqctx: RequestContext, - path_params: Path, + path_params: Path, ) -> Result { let apictx = rqctx.context(); let handler = async { @@ -8012,12 +8015,12 @@ impl NexusExternalApi for NexusExternalApiImpl { let opctx = crate::context::op_context_for_external_api(&rqctx).await?; - let params::WebhookSubscriptionSelector { receiver, subscription } = + let params::AlertSubscriptionSelector { receiver, subscription } = path_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, receiver)?; + let rx = nexus.alert_receiver_lookup(&opctx, receiver)?; nexus - .webhook_receiver_subscription_remove(&opctx, rx, subscription) + .alert_receiver_subscription_remove(&opctx, rx, subscription) .await?; Ok(HttpResponseDeleted()) @@ -8029,11 +8032,11 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_receiver_probe( + async fn alert_receiver_probe( rqctx: RequestContext, - path_params: Path, - query_params: Query, - ) -> Result, HttpError> { + path_params: Path, + query_params: Query, + ) -> Result, HttpError> { let apictx = rqctx.context(); let handler = async { let nexus = &apictx.context.nexus; @@ -8043,7 +8046,7 @@ impl NexusExternalApi for NexusExternalApiImpl { let webhook_selector = path_params.into_inner(); let probe_params = query_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let result = nexus.webhook_receiver_probe(&opctx, rx, probe_params).await?; Ok(HttpResponseOk(result)) @@ -8057,7 +8060,7 @@ impl NexusExternalApi for NexusExternalApiImpl { async fn webhook_secrets_list( rqctx: RequestContext, - query_params: Query, + query_params: Query, ) -> Result, HttpError> { let apictx = rqctx.context(); let handler = async { @@ -8067,7 +8070,7 @@ impl NexusExternalApi for NexusExternalApiImpl { crate::context::op_context_for_external_api(&rqctx).await?; let webhook_selector = query_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let secrets = nexus .webhook_receiver_secrets_list(&opctx, rx) .await? @@ -8087,7 +8090,7 @@ impl NexusExternalApi for NexusExternalApiImpl { /// Add a secret to a webhook. 
async fn webhook_secrets_add( rqctx: RequestContext, - query_params: Query, + query_params: Query, params: TypedBody, ) -> Result, HttpError> { let apictx = rqctx.context(); @@ -8098,7 +8101,7 @@ impl NexusExternalApi for NexusExternalApiImpl { let params::WebhookSecretCreate { secret } = params.into_inner(); let webhook_selector = query_params.into_inner(); - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let secret = nexus.webhook_receiver_secret_add(&opctx, rx, secret).await?; Ok(HttpResponseCreated(secret)) @@ -8136,12 +8139,12 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_delivery_list( + async fn alert_delivery_list( rqctx: RequestContext, - receiver: Query, - filter: Query, + receiver: Path, + filter: Query, query: Query, - ) -> Result>, HttpError> + ) -> Result>, HttpError> { let apictx = rqctx.context(); let handler = async { @@ -8154,9 +8157,9 @@ impl NexusExternalApi for NexusExternalApiImpl { let filter = filter.into_inner(); let query = query.into_inner(); let pag_params = data_page_params_for(&rqctx, &query)?; - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let deliveries = nexus - .webhook_receiver_delivery_list(&opctx, rx, filter, &pag_params) + .alert_receiver_delivery_list(&opctx, rx, filter, &pag_params) .await?; Ok(HttpResponseOk(ScanByTimeAndId::results_page( @@ -8172,11 +8175,11 @@ impl NexusExternalApi for NexusExternalApiImpl { .await } - async fn webhook_delivery_resend( + async fn alert_delivery_resend( rqctx: RequestContext, - path_params: Path, - receiver: Query, - ) -> Result, HttpError> { + path_params: Path, + receiver: Query, + ) -> Result, HttpError> { let apictx = rqctx.context(); let handler = async { let nexus = &apictx.context.nexus; @@ -8186,12 +8189,12 @@ impl NexusExternalApi for NexusExternalApiImpl { let event_selector = path_params.into_inner(); let webhook_selector = receiver.into_inner(); - let event = nexus.webhook_event_lookup(&opctx, event_selector)?; - let rx = nexus.webhook_receiver_lookup(&opctx, webhook_selector)?; + let event = nexus.alert_lookup(&opctx, event_selector)?; + let rx = nexus.alert_receiver_lookup(&opctx, webhook_selector)?; let delivery_id = - nexus.webhook_receiver_event_resend(&opctx, rx, event).await?; + nexus.alert_receiver_resend(&opctx, rx, event).await?; - Ok(HttpResponseCreated(views::WebhookDeliveryId { + Ok(HttpResponseCreated(views::AlertDeliveryId { delivery_id: delivery_id.into_untyped_uuid(), })) }; diff --git a/nexus/tests/config.test.toml b/nexus/tests/config.test.toml index ab3cbcd9af9..4cf96ba177a 100644 --- a/nexus/tests/config.test.toml +++ b/nexus/tests/config.test.toml @@ -163,7 +163,7 @@ tuf_artifact_replication.period_secs = 3600 tuf_artifact_replication.min_sled_replication = 3 # In general, the webhook dispatcher will be activated when events are queued, # so we don't need to periodically activate it *that* frequently. 
-webhook_dispatcher.period_secs = 60 +alert_dispatcher.period_secs = 60 webhook_deliverator.period_secs = 60 # In order to test webhook delivery retry behavior without waiting for a long # time, turn these backoff periods down from multiple minutes to just a couple diff --git a/nexus/tests/integration_tests/endpoints.rs b/nexus/tests/integration_tests/endpoints.rs index 2f4c9e6c3a6..2e91816d563 100644 --- a/nexus/tests/integration_tests/endpoints.rs +++ b/nexus/tests/integration_tests/endpoints.rs @@ -1160,10 +1160,10 @@ pub static DEMO_TARGET_RELEASE: LazyLock = system_version: Version::new(0, 0, 0), }); -// Webhooks -pub static WEBHOOK_RECEIVERS_URL: &'static str = "/v1/webhooks/receivers"; -pub static WEBHOOK_EVENT_CLASSES_URL: &'static str = - "/v1/webhooks/event-classes"; +// Alerts +pub static ALERT_CLASSES_URL: &'static str = "/v1/alert-classes"; +pub static ALERT_RECEIVERS_URL: &'static str = "/v1/alert-receivers"; +pub static WEBHOOK_RECEIVERS_URL: &'static str = "/v1/webhook-receivers"; pub static DEMO_WEBHOOK_RECEIVER_NAME: LazyLock = LazyLock::new(|| "my-great-webhook".parse().unwrap()); @@ -1191,52 +1191,47 @@ pub static DEMO_WEBHOOK_RECEIVER_UPDATE: LazyLock< endpoint: Some("https://example.com/my-cool-webhook".parse().unwrap()), }); +pub static DEMO_ALERT_RECEIVER_URL: LazyLock = LazyLock::new(|| { + format!("{ALERT_RECEIVERS_URL}/{}", *DEMO_WEBHOOK_RECEIVER_NAME) +}); pub static DEMO_WEBHOOK_RECEIVER_URL: LazyLock = LazyLock::new(|| { format!("{WEBHOOK_RECEIVERS_URL}/{}", *DEMO_WEBHOOK_RECEIVER_NAME) }); -pub static DEMO_WEBHOOK_RECEIVER_PROBE_URL: LazyLock = - LazyLock::new(|| { - format!("{WEBHOOK_RECEIVERS_URL}/{}/probe", *DEMO_WEBHOOK_RECEIVER_NAME) - }); +pub static DEMO_ALERT_RECEIVER_PROBE_URL: LazyLock = + LazyLock::new(|| format!("{}/probe", *DEMO_ALERT_RECEIVER_URL)); -pub static DEMO_WEBHOOK_SUBSCRIPTIONS_URL: LazyLock = - LazyLock::new(|| { - format!( - "{WEBHOOK_RECEIVERS_URL}/{}/subscriptions", - *DEMO_WEBHOOK_RECEIVER_NAME - ) - }); +pub static DEMO_ALERT_DELIVERIES_URL: LazyLock = + LazyLock::new(|| format!("{}/deliveries", *DEMO_ALERT_RECEIVER_URL)); -pub static DEMO_WEBHOOK_SUBSCRIPTION: LazyLock = +pub static DEMO_ALERT_SUBSCRIPTIONS_URL: LazyLock = + LazyLock::new(|| format!("{}/subscriptions", *DEMO_ALERT_RECEIVER_URL)); + +pub static DEMO_ALERT_SUBSCRIPTION: LazyLock = LazyLock::new(|| "test.foo.**".parse().unwrap()); -pub static DEMO_WEBHOOK_SUBSCRIPTION_CREATE: LazyLock< - params::WebhookSubscriptionCreate, -> = LazyLock::new(|| params::WebhookSubscriptionCreate { - subscription: DEMO_WEBHOOK_SUBSCRIPTION.clone(), +pub static DEMO_ALERT_SUBSCRIPTION_CREATE: LazyLock< + params::AlertSubscriptionCreate, +> = LazyLock::new(|| params::AlertSubscriptionCreate { + subscription: DEMO_ALERT_SUBSCRIPTION.clone(), }); -pub static DEMO_WEBHOOK_SUBSCRIPTION_DELETE_URL: LazyLock = +pub static DEMO_ALERT_SUBSCRIPTION_DELETE_URL: LazyLock = LazyLock::new(|| { format!( - "{WEBHOOK_RECEIVERS_URL}/{}/subscriptions/{}", - *DEMO_WEBHOOK_RECEIVER_NAME, *DEMO_WEBHOOK_SUBSCRIPTION, + "{}/subscriptions/{}", + *DEMO_ALERT_RECEIVER_URL, *DEMO_ALERT_SUBSCRIPTION, ) }); -pub static DEMO_WEBHOOK_DELIVERY_URL: LazyLock = LazyLock::new(|| { - format!("/v1/webhooks/deliveries?receiver={}", *DEMO_WEBHOOK_RECEIVER_NAME) -}); - pub static DEMO_WEBHOOK_SECRETS_URL: LazyLock = LazyLock::new(|| { - format!("/v1/webhooks/secrets?receiver={}", *DEMO_WEBHOOK_RECEIVER_NAME) + format!("/v1/webhook-secrets?receiver={}", *DEMO_WEBHOOK_RECEIVER_NAME) }); pub static 
DEMO_WEBHOOK_SECRET_DELETE_URL: LazyLock = LazyLock::new(|| { format!( - "/v1/webhooks/secrets/{{id}}?receiver={}", + "/v1/webhook-secrets/{{id}}?receiver={}", *DEMO_WEBHOOK_RECEIVER_NAME ) }); @@ -1246,9 +1241,6 @@ pub static DEMO_WEBHOOK_SECRET_CREATE: LazyLock = secret: "TRUSTNO1".to_string(), }); -// pub static DEMO_WEBHOOK_SUBSCRIPTION: LazyLock = -// LazyLock::new(|| "test.foo.baz".parse().unwrap()); - /// Describes an API endpoint to be verified by the "unauthorized" test /// /// These structs are also used to check whether we're covering all endpoints in @@ -2830,34 +2822,42 @@ pub static VERIFY_ENDPOINTS: LazyLock> = ), ], }, - // Webhooks + // Alerts VerifyEndpoint { url: &WEBHOOK_RECEIVERS_URL, visibility: Visibility::Public, unprivileged_access: UnprivilegedAccess::None, - allowed_methods: vec![ - AllowedMethod::Get, - AllowedMethod::Post( - serde_json::to_value(&*DEMO_WEBHOOK_RECEIVER_CREATE) - .unwrap(), - ), - ], + allowed_methods: vec![AllowedMethod::Post( + serde_json::to_value(&*DEMO_WEBHOOK_RECEIVER_CREATE) + .unwrap(), + )], }, VerifyEndpoint { - url: &DEMO_WEBHOOK_RECEIVER_URL, + url: &ALERT_RECEIVERS_URL, + visibility: Visibility::Public, + unprivileged_access: UnprivilegedAccess::None, + allowed_methods: vec![AllowedMethod::Get], + }, + VerifyEndpoint { + url: &DEMO_ALERT_RECEIVER_URL, visibility: Visibility::Protected, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![ AllowedMethod::Get, - AllowedMethod::Put( - serde_json::to_value(&*DEMO_WEBHOOK_RECEIVER_UPDATE) - .unwrap(), - ), AllowedMethod::Delete, ], }, VerifyEndpoint { - url: &DEMO_WEBHOOK_RECEIVER_PROBE_URL, + url: &DEMO_WEBHOOK_RECEIVER_URL, + visibility: Visibility::Protected, + unprivileged_access: UnprivilegedAccess::None, + allowed_methods: vec![AllowedMethod::Put( + serde_json::to_value(&*DEMO_WEBHOOK_RECEIVER_UPDATE) + .unwrap(), + )], + }, + VerifyEndpoint { + url: &DEMO_ALERT_RECEIVER_PROBE_URL, visibility: Visibility::Protected, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![AllowedMethod::Post( @@ -2877,16 +2877,16 @@ pub static VERIFY_ENDPOINTS: LazyLock> = ], }, VerifyEndpoint { - url: &DEMO_WEBHOOK_SUBSCRIPTIONS_URL, + url: &DEMO_ALERT_SUBSCRIPTIONS_URL, visibility: Visibility::Protected, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![AllowedMethod::Post( - serde_json::to_value(&*DEMO_WEBHOOK_SUBSCRIPTION_CREATE) + serde_json::to_value(&*DEMO_ALERT_SUBSCRIPTION_CREATE) .unwrap(), )], }, VerifyEndpoint { - url: &DEMO_WEBHOOK_SUBSCRIPTION_DELETE_URL, + url: &DEMO_ALERT_SUBSCRIPTION_DELETE_URL, visibility: Visibility::Protected, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![AllowedMethod::Delete], @@ -2898,13 +2898,13 @@ pub static VERIFY_ENDPOINTS: LazyLock> = allowed_methods: vec![AllowedMethod::Delete], }, VerifyEndpoint { - url: &DEMO_WEBHOOK_DELIVERY_URL, + url: &DEMO_ALERT_DELIVERIES_URL, visibility: Visibility::Protected, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![AllowedMethod::Get], }, VerifyEndpoint { - url: &WEBHOOK_EVENT_CLASSES_URL, + url: &ALERT_CLASSES_URL, visibility: Visibility::Public, unprivileged_access: UnprivilegedAccess::None, allowed_methods: vec![AllowedMethod::Get], diff --git a/nexus/tests/integration_tests/schema.rs b/nexus/tests/integration_tests/schema.rs index 9bdc3125219..406ccb6e7bf 100644 --- a/nexus/tests/integration_tests/schema.rs +++ b/nexus/tests/integration_tests/schema.rs @@ -2044,7 +2044,7 @@ fn after_134_0_0<'a>(ctx: &'a MigrationContext<'a>) 
-> BoxFuture<'a, ()> { fn after_139_0_0<'a>(ctx: &'a MigrationContext<'a>) -> BoxFuture<'a, ()> { Box::pin(async { - let probe_event_id: Uuid = + let probe_alert_id: Uuid = "001de000-7768-4000-8000-000000000001".parse().unwrap(); let rows = ctx .client @@ -2080,7 +2080,7 @@ fn after_139_0_0<'a>(ctx: &'a MigrationContext<'a>) -> BoxFuture<'a, ()> { assert_eq!( records[0].values, vec![ - ColumnValue::new("id", probe_event_id), + ColumnValue::new("id", probe_alert_id), ColumnValue::new( "event_class", SqlEnum::from(("webhook_event_class", "probe")), diff --git a/nexus/tests/integration_tests/webhooks.rs b/nexus/tests/integration_tests/webhooks.rs index 6f1d122904f..8b46f79febc 100644 --- a/nexus/tests/integration_tests/webhooks.rs +++ b/nexus/tests/integration_tests/webhooks.rs @@ -7,7 +7,7 @@ use dropshot::test_util::ClientTestContext; use hmac::{Hmac, Mac}; use httpmock::prelude::*; -use nexus_db_model::WebhookEventClass; +use nexus_db_model::AlertClass; use nexus_db_queries::context::OpContext; use nexus_test_utils::background::activate_background_task; use nexus_test_utils::http_testing::AuthnMode; @@ -19,9 +19,9 @@ use nexus_test_utils_macros::nexus_test; use nexus_types::external_api::{params, shared, views}; use omicron_common::api::external::IdentityMetadataCreateParams; use omicron_common::api::external::NameOrId; +use omicron_uuid_kinds::AlertReceiverUuid; +use omicron_uuid_kinds::AlertUuid; use omicron_uuid_kinds::GenericUuid; -use omicron_uuid_kinds::WebhookEventUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use sha2::Sha256; use std::time::Duration; use uuid::Uuid; @@ -29,9 +29,10 @@ use uuid::Uuid; type ControlPlaneTestContext = nexus_test_utils::ControlPlaneTestContext; -const RECEIVERS_BASE_PATH: &str = "/v1/webhooks/receivers"; -const SECRETS_BASE_PATH: &str = "/v1/webhooks/secrets"; -const DELIVERIES_BASE_PATH: &str = "/v1/webhooks/deliveries"; +const ALERTS_BASE_PATH: &str = "/v1/alerts"; +const ALERT_RECEIVERS_BASE_PATH: &str = "/v1/alert-receivers"; +const WEBHOOK_RECEIVERS_BASE_PATH: &str = "/v1/webhook-receivers"; +const SECRETS_BASE_PATH: &str = "/v1/webhook-secrets"; async fn webhook_create( ctx: &ControlPlaneTestContext, @@ -40,27 +41,27 @@ async fn webhook_create( resource_helpers::object_create::< params::WebhookCreate, views::WebhookReceiver, - >(&ctx.external_client, RECEIVERS_BASE_PATH, params) + >(&ctx.external_client, WEBHOOK_RECEIVERS_BASE_PATH, params) .await } -fn get_webhooks_url(name_or_id: impl Into) -> String { +fn alert_rx_url(name_or_id: impl Into) -> String { let name_or_id = name_or_id.into(); - format!("{RECEIVERS_BASE_PATH}/{name_or_id}") + format!("{ALERT_RECEIVERS_BASE_PATH}/{name_or_id}") } -async fn webhook_get( +async fn alert_rx_get( client: &ClientTestContext, webhook_url: &str, -) -> views::WebhookReceiver { - webhook_get_as(client, webhook_url, AuthnMode::PrivilegedUser).await +) -> views::AlertReceiver { + alert_rx_get_as(client, webhook_url, AuthnMode::PrivilegedUser).await } -async fn webhook_get_as( +async fn alert_rx_get_as( client: &ClientTestContext, webhook_url: &str, authn_as: AuthnMode, -) -> views::WebhookReceiver { +) -> views::AlertReceiver { NexusRequest::object_get(client, &webhook_url) .authn_as(authn_as) .execute() @@ -70,12 +71,12 @@ async fn webhook_get_as( .unwrap() } -async fn webhook_rx_list( +async fn alert_rx_list( client: &ClientTestContext, -) -> Vec { - resource_helpers::objects_list_page_authz::( +) -> Vec { + resource_helpers::objects_list_page_authz::( client, - RECEIVERS_BASE_PATH, + 
ALERT_RECEIVERS_BASE_PATH, ) .await .items @@ -100,36 +101,32 @@ async fn webhook_secrets_get( fn resend_url( webhook_name_or_id: impl Into, - event_id: WebhookEventUuid, + alert_id: AlertUuid, ) -> String { let rx = webhook_name_or_id.into(); - format!("{DELIVERIES_BASE_PATH}/{event_id}/resend?receiver={rx}") + format!("{ALERTS_BASE_PATH}/{alert_id}/resend?receiver={rx}") } -async fn webhook_deliveries_list( +async fn alert_deliveries_list( client: &ClientTestContext, webhook_name_or_id: impl Into, -) -> Collection { - let rx = webhook_name_or_id.into(); - NexusRequest::iter_collection_authn( - client, - &format!("{DELIVERIES_BASE_PATH}?receiver={rx}"), - "", - None, - ) - .await - .unwrap() +) -> Collection { + let mut rx_url = alert_rx_url(webhook_name_or_id); + rx_url.push_str("/deliveries"); + NexusRequest::iter_collection_authn(client, &rx_url, "", None) + .await + .unwrap() } -async fn webhook_delivery_resend( +async fn alert_delivery_resend( client: &ClientTestContext, webhook_name_or_id: impl Into, - event_id: WebhookEventUuid, -) -> views::WebhookDeliveryId { + alert_id: AlertUuid, +) -> views::AlertDeliveryId { let req = RequestBuilder::new( client, http::Method::POST, - &resend_url(webhook_name_or_id, event_id), + &resend_url(webhook_name_or_id, alert_id), ) .body::(None) .expect_status(Some(http::StatusCode::CREATED)); @@ -145,13 +142,13 @@ async fn webhook_delivery_resend( async fn webhook_delivery_resend_error( client: &ClientTestContext, webhook_name_or_id: impl Into, - event_id: WebhookEventUuid, + alert_id: AlertUuid, status: http::StatusCode, ) -> dropshot::HttpErrorResponseBody { let req = RequestBuilder::new( client, http::Method::POST, - &resend_url(webhook_name_or_id, event_id), + &resend_url(webhook_name_or_id, alert_id), ) .body::(None) .expect_status(Some(status)); @@ -189,7 +186,7 @@ const MY_COOL_SECRET: &str = "my cool secret"; async fn secret_add( ctx: &ControlPlaneTestContext, - webhook_id: WebhookReceiverUuid, + webhook_id: AlertReceiverUuid, params: ¶ms::WebhookSecretCreate, ) -> views::WebhookSecret { resource_helpers::object_create::< @@ -205,23 +202,21 @@ async fn secret_add( async fn subscription_add( ctx: &ControlPlaneTestContext, - webhook_id: WebhookReceiverUuid, - subscription: &shared::WebhookSubscription, -) -> views::WebhookSubscriptionCreated { + webhook_id: AlertReceiverUuid, + subscription: &shared::AlertSubscription, +) -> views::AlertSubscriptionCreated { resource_helpers::object_create( &ctx.external_client, - &format!("{RECEIVERS_BASE_PATH}/{webhook_id}/subscriptions"), - ¶ms::WebhookSubscriptionCreate { - subscription: subscription.clone(), - }, + &format!("{ALERT_RECEIVERS_BASE_PATH}/{webhook_id}/subscriptions"), + ¶ms::AlertSubscriptionCreate { subscription: subscription.clone() }, ) .await } async fn subscription_remove( ctx: &ControlPlaneTestContext, - webhook_id: WebhookReceiverUuid, - subscription: &shared::WebhookSubscription, + webhook_id: AlertReceiverUuid, + subscription: &shared::AlertSubscription, ) { resource_helpers::object_delete( &ctx.external_client, @@ -231,20 +226,23 @@ async fn subscription_remove( } fn subscription_remove_url( - webhook_id: WebhookReceiverUuid, - subscription: &shared::WebhookSubscription, + webhook_id: AlertReceiverUuid, + subscription: &shared::AlertSubscription, ) -> String { - format!("{RECEIVERS_BASE_PATH}/{webhook_id}/subscriptions/{subscription}") + format!( + "{ALERT_RECEIVERS_BASE_PATH}/{webhook_id}/subscriptions/{subscription}" + ) } -async fn webhook_send_probe( +async fn 
alert_receiver_send_probe( ctx: &ControlPlaneTestContext, - webhook_id: &WebhookReceiverUuid, + webhook_id: &AlertReceiverUuid, resend: bool, status: http::StatusCode, -) -> views::WebhookProbeResult { +) -> views::AlertProbeResult { let pathparams = if resend { "?resend=true" } else { "" }; - let path = format!("{RECEIVERS_BASE_PATH}/{webhook_id}/probe{pathparams}"); + let path = + format!("{ALERT_RECEIVERS_BASE_PATH}/{webhook_id}/probe{pathparams}"); NexusRequest::new( RequestBuilder::new(&ctx.external_client, http::Method::POST, &path) .expect_status(Some(status)), @@ -262,7 +260,7 @@ async fn webhook_send_probe( fn is_valid_for_webhook( webhook: &views::WebhookReceiver, ) -> impl FnOnce(httpmock::When) -> httpmock::When { - let path = webhook.endpoint.path().to_string(); + let path = webhook.config.endpoint.path().to_string(); let id = webhook.identity.id.to_string(); move |when| { when.path(path) @@ -333,9 +331,10 @@ struct ExpectAttempt { /// such as timestamps, which are variable. #[track_caller] fn expect_delivery_attempts( - actual: &[views::WebhookDeliveryAttempt], + actual: &views::AlertDeliveryAttempts, expected: &[ExpectAttempt], ) { + let views::AlertDeliveryAttempts::Webhook(actual) = actual; assert_eq!( actual.len(), expected.len(), @@ -372,13 +371,13 @@ async fn test_webhook_receiver_get(cptestctx: &ControlPlaneTestContext) { dbg!(&created_webhook); // Fetch the receiver by ID. - let by_id_url = get_webhooks_url(created_webhook.identity.id); - let webhook_view = webhook_get(client, &by_id_url).await; + let by_id_url = alert_rx_url(created_webhook.identity.id); + let webhook_view = alert_rx_get(client, &by_id_url).await; assert_eq!(created_webhook, webhook_view); // Fetch the receiver by name. - let by_name_url = get_webhooks_url(created_webhook.identity.name.clone()); - let webhook_view = webhook_get(client, &by_name_url).await; + let by_name_url = alert_rx_url(created_webhook.identity.name.clone()); + let webhook_view = alert_rx_get(client, &by_name_url).await; assert_eq!(created_webhook, webhook_view); } @@ -403,16 +402,13 @@ async fn test_webhook_receiver_create_delete( .await; dbg!(&created_webhook); - resource_helpers::object_delete( - client, - &format!("{RECEIVERS_BASE_PATH}/{}", created_webhook.identity.name), - ) - .await; + let delete_url = alert_rx_url(created_webhook.identity.name.clone()); + resource_helpers::object_delete(client, &delete_url).await; // It should be gone now. 
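The signature_verifies helper used by the mock servers in these tests checks the per-secret HMAC that the delivery client computes ("for each secret assigned to this webhook, calculate the HMAC and add a signature header"). A minimal sketch of that pairing, assuming the signature is an HMAC-SHA256 over the raw request body, hex-encoded; the actual header name and wire encoding are not shown in this diff and are assumptions here.

use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

/// Sender side: hex-encode an HMAC-SHA256 of the request body for one secret.
fn sign_body(secret: &[u8], body: &[u8]) -> String {
    let mut mac = HmacSha256::new_from_slice(secret)
        .expect("HMAC accepts keys of any length");
    mac.update(body);
    mac.finalize()
        .into_bytes()
        .iter()
        .map(|byte| format!("{byte:02x}"))
        .collect()
}

/// Receiver side: recompute and compare. A real receiver should decode the
/// hex and use `Mac::verify_slice` for a constant-time comparison instead.
fn body_signature_matches(secret: &[u8], body: &[u8], signature_hex: &str) -> bool {
    sign_body(secret, body) == signature_hex
}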
resource_helpers::object_delete_error( client, - &format!("{RECEIVERS_BASE_PATH}/{}", created_webhook.identity.name), + &delete_url, http::StatusCode::NOT_FOUND, ) .await; @@ -441,7 +437,7 @@ async fn test_webhook_receiver_names_are_unique( let error = resource_helpers::object_create_error( &client, - RECEIVERS_BASE_PATH, + WEBHOOK_RECEIVERS_BASE_PATH, ¶ms::WebhookCreate { identity: my_great_webhook_identity(), endpoint: "https://example.com/more-webhooks" @@ -455,7 +451,7 @@ async fn test_webhook_receiver_names_are_unique( .await; assert_eq!( dbg!(&error).message, - "already exists: webhook-receiver \"my-great-webhook\"" + "already exists: alert-receiver \"my-great-webhook\"" ); } @@ -465,7 +461,7 @@ async fn test_cannot_subscribe_to_probes(cptestctx: &ControlPlaneTestContext) { let error = resource_helpers::object_create_error( &client, - RECEIVERS_BASE_PATH, + WEBHOOK_RECEIVERS_BASE_PATH, ¶ms::WebhookCreate { identity: my_great_webhook_identity(), endpoint: "https://example.com/webhooks" @@ -498,7 +494,7 @@ async fn test_event_delivery(cptestctx: &ControlPlaneTestContext) { let server = httpmock::MockServer::start_async().await; - let id = WebhookEventUuid::new_v4(); + let id = AlertUuid::new_v4(); // Create a webhook receiver. let webhook = @@ -510,8 +506,8 @@ async fn test_event_delivery(cptestctx: &ControlPlaneTestContext) { server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": id, + "alert_class": "test.foo", + "alert_id": id, "data": { "hello_world": true, } @@ -522,7 +518,7 @@ async fn test_event_delivery(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-id", id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -533,17 +529,17 @@ async fn test_event_delivery(cptestctx: &ControlPlaneTestContext) { // Publish an event let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -562,7 +558,7 @@ async fn test_multiple_secrets(cptestctx: &ControlPlaneTestContext) { let server = httpmock::MockServer::start_async().await; - let id = WebhookEventUuid::new_v4(); + let id = AlertUuid::new_v4(); let endpoint = server.url("/webhooks").parse().expect("this should be a valid URL"); @@ -585,9 +581,9 @@ async fn test_multiple_secrets(cptestctx: &ControlPlaneTestContext) { ) .await; dbg!(&webhook); - let rx_id = WebhookReceiverUuid::from_untyped_uuid(webhook.identity.id); + let rx_id = AlertReceiverUuid::from_untyped_uuid(webhook.identity.id); - let secret1_id = webhook.secrets[0].id; + let secret1_id = webhook.config.secrets[0].id; let client = &cptestctx.external_client; let assert_secrets_get = |mut expected: Vec| async move { @@ -655,17 +651,17 @@ async fn test_multiple_secrets(cptestctx: &ControlPlaneTestContext) { // Publish an event let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published 
successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -683,12 +679,12 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { let opctx = OpContext::for_tests(cptestctx.logctx.log.new(o!()), datastore.clone()); - let bar_event_id = WebhookEventUuid::new_v4(); - let baz_event_id = WebhookEventUuid::new_v4(); + let bar_alert_id = AlertUuid::new_v4(); + let baz_alert_id = AlertUuid::new_v4(); let assert_webhook_rx_list_matches = - |mut expected: Vec| async move { - let mut actual = webhook_rx_list(client).await; + |mut expected: Vec| async move { + let mut actual = alert_rx_list(client).await; actual.sort_by_key(|rx| rx.identity.id); expected.sort_by_key(|rx| rx.identity.id); assert_eq!(expected, actual); @@ -714,17 +710,17 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { ) .await; dbg!(&rx_bar); - assert_webhook_rx_list_matches(vec![rx_bar.clone()]).await; + assert_webhook_rx_list_matches(vec![rx_bar.clone().into()]).await; let mock_bar = { let webhook = rx_bar.clone(); srv_bar .mock_async(move |when, then| { when.method(POST) .header("x-oxide-event-class", "test.foo.bar") - .header("x-oxide-event-id", bar_event_id.to_string()) + .header("x-oxide-event-id", bar_alert_id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, BAR_SECRET.as_bytes().to_vec(), )); then.status(200); @@ -751,17 +747,21 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { ) .await; dbg!(&rx_baz); - assert_webhook_rx_list_matches(vec![rx_bar.clone(), rx_baz.clone()]).await; + assert_webhook_rx_list_matches(vec![ + rx_bar.clone().into(), + rx_baz.clone().into(), + ]) + .await; let mock_baz = { let webhook = rx_baz.clone(); srv_baz .mock_async(move |when, then| { when.method(POST) .header("x-oxide-event-class", "test.foo.baz") - .header("x-oxide-event-id", baz_event_id.to_string()) + .header("x-oxide-event-id", baz_alert_id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, BAZ_SECRET.as_bytes().to_vec(), )); then.status(200); @@ -789,9 +789,9 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { .await; dbg!(&rx_star); assert_webhook_rx_list_matches(vec![ - rx_bar.clone(), - rx_baz.clone(), - rx_star.clone(), + rx_bar.clone().into(), + rx_baz.clone().into(), + rx_star.clone().into(), ]) .await; let mock_star = { @@ -806,7 +806,7 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { .header_exists("x-oxide-event-id") .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, STAR_SECRET.as_bytes().to_vec(), )); then.status(200); @@ -816,10 +816,10 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { // Publish a test.foo.bar event let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, - bar_event_id, - WebhookEventClass::TestFooBar, + bar_alert_id, + AlertClass::TestFooBar, serde_json::json!({"lol": "webhooked on phonics"}), ) .await @@ -827,17 +827,17 @@ async fn test_multiple_receivers(cptestctx: &ControlPlaneTestContext) { dbg!(event); // Publish a test.foo.baz event let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, - 
baz_event_id, - WebhookEventClass::TestFooBaz, + baz_alert_id, + AlertClass::TestFooBaz, serde_json::json!({"lol": "webhook, line, and sinker"}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -863,7 +863,7 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { let server = httpmock::MockServer::start_async().await; - let id = WebhookEventUuid::new_v4(); + let id = AlertUuid::new_v4(); // Create a webhook receiver. let webhook = @@ -875,8 +875,8 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": id, + "alert_class": "test.foo", + "alert_id": id, "data": { "hello_world": true, } @@ -887,7 +887,7 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-id", id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -898,28 +898,26 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { // Publish an event let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); mock.assert_calls_async(1).await; - let deliveries = webhook_deliveries_list( - &cptestctx.external_client, - webhook.identity.id, - ) - .await; + let deliveries = + alert_deliveries_list(&cptestctx.external_client, webhook.identity.id) + .await; assert_eq!( deliveries.all_items.len(), 1, @@ -928,10 +926,10 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { ); let delivery = dbg!(&deliveries.all_items[0]); - assert_eq!(delivery.webhook_id.into_untyped_uuid(), webhook.identity.id); - assert_eq!(delivery.event_id, id); - assert_eq!(delivery.event_class, "test.foo"); - assert_eq!(delivery.state, views::WebhookDeliveryState::Pending); + assert_eq!(delivery.receiver_id.into_untyped_uuid(), webhook.identity.id); + assert_eq!(delivery.alert_id, id); + assert_eq!(delivery.alert_class, "test.foo"); + assert_eq!(delivery.state, views::AlertDeliveryState::Pending); expect_delivery_attempts( &delivery.attempts, &[ExpectAttempt { @@ -956,8 +954,8 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": id, + "alert_class": "test.foo", + "alert_id": id, "data": { "hello_world": true, } @@ -968,7 +966,7 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-id", id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -991,11 +989,9 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { 
mock.assert_calls_async(1).await; mock.delete_async().await; - let deliveries = webhook_deliveries_list( - &cptestctx.external_client, - webhook.identity.id, - ) - .await; + let deliveries = + alert_deliveries_list(&cptestctx.external_client, webhook.identity.id) + .await; assert_eq!( deliveries.all_items.len(), 1, @@ -1004,10 +1000,10 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { ); let delivery = dbg!(&deliveries.all_items[0]); - assert_eq!(delivery.webhook_id.into_untyped_uuid(), webhook.identity.id); - assert_eq!(delivery.event_id, id); - assert_eq!(delivery.event_class, "test.foo"); - assert_eq!(delivery.state, views::WebhookDeliveryState::Pending); + assert_eq!(delivery.receiver_id.into_untyped_uuid(), webhook.identity.id); + assert_eq!(delivery.alert_id, id); + assert_eq!(delivery.alert_class, "test.foo"); + assert_eq!(delivery.state, views::AlertDeliveryState::Pending); expect_delivery_attempts( &delivery.attempts, &[ @@ -1028,8 +1024,8 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": id, + "alert_class": "test.foo", + "alert_id": id, "data": { "hello_world": true, } @@ -1040,7 +1036,7 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-id", id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1063,11 +1059,9 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { mock.assert_async().await; // Make sure the deliveries endpoint correctly records the request history. - let deliveries = webhook_deliveries_list( - &cptestctx.external_client, - webhook.identity.id, - ) - .await; + let deliveries = + alert_deliveries_list(&cptestctx.external_client, webhook.identity.id) + .await; assert_eq!( deliveries.all_items.len(), 1, @@ -1075,10 +1069,10 @@ async fn test_retry_backoff(cptestctx: &ControlPlaneTestContext) { deliveries.all_items ); let delivery = dbg!(&deliveries.all_items[0]); - assert_eq!(delivery.webhook_id.into_untyped_uuid(), webhook.identity.id); - assert_eq!(delivery.event_id, id); - assert_eq!(delivery.event_class, "test.foo"); - assert_eq!(delivery.state, views::WebhookDeliveryState::Delivered); + assert_eq!(delivery.receiver_id.into_untyped_uuid(), webhook.identity.id); + assert_eq!(delivery.alert_id, id); + assert_eq!(delivery.alert_class, "test.foo"); + assert_eq!(delivery.state, views::AlertDeliveryState::Delivered); expect_delivery_attempts( &delivery.attempts, &[ @@ -1106,10 +1100,10 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { let webhook = webhook_create(&cptestctx, &my_great_webhook_params(&server)).await; dbg!(&webhook); - let rx_id = WebhookReceiverUuid::from_untyped_uuid(webhook.identity.id); + let rx_id = AlertReceiverUuid::from_untyped_uuid(webhook.identity.id); let body = serde_json::json!({ - "event_class": "probe", + "alert_class": "probe", "data": {} }) .to_string(); @@ -1125,7 +1119,7 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-class", "probe") .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1142,20 +1136,27 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { }; // Send a probe. 
The probe should fail due to a timeout. - let probe1 = - webhook_send_probe(&cptestctx, &rx_id, false, http::StatusCode::OK) - .await; + let probe1 = alert_receiver_send_probe( + &cptestctx, + &rx_id, + false, + http::StatusCode::OK, + ) + .await; dbg!(&probe1); mock.assert_async().await; - assert_eq!( - probe1.probe.attempts[0].result, - views::WebhookDeliveryAttemptResult::FailedTimeout + expect_delivery_attempts( + &probe1.probe.attempts, + &[ExpectAttempt { + result: views::WebhookDeliveryAttemptResult::FailedTimeout, + status: None, + }], ); - assert_eq!(probe1.probe.event_class, "probe"); - assert_eq!(probe1.probe.trigger, views::WebhookDeliveryTrigger::Probe); - assert_eq!(probe1.probe.state, views::WebhookDeliveryState::Failed); + assert_eq!(probe1.probe.alert_class, "probe"); + assert_eq!(probe1.probe.trigger, views::AlertDeliveryTrigger::Probe); + assert_eq!(probe1.probe.state, views::AlertDeliveryState::Failed); assert_eq!( probe1.resends_started, None, "we did not request events be resent" @@ -1172,7 +1173,7 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-class", "probe") .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1181,19 +1182,26 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { .await }; - let probe2 = - webhook_send_probe(&cptestctx, &rx_id, false, http::StatusCode::OK) - .await; + let probe2 = alert_receiver_send_probe( + &cptestctx, + &rx_id, + false, + http::StatusCode::OK, + ) + .await; dbg!(&probe2); mock.assert_async().await; - assert_eq!( - probe2.probe.attempts[0].result, - views::WebhookDeliveryAttemptResult::FailedHttpError + expect_delivery_attempts( + &probe2.probe.attempts, + &[ExpectAttempt { + result: views::WebhookDeliveryAttemptResult::FailedHttpError, + status: Some(503), + }], ); - assert_eq!(probe2.probe.event_class, "probe"); - assert_eq!(probe2.probe.trigger, views::WebhookDeliveryTrigger::Probe); - assert_eq!(probe2.probe.state, views::WebhookDeliveryState::Failed); + assert_eq!(probe2.probe.alert_class, "probe"); + assert_eq!(probe2.probe.trigger, views::AlertDeliveryTrigger::Probe); + assert_eq!(probe2.probe.state, views::AlertDeliveryState::Failed); assert_ne!( probe2.probe.id, probe1.probe.id, "a new delivery ID should be assigned to each probe" @@ -1214,7 +1222,7 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { .header("x-oxide-event-class", "probe") .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1223,18 +1231,26 @@ async fn test_probe(cptestctx: &ControlPlaneTestContext) { .await }; - let probe3 = - webhook_send_probe(&cptestctx, &rx_id, false, http::StatusCode::OK) - .await; + let probe3 = alert_receiver_send_probe( + &cptestctx, + &rx_id, + false, + http::StatusCode::OK, + ) + .await; dbg!(&probe3); mock.assert_async().await; - assert_eq!( - probe3.probe.attempts[0].result, - views::WebhookDeliveryAttemptResult::Succeeded + + expect_delivery_attempts( + &probe3.probe.attempts, + &[ExpectAttempt { + result: views::WebhookDeliveryAttemptResult::Succeeded, + status: Some(200), + }], ); - assert_eq!(probe3.probe.event_class, "probe"); - assert_eq!(probe3.probe.trigger, views::WebhookDeliveryTrigger::Probe); - assert_eq!(probe3.probe.state, views::WebhookDeliveryState::Delivered); + 
assert_eq!(probe3.probe.alert_class, "probe"); + assert_eq!(probe3.probe.trigger, views::AlertDeliveryTrigger::Probe); + assert_eq!(probe3.probe.state, views::AlertDeliveryState::Delivered); assert_ne!( probe3.probe.id, probe1.probe.id, "a new delivery ID should be assigned to each probe" @@ -1266,8 +1282,8 @@ async fn test_probe_resends_failed_deliveries( webhook_create(&cptestctx, &my_great_webhook_params(&server)).await; dbg!(&webhook); - let event1_id = WebhookEventUuid::new_v4(); - let event2_id = WebhookEventUuid::new_v4(); + let event1_id = AlertUuid::new_v4(); + let event2_id = AlertUuid::new_v4(); let mock = { let webhook = webhook.clone(); server @@ -1281,7 +1297,7 @@ async fn test_probe_resends_failed_deliveries( ) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )); then.status(500); @@ -1292,10 +1308,10 @@ async fn test_probe_resends_failed_deliveries( // Publish both events dbg!( nexus - .webhook_event_publish( + .alert_publish( &opctx, event1_id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello": "world"}), ) .await @@ -1303,17 +1319,17 @@ async fn test_probe_resends_failed_deliveries( ); dbg!( nexus - .webhook_event_publish( + .alert_publish( &opctx, event2_id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello": "emeryville"}), ) .await .expect("event2 should be published successfully") ); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -1341,7 +1357,7 @@ async fn test_probe_resends_failed_deliveries( server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "probe", + "alert_class": "probe", "data": { } }) @@ -1350,7 +1366,7 @@ async fn test_probe_resends_failed_deliveries( .header("x-oxide-event-class", "probe") .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1373,7 +1389,7 @@ async fn test_probe_resends_failed_deliveries( ) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )); then.status(200); @@ -1382,14 +1398,18 @@ async fn test_probe_resends_failed_deliveries( }; // Send a probe with ?resend=true - let rx_id = WebhookReceiverUuid::from_untyped_uuid(webhook.identity.id); - let probe = - webhook_send_probe(&cptestctx, &rx_id, true, http::StatusCode::OK) - .await; + let rx_id = AlertReceiverUuid::from_untyped_uuid(webhook.identity.id); + let probe = alert_receiver_send_probe( + &cptestctx, + &rx_id, + true, + http::StatusCode::OK, + ) + .await; dbg!(&probe); probe_mock.assert_async().await; probe_mock.delete_async().await; - assert_eq!(probe.probe.state, views::WebhookDeliveryState::Delivered); + assert_eq!(probe.probe.state, views::AlertDeliveryState::Delivered); assert_eq!(probe.resends_started, Some(2)); // Both events should be resent. 
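For orientation while reading the probe tests: the probe endpoint now hangs off the alert receiver, and the resend=true form also re-queues this receiver's failed deliveries, reported back in the response's resends_started field. A small sketch of the request target, with the receiver name purely illustrative:

/// Probe URL for an alert receiver. With `resend=true`, a successful probe
/// also re-queues any failed deliveries for this receiver; the response's
/// `resends_started` field reports how many were queued.
fn probe_url(receiver: &str, resend: bool) -> String {
    let query = if resend { "?resend=true" } else { "" };
    format!("/v1/alert-receivers/{receiver}/probe{query}")
}

// probe_url("my-great-webhook", true)
//     => "/v1/alert-receivers/my-great-webhook/probe?resend=true"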
@@ -1417,11 +1437,11 @@ async fn test_api_resends_failed_deliveries( webhook_create(&cptestctx, &my_great_webhook_params(&server)).await; dbg!(&webhook); - let event1_id = WebhookEventUuid::new_v4(); - let event2_id = WebhookEventUuid::new_v4(); + let event1_id = AlertUuid::new_v4(); + let event2_id = AlertUuid::new_v4(); let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": event1_id, + "alert_class": "test.foo", + "alert_id": event1_id, "data": { "hello_world": true, } @@ -1437,7 +1457,7 @@ async fn test_api_resends_failed_deliveries( .header("x-oxide-event-id", event1_id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1448,10 +1468,10 @@ async fn test_api_resends_failed_deliveries( // Publish an event let event1 = nexus - .webhook_event_publish( + .alert_publish( &opctx, event1_id, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"hello_world": true}), ) .await @@ -1460,17 +1480,17 @@ async fn test_api_resends_failed_deliveries( // Publish another event that our receiver is not subscribed to. let event2 = nexus - .webhook_event_publish( + .alert_publish( &opctx, event2_id, - WebhookEventClass::TestQuuxBar, + AlertClass::TestQuuxBar, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published successfully"); dbg!(event2); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -1497,7 +1517,7 @@ async fn test_api_resends_failed_deliveries( .header("x-oxide-event-id", event1_id.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1508,7 +1528,7 @@ async fn test_api_resends_failed_deliveries( // Try to resend event 1. let delivery = - webhook_delivery_resend(client, webhook.identity.id, event1_id).await; + alert_delivery_resend(client, webhook.identity.id, event1_id).await; dbg!(delivery); // Try to resend event 2. This should fail, as the receiver is not @@ -1551,8 +1571,8 @@ async fn subscription_add_test( let server = httpmock::MockServer::start_async().await; - let id1 = WebhookEventUuid::new_v4(); - let id2 = WebhookEventUuid::new_v4(); + let id1 = AlertUuid::new_v4(); + let id2 = AlertUuid::new_v4(); // Create a webhook receiver. let webhook = @@ -1564,8 +1584,8 @@ async fn subscription_add_test( server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo.bar", - "event_id": id2, + "alert_class": "test.foo.bar", + "alert_id": id2, "data": { "hello_world": true, } @@ -1576,7 +1596,7 @@ async fn subscription_add_test( .header("x-oxide-event-id", id2.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1588,32 +1608,32 @@ async fn subscription_add_test( // Publish an event. This should not be received, as we are not subscribed // to it. 
let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id1, - WebhookEventClass::TestFooBar, + AlertClass::TestFooBar, serde_json::json!({"hello_world": false}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); mock.assert_calls_async(0).await; - let rx_id = WebhookReceiverUuid::from_untyped_uuid(webhook.identity.id); + let rx_id = AlertReceiverUuid::from_untyped_uuid(webhook.identity.id); let new_subscription = - new_subscription.parse::().unwrap(); + new_subscription.parse::().unwrap(); dbg!(subscription_add(&cptestctx, rx_id, &new_subscription).await); // The new subscription should be there. - let rx = webhook_get( + let rx = alert_rx_get( &cptestctx.external_client, - &get_webhooks_url(webhook.identity.id), + &alert_rx_url(webhook.identity.id), ) .await; dbg!(&rx); @@ -1621,17 +1641,17 @@ async fn subscription_add_test( // Publish an event. This one should make it through. let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id2, - WebhookEventClass::TestFooBar, + AlertClass::TestFooBar, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -1668,14 +1688,14 @@ async fn subscription_remove_test( let server = httpmock::MockServer::start_async().await; - let id1 = WebhookEventUuid::new_v4(); - let id2 = WebhookEventUuid::new_v4(); - let id3 = WebhookEventUuid::new_v4(); + let id1 = AlertUuid::new_v4(); + let id2 = AlertUuid::new_v4(); + let id3 = AlertUuid::new_v4(); let other_subscription = - "test.foo".parse::().unwrap(); + "test.foo".parse::().unwrap(); let deleted_subscription = - deleted_subscription.parse::().unwrap(); + deleted_subscription.parse::().unwrap(); // Create a webhook receiver. let webhook = webhook_create( @@ -1696,8 +1716,8 @@ async fn subscription_remove_test( server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo.bar", - "event_id": id1, + "alert_class": "test.foo.bar", + "alert_id": id1, "data": { "hello_world": true, } @@ -1708,7 +1728,7 @@ async fn subscription_remove_test( .header("x-oxide-event-id", id1.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1720,30 +1740,30 @@ async fn subscription_remove_test( // Publish an event. This should be received, as it matches the subscription // we are about to delete. 
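The subscription endpoints exercised by these add/remove tests live under the alert receiver: a POST to .../subscriptions with an AlertSubscriptionCreate body adds a glob, and a DELETE to .../subscriptions/{subscription} removes it; the glob itself must satisfy AlertSubscription's pattern of dot-separated alphanumeric/underscore segments, `*`, or `**`. A sketch of the request targets, with receiver name and glob illustrative (the tests pass the glob unencoded in the path):

use serde_json::json;

/// POST target and body for adding a subscription to an alert receiver.
fn subscription_add_request(receiver: &str, glob: &str) -> (String, serde_json::Value) {
    (
        format!("/v1/alert-receivers/{receiver}/subscriptions"),
        json!({ "subscription": glob }),
    )
}

/// DELETE target for removing that subscription again.
fn subscription_delete_url(receiver: &str, glob: &str) -> String {
    format!("/v1/alert-receivers/{receiver}/subscriptions/{glob}")
}

// subscription_add_request("my-great-webhook", "test.foo.**")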
let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id1, - WebhookEventClass::TestFooBar, + AlertClass::TestFooBar, serde_json::json!({"hello_world": true}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); mock.assert_calls_async(1).await; - let rx_id = WebhookReceiverUuid::from_untyped_uuid(webhook.identity.id); + let rx_id = AlertReceiverUuid::from_untyped_uuid(webhook.identity.id); dbg!(subscription_remove(&cptestctx, rx_id, &deleted_subscription).await); // The deleted subscription should no longer be there. - let rx = webhook_get( + let rx = alert_rx_get( &cptestctx.external_client, - &get_webhooks_url(webhook.identity.id), + &alert_rx_url(webhook.identity.id), ) .await; dbg!(&rx); @@ -1752,17 +1772,17 @@ async fn subscription_remove_test( // Publish an event. This one should not be received, as we are no longer // subscribed to its event class. let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id2, - WebhookEventClass::TestFooBar, + AlertClass::TestFooBar, serde_json::json!({"hello_world": false}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); @@ -1777,8 +1797,8 @@ async fn subscription_remove_test( server .mock_async(move |when, then| { let body = serde_json::json!({ - "event_class": "test.foo", - "event_id": id3, + "alert_class": "test.foo", + "alert_id": id3, "data": { "whatever": 1 } @@ -1789,7 +1809,7 @@ async fn subscription_remove_test( .header("x-oxide-event-id", id3.to_string()) .and(is_valid_for_webhook(&webhook)) .is_true(signature_verifies( - webhook.secrets[0].id, + webhook.config.secrets[0].id, MY_COOL_SECRET.as_bytes().to_vec(), )) .json_body_includes(body); @@ -1799,17 +1819,17 @@ async fn subscription_remove_test( }; let event = nexus - .webhook_event_publish( + .alert_publish( &opctx, id3, - WebhookEventClass::TestFoo, + AlertClass::TestFoo, serde_json::json!({"whatever": 1}), ) .await .expect("event should be published successfully"); dbg!(event); - dbg!(activate_background_task(internal_client, "webhook_dispatcher").await); + dbg!(activate_background_task(internal_client, "alert_dispatcher").await); dbg!( activate_background_task(internal_client, "webhook_deliverator").await ); diff --git a/nexus/tests/output/uncovered-authz-endpoints.txt b/nexus/tests/output/uncovered-authz-endpoints.txt index 258d9065fe6..7dfe8735af9 100644 --- a/nexus/tests/output/uncovered-authz-endpoints.txt +++ b/nexus/tests/output/uncovered-authz-endpoints.txt @@ -16,7 +16,7 @@ device_auth_confirm (post "/device/confirm") device_access_token (post "/device/token") probe_create (post "/experimental/v1/probes") login_saml (post "/login/{silo_name}/saml/{provider_name}") +alert_delivery_resend (post "/v1/alerts/{alert_id}/resend") login_local (post "/v1/login/{silo_name}/local") logout (post "/v1/logout") networking_switch_port_lldp_config_update (post "/v1/system/hardware/switch-port/{port}/lldp/config") -webhook_delivery_resend (post "/v1/webhooks/deliveries/{event_id}/resend") diff --git a/nexus/types/src/external_api/params.rs 
b/nexus/types/src/external_api/params.rs index 337b335e68d..edf464a5626 100644 --- a/nexus/types/src/external_api/params.rs +++ b/nexus/types/src/external_api/params.rs @@ -2381,26 +2381,41 @@ pub struct DeviceAccessTokenRequest { pub client_id: Uuid, } -// Webhooks +// Alerts -/// Query params for listing webhook event classes. +/// Query params for listing alert classes. #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct EventClassFilter { - /// An optional glob pattern for filtering event class names. +pub struct AlertClassFilter { + /// An optional glob pattern for filtering alert class names. /// - /// If provided, only event classes which match this glob pattern will be + /// If provided, only alert classes which match this glob pattern will be /// included in the response. - pub filter: Option, + pub filter: Option, +} + +#[derive(Deserialize, JsonSchema)] +pub struct AlertSelector { + /// UUID of the alert + pub alert_id: Uuid, +} + +#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] +pub struct AlertSubscriptionSelector { + /// The webhook receiver that the subscription is attached to. + #[serde(flatten)] + pub receiver: AlertReceiverSelector, + /// The event class subscription itself. + pub subscription: shared::AlertSubscription, } #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct EventClassPage { +pub struct AlertClassPage { /// The last webhook event class returned by a previous page. pub last_seen: String, } #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookReceiverSelector { +pub struct AlertReceiverSelector { /// The name or ID of the webhook receiver. pub receiver: NameOrId, } @@ -2421,7 +2436,7 @@ pub struct WebhookCreate { /// If this list is empty or is not included in the request body, the /// webhook will not be subscribed to any events. #[serde(default)] - pub subscriptions: Vec, + pub subscriptions: Vec, } /// Parameters to update a webhook configuration. @@ -2435,9 +2450,9 @@ pub struct WebhookReceiverUpdate { } #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookSubscriptionCreate { +pub struct AlertSubscriptionCreate { /// The event class pattern to subscribe to. - pub subscription: shared::WebhookSubscription, + pub subscription: shared::AlertSubscription, } #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] @@ -2452,23 +2467,8 @@ pub struct WebhookSecretSelector { pub secret_id: Uuid, } -#[derive(Deserialize, JsonSchema)] -pub struct WebhookEventSelector { - /// UUID of the event - pub event_id: Uuid, -} - -#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookSubscriptionSelector { - /// The webhook receiver that the subscription is attached to. - #[serde(flatten)] - pub receiver: WebhookReceiverSelector, - /// The event class subscription itself. - pub subscription: shared::WebhookSubscription, -} - #[derive(Copy, Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookDeliveryStateFilter { +pub struct AlertDeliveryStateFilter { /// If true, include deliveries which are currently in progress. 
/// /// If any of the "pending", "failed", or "delivered" query parameters are @@ -2499,13 +2499,13 @@ pub struct WebhookDeliveryStateFilter { pub delivered: Option<bool>, } -impl Default for WebhookDeliveryStateFilter { +impl Default for AlertDeliveryStateFilter { fn default() -> Self { Self::ALL } } -impl WebhookDeliveryStateFilter { +impl AlertDeliveryStateFilter { pub const ALL: Self = Self { pending: Some(true), failed: Some(true), delivered: Some(true) }; @@ -2536,7 +2536,7 @@ impl WebhookDeliveryStateFilter { } #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookProbe { +pub struct AlertReceiverProbe { /// If true, resend all events that have not been delivered successfully if /// the probe request succeeds. #[serde(default)] diff --git a/nexus/types/src/external_api/shared.rs b/nexus/types/src/external_api/shared.rs index 1db01c3d1cc..151633d52ba 100644 --- a/nexus/types/src/external_api/shared.rs +++ b/nexus/types/src/external_api/shared.rs @@ -377,25 +377,25 @@ impl JsonSchema for SwitchLinkState { #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(try_from = "String")] #[serde(into = "String")] -pub struct WebhookSubscription(String); +pub struct AlertSubscription(String); -impl WebhookSubscription { +impl AlertSubscription { const PATTERN: &str = r"^([a-zA-Z0-9_]+|\*|\*\*)(\.([a-zA-Z0-9_]+|\*|\*\*))*$"; fn is_valid(s: &str) -> Result<(), anyhow::Error> { static REGEX: std::sync::LazyLock<regex::Regex> = std::sync::LazyLock::new(|| { - regex::Regex::new(WebhookSubscription::PATTERN).expect( - "WebhookSubscription validation regex should be valid", + regex::Regex::new(AlertSubscription::PATTERN).expect( + "AlertSubscription validation regex should be valid", ) }); if REGEX.is_match(s) { Ok(()) } else { Err(anyhow::anyhow!( - "webhook subscription {s:?} does not match the pattern {}", - WebhookSubscription::PATTERN + "alert subscription {s:?} does not match the pattern {}", + AlertSubscription::PATTERN )) } } @@ -406,7 +406,7 @@ impl WebhookSubscription { } } -impl TryFrom<String> for WebhookSubscription { +impl TryFrom<String> for AlertSubscription { type Error = anyhow::Error; fn try_from(s: String) -> Result<Self, Self::Error> { Self::is_valid(&s)?; @@ -414,7 +414,7 @@ impl TryFrom<String> for WebhookSubscription { } } -impl std::str::FromStr for WebhookSubscription { +impl std::str::FromStr for AlertSubscription { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { Self::is_valid(s)?; @@ -422,21 +422,21 @@ impl std::str::FromStr for WebhookSubscription { } } -impl From<WebhookSubscription> for String { - fn from(WebhookSubscription(s): WebhookSubscription) -> Self { +impl From<AlertSubscription> for String { + fn from(AlertSubscription(s): AlertSubscription) -> Self { s } } -impl AsRef<str> for WebhookSubscription { +impl AsRef<str> for AlertSubscription { fn as_ref(&self) -> &str { self.as_str() } } -impl JsonSchema for WebhookSubscription { +impl JsonSchema for AlertSubscription { fn schema_name() -> String { - "WebhookSubscription".to_string() + "AlertSubscription".to_string() } fn json_schema( @@ -455,7 +455,7 @@ impl JsonSchema for WebhookSubscription { string: Some(Box::new(schemars::schema::StringValidation { max_length: None, min_length: None, - pattern: Some(WebhookSubscription::PATTERN.to_string()), + pattern: Some(AlertSubscription::PATTERN.to_string()), })), ..Default::default() } @@ -463,7 +463,7 @@ impl JsonSchema for WebhookSubscription { } } -impl std::fmt::Display for WebhookSubscription { +impl std::fmt::Display for AlertSubscription { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.as_str()) } @@ -471,9 +471,9 @@ impl std::fmt::Display for WebhookSubscription { #[cfg(test)] mod test { + use super::AlertSubscription; use super::MAX_ROLE_ASSIGNMENTS_PER_RESOURCE; use super::Policy; - use super::WebhookSubscription; use serde::Deserialize; #[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq)] @@ -550,7 +550,7 @@ mod test { "foo.[barbaz]", ]; for s in successes { - match s.parse::<WebhookSubscription>() { + match s.parse::<AlertSubscription>() { Ok(_) => {} Err(e) => panic!( "expected string {s:?} to be a valid webhook subscription: {e}" @@ -559,7 +559,7 @@ mod test { } for s in failures { - match s.parse::<WebhookSubscription>() { + match s.parse::<AlertSubscription>() { Ok(_) => panic!( "expected string {s:?} to NOT be a valid webhook subscription" ), diff --git a/nexus/types/src/external_api/views.rs b/nexus/types/src/external_api/views.rs index 84a00476049..b62f8b0f4ee 100644 --- a/nexus/types/src/external_api/views.rs +++ b/nexus/types/src/external_api/views.rs @@ -17,7 +17,7 @@ use omicron_common::api::external::{ Digest, Error, FailureDomain, IdentityMetadata, InstanceState, Name, ObjectIdentity, RoleName, SimpleIdentityOrName, }; -use omicron_uuid_kinds::{WebhookEventUuid, WebhookReceiverUuid}; +use omicron_uuid_kinds::{AlertReceiverUuid, AlertUuid}; use oxnet::{Ipv4Net, Ipv6Net}; use schemars::JsonSchema; use semver::Version; @@ -1055,19 +1055,47 @@ pub struct OxqlQueryResult { pub tables: Vec, } -// WEBHOOKS +// ALERTS -/// A webhook event class. +/// An alert class. #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct EventClass { - /// The name of the event class. +pub struct AlertClass { + /// The name of the alert class. pub name: String, - /// A description of what this event class represents. + /// A description of what this alert class represents. pub description: String, } -/// The configuration for a webhook. +/// The configuration for an alert receiver. +#[derive( + ObjectIdentity, Clone, Debug, Deserialize, Serialize, JsonSchema, PartialEq, +)] +pub struct AlertReceiver { + #[serde(flatten)] + pub identity: IdentityMetadata, + + /// The list of alert classes to which this receiver is subscribed. + pub subscriptions: Vec<shared::AlertSubscription>, + + /// Configuration specific to the kind of alert receiver that this is. + pub kind: AlertReceiverKind, +} + +#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] +pub struct AlertSubscriptionCreated { + /// The new subscription added to the receiver. + pub subscription: shared::AlertSubscription, +} + +/// The possible alert delivery mechanisms for an alert receiver. +#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum AlertReceiverKind { + Webhook(WebhookReceiverConfig), +} + +/// The configuration for a webhook alert receiver. #[derive( ObjectIdentity, Clone, Debug, Deserialize, Serialize, JsonSchema, PartialEq, )] @@ -1075,22 +1103,62 @@ pub struct WebhookReceiver { #[serde(flatten)] pub identity: IdentityMetadata, + /// The list of alert classes to which this receiver is subscribed. + pub subscriptions: Vec<shared::AlertSubscription>, + + #[serde(flatten)] + pub config: WebhookReceiverConfig, +} + +impl From<WebhookReceiver> for AlertReceiver { + fn from( + WebhookReceiver { identity, subscriptions, config }: WebhookReceiver, + ) -> Self { + Self { + identity, + subscriptions, + kind: AlertReceiverKind::Webhook(config), + } + } +} + +impl PartialEq<WebhookReceiver> for AlertReceiver { + fn eq(&self, other: &WebhookReceiver) -> bool { + // Will become refutable if/when more variants are added...
+ #[allow(irrefutable_let_patterns)] + let AlertReceiverKind::Webhook(ref config) = self.kind else { + return false; + }; + self.identity == other.identity + && self.subscriptions == other.subscriptions + && config == &other.config + } +} + +impl PartialEq<AlertReceiver> for WebhookReceiver { + fn eq(&self, other: &AlertReceiver) -> bool { + // Will become refutable if/when more variants are added... + #[allow(irrefutable_let_patterns)] + let AlertReceiverKind::Webhook(ref config) = other.kind else { + return false; + }; + self.identity == other.identity + && self.subscriptions == other.subscriptions + && &self.config == config + } +} + +/// Webhook-specific alert receiver configuration. +#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema, PartialEq)] +pub struct WebhookReceiverConfig { /// The URL that webhook notification requests are sent to. pub endpoint: Url, // A list containing the IDs of the secret keys used to sign payloads sent // to this receiver. pub secrets: Vec, - /// The list of event classes to which this receiver is subscribed. - pub subscriptions: Vec<shared::WebhookSubscription>, -} - -#[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] -pub struct WebhookSubscriptionCreated { - /// The new subscription added to the receiver. - pub subscription: shared::WebhookSubscription, } -/// A list of the IDs of secrets associated with a webhook. +/// A list of the IDs of secrets associated with a webhook receiver. #[derive(Clone, Debug, Deserialize, Serialize, JsonSchema)] pub struct WebhookSecrets { pub secrets: Vec, @@ -1112,27 +1180,27 @@ pub struct WebhookSecret { /// A delivery of a webhook event. #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, JsonSchema)] -pub struct WebhookDelivery { +pub struct AlertDelivery { /// The UUID of this delivery attempt. pub id: Uuid, - /// The UUID of the webhook receiver that this event was delivered to. - pub webhook_id: WebhookReceiverUuid, + /// The UUID of the alert receiver that this event was delivered to. + pub receiver_id: AlertReceiverUuid, /// The event class. - pub event_class: String, + pub alert_class: String, /// The UUID of the event. - pub event_id: WebhookEventUuid, + pub alert_id: AlertUuid, /// The state of this delivery. - pub state: WebhookDeliveryState, + pub state: AlertDeliveryState, /// Why this delivery was performed. - pub trigger: WebhookDeliveryTrigger, + pub trigger: AlertDeliveryTrigger, /// Individual attempts to deliver this webhook event, and their outcomes. - pub attempts: Vec<WebhookDeliveryAttempt>, + pub attempts: AlertDeliveryAttempts, /// The time at which this delivery began (i.e. the event was dispatched to /// the receiver). @@ -1152,7 +1220,7 @@ pub struct WebhookDelivery { strum::VariantArray, )] #[serde(rename_all = "snake_case")] -pub enum WebhookDeliveryState { +pub enum AlertDeliveryState { /// The webhook event has not yet been delivered successfully.
/// /// Either no delivery attempts have yet been performed, or the delivery has @@ -1165,7 +1233,7 @@ pub enum WebhookDeliveryState { Failed, } -impl WebhookDeliveryState { +impl AlertDeliveryState { pub const ALL: &[Self] = <Self as strum::VariantArray>::VARIANTS; pub fn as_str(&self) -> &'static str { @@ -1177,28 +1245,28 @@ impl WebhookDeliveryState { } } -impl fmt::Display for WebhookDeliveryState { +impl fmt::Display for AlertDeliveryState { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } -impl std::str::FromStr for WebhookDeliveryState { +impl std::str::FromStr for AlertDeliveryState { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { static EXPECTED_ONE_OF: LazyLock<String> = - LazyLock::new(expected_one_of::<WebhookDeliveryState>); + LazyLock::new(expected_one_of::<AlertDeliveryState>); for &v in Self::ALL { if s.trim().eq_ignore_ascii_case(v.as_str()) { return Ok(v); } } - Err(Error::invalid_value("WebhookDeliveryState", &*EXPECTED_ONE_OF)) + Err(Error::invalid_value("AlertDeliveryState", &*EXPECTED_ONE_OF)) } } -/// The reason a webhook event was delivered +/// The reason an alert was delivered #[derive( Copy, Clone, @@ -1211,46 +1279,59 @@ impl std::str::FromStr for WebhookDeliveryState { strum::VariantArray, )] #[serde(rename_all = "snake_case")] -pub enum WebhookDeliveryTrigger { - /// Delivery was triggered by the event occurring for the first time. - Event, - /// Delivery was triggered by a request to resend the event. +pub enum AlertDeliveryTrigger { + /// Delivery was triggered by the alert itself. + Alert, + /// Delivery was triggered by a request to resend the alert. Resend, /// This delivery is a liveness probe. Probe, } -impl WebhookDeliveryTrigger { +impl AlertDeliveryTrigger { pub fn as_str(&self) -> &'static str { match self { - Self::Event => "event", + Self::Alert => "alert", Self::Resend => "resend", Self::Probe => "probe", } } } -impl fmt::Display for WebhookDeliveryTrigger { +impl fmt::Display for AlertDeliveryTrigger { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } -impl std::str::FromStr for WebhookDeliveryTrigger { +impl std::str::FromStr for AlertDeliveryTrigger { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { static EXPECTED_ONE_OF: LazyLock<String> = - LazyLock::new(expected_one_of::<WebhookDeliveryTrigger>); + LazyLock::new(expected_one_of::<AlertDeliveryTrigger>); for &v in <Self as strum::VariantArray>::VARIANTS { if s.trim().eq_ignore_ascii_case(v.as_str()) { return Ok(v); } } - Err(Error::invalid_value("WebhookDeliveryTrigger", &*EXPECTED_ONE_OF)) + Err(Error::invalid_value("AlertDeliveryTrigger", &*EXPECTED_ONE_OF)) } } +/// A list of attempts to deliver an alert to a receiver. +/// +/// The type of the delivery attempt model depends on the receiver type, as it +/// may contain information specific to that delivery mechanism. For example, +/// webhook delivery attempts contain the HTTP status code of the webhook +/// request. +#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum AlertDeliveryAttempts { + /// A list of attempts to deliver an alert to a webhook receiver. + Webhook(Vec<WebhookDeliveryAttempt>), +} + /// An individual delivery attempt for a webhook event. /// /// This represents a single HTTP request that was sent to the receiver, and its @@ -1330,15 +1411,15 @@ pub struct WebhookDeliveryResponse { } #[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize, JsonSchema)] -pub struct WebhookDeliveryId { +pub struct AlertDeliveryId { pub delivery_id: Uuid, } -/// Data describing the result of a webhook liveness probe attempt.
+/// Data describing the result of an alert receiver liveness probe attempt. #[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize, JsonSchema)] -pub struct WebhookProbeResult { - /// The outcome of the probe request. - pub probe: WebhookDelivery, +pub struct AlertProbeResult { + /// The outcome of the probe delivery. + pub probe: AlertDelivery, /// If the probe request succeeded, and resending failed deliveries on /// success was requested, the number of new delivery attempts started. /// Otherwise, if the probe did not succeed, or resending failed deliveries diff --git a/nexus/types/src/internal_api/background.rs b/nexus/types/src/internal_api/background.rs index 2bbd73e8191..96eae23ff9d 100644 --- a/nexus/types/src/internal_api/background.rs +++ b/nexus/types/src/internal_api/background.rs @@ -6,13 +6,13 @@ use crate::external_api::views; use chrono::DateTime; use chrono::Utc; use omicron_common::api::external::Generation; +use omicron_uuid_kinds::AlertReceiverUuid; +use omicron_uuid_kinds::AlertUuid; use omicron_uuid_kinds::BlueprintUuid; use omicron_uuid_kinds::CollectionUuid; use omicron_uuid_kinds::SledUuid; use omicron_uuid_kinds::SupportBundleUuid; use omicron_uuid_kinds::WebhookDeliveryUuid; -use omicron_uuid_kinds::WebhookEventUuid; -use omicron_uuid_kinds::WebhookReceiverUuid; use serde::Deserialize; use serde::Serialize; use std::collections::BTreeMap; @@ -455,27 +455,27 @@ impl slog::KV for DebugDatasetsRendezvousStats { } } -/// The status of a `webhook_dispatcher` background task activation. +/// The status of an `alert_dispatcher` background task activation. #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub struct WebhookDispatcherStatus { - pub globs_reprocessed: BTreeMap<WebhookReceiverUuid, ReprocessedGlobs>, +pub struct AlertDispatcherStatus { + pub globs_reprocessed: BTreeMap<AlertReceiverUuid, ReprocessedGlobs>, pub glob_version: semver::Version, - /// The webhook events dispatched on this activation. - pub dispatched: Vec<WebhookDispatched>, + /// The alerts dispatched on this activation. + pub dispatched: Vec<AlertDispatched>, - /// Webhook events which did not have receivers. - pub no_receivers: Vec<WebhookEventUuid>, + /// Alerts which did not have receivers. + pub no_receivers: Vec<AlertUuid>, /// Any errors that occurred during activation.
pub errors: Vec, } -type ReprocessedGlobs = BTreeMap>; +type ReprocessedGlobs = BTreeMap>; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub enum WebhookGlobStatus { +pub enum AlertGlobStatus { AlreadyReprocessed, Reprocessed { created: usize, @@ -485,15 +485,15 @@ pub enum WebhookGlobStatus { } #[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub struct WebhookDispatched { - pub event_id: WebhookEventUuid, +pub struct AlertDispatched { + pub alert_id: AlertUuid, pub subscribed: usize, pub dispatched: usize, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WebhookDeliveratorStatus { - pub by_rx: BTreeMap, + pub by_rx: BTreeMap, pub error: Option, } @@ -511,7 +511,7 @@ pub struct WebhookRxDeliveryStatus { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WebhookDeliveryFailure { pub delivery_id: WebhookDeliveryUuid, - pub event_id: WebhookEventUuid, + pub alert_id: AlertUuid, pub attempt: usize, pub result: views::WebhookDeliveryAttemptResult, pub response_status: Option, diff --git a/openapi/nexus.json b/openapi/nexus.json index 72b688a861c..9eaf3f40302 100644 --- a/openapi/nexus.json +++ b/openapi/nexus.json @@ -1159,13 +1159,13 @@ } } }, - "/v1/anti-affinity-groups": { + "/v1/alert-classes": { "get": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "List anti-affinity groups", - "operationId": "anti_affinity_group_list", + "summary": "List alert classes", + "operationId": "alert_class_list", "parameters": [ { "in": "query", @@ -1189,17 +1189,10 @@ }, { "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "sort_by", + "name": "filter", + "description": "An optional glob pattern for filtering alert class names.\n\nIf provided, only alert classes which match this glob pattern will be included in the response.", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "$ref": "#/components/schemas/AlertSubscription" } } ], @@ -1209,7 +1202,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupResultsPage" + "$ref": "#/components/schemas/AlertClassResultsPage" } } } @@ -1222,81 +1215,43 @@ } }, "x-dropshot-pagination": { - "required": [ - "project" - ] + "required": [] } - }, - "post": { + } + }, + "/v1/alert-receivers": { + "get": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Create anti-affinity group", - "operationId": "anti_affinity_group_create", + "summary": "List alert receivers", + "operationId": "alert_receiver_list", "parameters": [ { "in": "query", - "name": "project", - "description": "Name or ID of the project", - "required": true, + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupCreate" - } - } - }, - "required": true - }, - "responses": { - "201": { - "description": "successful creation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AntiAffinityGroup" - } - } + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/anti-affinity-groups/{anti_affinity_group}": { - "get": { - "tags": [ - "affinity" - ], 
- "summary": "Fetch anti-affinity group", - "operationId": "anti_affinity_group_view", - "parameters": [ { "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" } }, { - "in": "path", - "name": "anti_affinity_group", - "description": "Name or ID of the anti affinity group", - "required": true, + "in": "query", + "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -1306,7 +1261,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroup" + "$ref": "#/components/schemas/AlertReceiverResultsPage" } } } @@ -1317,50 +1272,37 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } - }, - "put": { + } + }, + "/v1/alert-receivers/{receiver}": { + "get": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Update anti-affinity group", - "operationId": "anti_affinity_group_update", + "summary": "Fetch alert receiver", + "operationId": "alert_receiver_view", "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "path", - "name": "anti_affinity_group", - "description": "Name or ID of the anti affinity group", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupUpdate" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroup" + "$ref": "#/components/schemas/AlertReceiver" } } } @@ -1375,23 +1317,15 @@ }, "delete": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Delete anti-affinity group", - "operationId": "anti_affinity_group_delete", + "summary": "Delete alert receiver", + "operationId": "alert_receiver_delete", "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "path", - "name": "anti_affinity_group", - "description": "Name or ID of the anti affinity group", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -1411,56 +1345,76 @@ } } }, - "/v1/anti-affinity-groups/{anti_affinity_group}/members": { + "/v1/alert-receivers/{receiver}/deliveries": { "get": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "List anti-affinity group members", - "operationId": "anti_affinity_group_member_list", + "summary": "List delivery attempts to alert receiver", + "description": "Optional query parameters to this endpoint may be used to filter deliveries by state. If none of the `failed`, `pending` or `delivered` query parameters are present, all deliveries are returned. 
If one or more of these parameters are provided, only those which are set to \"true\" are included in the response.", + "operationId": "alert_delivery_list", "parameters": [ + { + "in": "path", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "name": "delivered", + "description": "If true, include deliveries which have succeeded.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. If NO state filter parameters are set, then all deliveries are included.", "schema": { "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "type": "boolean" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "failed", + "description": "If true, include deliveries which have failed permanently.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. If NO state filter parameters are set, then all deliveries are included.\n\nA delivery fails permanently when the retry limit of three total attempts is reached without a successful delivery.", "schema": { "nullable": true, - "type": "string" + "type": "boolean" } }, { "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "pending", + "description": "If true, include deliveries which are currently in progress.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. 
If NO state filter parameters are set, then all deliveries are included.\n\nA delivery is considered \"pending\" if it has not yet been sent at all, or if a delivery attempt has failed but the delivery has retries remaining.", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "boolean" } }, { "in": "query", - "name": "sort_by", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { - "in": "path", - "name": "anti_affinity_group", - "description": "Name or ID of the anti affinity group", - "required": true, + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/TimeAndIdSortMode" } } ], @@ -1470,7 +1424,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupMemberResultsPage" + "$ref": "#/components/schemas/AlertDeliveryResultsPage" } } } @@ -1487,36 +1441,30 @@ } } }, - "/v1/anti-affinity-groups/{anti_affinity_group}/members/instance/{instance}": { - "get": { + "/v1/alert-receivers/{receiver}/probe": { + "post": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Fetch anti-affinity group member", - "operationId": "anti_affinity_group_member_instance_view", + "summary": "Send liveness probe to alert receiver", + "description": "This endpoint synchronously sends a liveness probe to the selected alert receiver. The response message describes the outcome of the probe: either the successful response (as appropriate), or indication of why the probe failed.\n\nThe result of the probe is represented as an `AlertDelivery` model. Details relating to the status of the probe depend on the alert delivery mechanism, and are included in the `AlertDeliveryAttempts` model. For example, webhook receiver liveness probes include the HTTP status code returned by the receiver endpoint.\n\nNote that the response status is `200 OK` as long as a probe request was able to be sent to the receiver endpoint. If an HTTP-based receiver, such as a webhook, responds to the another status code, including an error, this will be indicated by the response body, *not* the status of the response.\n\nThe `resend` query parameter can be used to request re-delivery of failed events if the liveness probe succeeds. 
If it is set to true and the liveness probe succeeds, any alerts for which delivery to this receiver has failed will be queued for re-delivery.", + "operationId": "alert_receiver_probe", "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "path", - "name": "anti_affinity_group", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "path", - "name": "instance", - "required": true, + "in": "query", + "name": "resend", + "description": "If true, resend all events that have not been delivered successfully if the probe request succeeds.", "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "boolean" } } ], @@ -1526,7 +1474,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupMember" + "$ref": "#/components/schemas/AlertProbeResult" } } } @@ -1538,46 +1486,43 @@ "$ref": "#/components/responses/Error" } } - }, + } + }, + "/v1/alert-receivers/{receiver}/subscriptions": { "post": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Add member to anti-affinity group", - "operationId": "anti_affinity_group_member_instance_add", + "summary": "Add alert receiver subscription", + "operationId": "alert_receiver_subscription_add", "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "path", - "name": "anti_affinity_group", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "path", - "name": "instance", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "responses": { - "201": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlertSubscriptionCreate" + } + } + }, + "required": true + }, + "responses": { + "201": { "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupMember" + "$ref": "#/components/schemas/AlertSubscriptionCreated" } } } @@ -1589,33 +1534,70 @@ "$ref": "#/components/responses/Error" } } - }, + } + }, + "/v1/alert-receivers/{receiver}/subscriptions/{subscription}": { "delete": { "tags": [ - "affinity" + "system/alerts" ], - "summary": "Remove member from anti-affinity group", - "operationId": "anti_affinity_group_member_instance_delete", + "summary": "Remove alert receiver subscription", + "operationId": "alert_receiver_subscription_remove", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { "in": "path", - "name": "anti_affinity_group", + "name": "subscription", + "description": "The event class subscription itself.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/AlertSubscription" } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + 
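The probe and resend operations described above are easiest to read together: a probe with `resend=true` exercises the receiver and, if it succeeds, queues re-delivery of everything that previously failed, while `/v1/alerts/{alert_id}/resend` targets a single alert. The sketch below is illustrative only and not part of this change; it assumes a plain `reqwest` client (with the `json` feature), a `base_url` and bearer `token` supplied by the caller, and it reads the response bodies as untyped `serde_json::Value` rather than generated SDK types.

// Illustrative sketch: drive `alert_receiver_probe` and `alert_delivery_resend`
// with a bare HTTP client. `base_url`, `token`, `receiver`, and `alert_id` are
// assumed inputs; a real client would normally use the generated SDK.
use anyhow::Result;
use serde_json::Value;

pub async fn probe_then_resend(
    client: &reqwest::Client,
    base_url: &str,
    token: &str,
    receiver: &str,
    alert_id: &str,
) -> Result<()> {
    // POST /v1/alert-receivers/{receiver}/probe?resend=true
    let probe: Value = client
        .post(format!("{base_url}/v1/alert-receivers/{receiver}/probe"))
        .query(&[("resend", "true")])
        .bearer_auth(token)
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    // The body is an `AlertProbeResult`; `probe.state` is an
    // `AlertDeliveryState` ("pending", "delivered", or "failed").
    println!("probe state: {}", probe["probe"]["state"]);

    // POST /v1/alerts/{alert_id}/resend?receiver={receiver} queues one more
    // delivery of a specific alert and returns an `AlertDeliveryId`.
    let resend: Value = client
        .post(format!("{base_url}/v1/alerts/{alert_id}/resend"))
        .query(&[("receiver", receiver)])
        .bearer_auth(token)
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    println!("queued delivery: {}", resend["delivery_id"]);
    Ok(())
}

Note that, per the probe description above, a failing webhook endpoint still yields `200 OK` from the probe call itself; the failure is reported in the returned `AlertDelivery` state and attempt details, not in the HTTP status of the probe response.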
"/v1/alerts/{alert_id}/resend": { + "post": { + "tags": [ + "system/alerts" + ], + "summary": "Request re-delivery of alert", + "operationId": "alert_delivery_resend", + "parameters": [ { "in": "path", - "name": "instance", + "name": "alert_id", + "description": "UUID of the alert", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "in": "query", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -1623,8 +1605,15 @@ } ], "responses": { - "204": { - "description": "successful deletion" + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AlertDeliveryId" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -1635,14 +1624,13 @@ } } }, - "/v1/certificates": { + "/v1/anti-affinity-groups": { "get": { "tags": [ - "silos" + "affinity" ], - "summary": "List certificates for external endpoints", - "description": "Returns a list of TLS certificates used for the external API (for the current Silo). These are sorted by creation date, with the most recent certificates appearing first.", - "operationId": "certificate_list", + "summary": "List anti-affinity groups", + "operationId": "anti_affinity_group_list", "parameters": [ { "in": "query", @@ -1664,6 +1652,14 @@ "type": "string" } }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "sort_by", @@ -1678,7 +1674,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CertificateResultsPage" + "$ref": "#/components/schemas/AntiAffinityGroupResultsPage" } } } @@ -1691,21 +1687,33 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "project" + ] } }, "post": { "tags": [ - "silos" + "affinity" + ], + "summary": "Create anti-affinity group", + "operationId": "anti_affinity_group_create", + "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } ], - "summary": "Create new system-wide x.509 certificate", - "description": "This certificate is automatically used by the Oxide Control plane to serve external connections.", - "operationId": "certificate_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CertificateCreate" + "$ref": "#/components/schemas/AntiAffinityGroupCreate" } } }, @@ -1717,7 +1725,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Certificate" + "$ref": "#/components/schemas/AntiAffinityGroup" } } } @@ -1731,32 +1739,93 @@ } } }, - "/v1/certificates/{certificate}": { + "/v1/anti-affinity-groups/{anti_affinity_group}": { "get": { "tags": [ - "silos" + "affinity" ], - "summary": "Fetch certificate", - "description": "Returns the details of a specific certificate", - "operationId": "certificate_view", + "summary": "Fetch anti-affinity group", + "operationId": "anti_affinity_group_view", + "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "path", + "name": "anti_affinity_group", + "description": "Name or ID of the anti affinity group", + "required": true, + "schema": { + "$ref": 
"#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AntiAffinityGroup" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "put": { + "tags": [ + "affinity" + ], + "summary": "Update anti-affinity group", + "operationId": "anti_affinity_group_update", "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "path", - "name": "certificate", - "description": "Name or ID of the certificate", + "name": "anti_affinity_group", + "description": "Name or ID of the anti affinity group", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AntiAffinityGroupUpdate" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Certificate" + "$ref": "#/components/schemas/AntiAffinityGroup" } } } @@ -1771,16 +1840,23 @@ }, "delete": { "tags": [ - "silos" + "affinity" ], - "summary": "Delete certificate", - "description": "Permanently delete a certificate. This operation cannot be undone.", - "operationId": "certificate_delete", + "summary": "Delete anti-affinity group", + "operationId": "anti_affinity_group_delete", "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "path", - "name": "certificate", - "description": "Name or ID of the certificate", + "name": "anti_affinity_group", + "description": "Name or ID of the anti affinity group", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -1800,13 +1876,13 @@ } } }, - "/v1/disks": { + "/v1/anti-affinity-groups/{anti_affinity_group}/members": { "get": { "tags": [ - "disks" + "affinity" ], - "summary": "List disks", - "operationId": "disk_list", + "summary": "List anti-affinity group members", + "operationId": "anti_affinity_group_member_list", "parameters": [ { "in": "query", @@ -1842,6 +1918,15 @@ "schema": { "$ref": "#/components/schemas/NameOrIdSortMode" } + }, + { + "in": "path", + "name": "anti_affinity_group", + "description": "Name or ID of the anti affinity group", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { @@ -1850,7 +1935,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DiskResultsPage" + "$ref": "#/components/schemas/AntiAffinityGroupMemberResultsPage" } } } @@ -1863,45 +1948,50 @@ } }, "x-dropshot-pagination": { - "required": [ - "project" - ] + "required": [] } - }, - "post": { + } + }, + "/v1/anti-affinity-groups/{anti_affinity_group}/members/instance/{instance}": { + "get": { "tags": [ - "disks" + "affinity" ], - "summary": "Create a disk", - "operationId": "disk_create", + "summary": "Fetch anti-affinity group member", + "operationId": "anti_affinity_group_member_instance_view", "parameters": [ { "in": "query", "name": "project", "description": "Name or ID of the project", - "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - } - ], - "requestBody": { - "content": { - "application/json": { - 
"schema": { - "$ref": "#/components/schemas/DiskCreate" - } - } }, - "required": true - }, + { + "in": "path", + "name": "anti_affinity_group", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "path", + "name": "instance", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Disk" + "$ref": "#/components/schemas/AntiAffinityGroupMember" } } } @@ -1913,41 +2003,46 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/disks/{disk}": { - "get": { + }, + "post": { "tags": [ - "disks" + "affinity" ], - "summary": "Fetch disk", - "operationId": "disk_view", + "summary": "Add member to anti-affinity group", + "operationId": "anti_affinity_group_member_instance_add", "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "path", - "name": "disk", - "description": "Name or ID of the disk", + "name": "anti_affinity_group", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Disk" + "$ref": "#/components/schemas/AntiAffinityGroupMember" } } } @@ -1962,24 +2057,31 @@ }, "delete": { "tags": [ - "disks" + "affinity" ], - "summary": "Delete disk", - "operationId": "disk_delete", + "summary": "Remove member from anti-affinity group", + "operationId": "anti_affinity_group_member_instance_delete", "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "path", - "name": "disk", - "description": "Name or ID of the disk", + "name": "anti_affinity_group", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -1998,45 +2100,92 @@ } } }, - "/v1/disks/{disk}/bulk-write": { - "post": { + "/v1/certificates": { + "get": { "tags": [ - "disks" + "silos" ], - "summary": "Import blocks into disk", - "operationId": "disk_bulk_write_import", + "summary": "List certificates for external endpoints", + "description": "Returns a list of TLS certificates used for the external API (for the current Silo). 
These are sorted by creation date, with the most recent certificates appearing first.", + "operationId": "certificate_list", "parameters": [ { - "in": "path", - "name": "disk", - "description": "Name or ID of the disk", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CertificateResultsPage" + } + } } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] + } + }, + "post": { + "tags": [ + "silos" ], + "summary": "Create new system-wide x.509 certificate", + "description": "This certificate is automatically used by the Oxide Control plane to serve external connections.", + "operationId": "certificate_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ImportBlocksBulkWrite" + "$ref": "#/components/schemas/CertificateCreate" } } }, "required": true }, "responses": { - "204": { - "description": "resource updated" + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Certificate" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -2047,36 +2196,35 @@ } } }, - "/v1/disks/{disk}/bulk-write-start": { - "post": { + "/v1/certificates/{certificate}": { + "get": { "tags": [ - "disks" + "silos" ], - "summary": "Start importing blocks into disk", - "description": "Start the process of importing blocks into a disk", - "operationId": "disk_bulk_write_import_start", + "summary": "Fetch certificate", + "description": "Returns the details of a specific certificate", + "operationId": "certificate_view", "parameters": [ { "in": "path", - "name": "disk", - "description": "Name or ID of the disk", + "name": "certificate", + "description": "Name or ID of the certificate", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "responses": { - "204": { - "description": "resource updated" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Certificate" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -2085,38 +2233,28 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/disks/{disk}/bulk-write-stop": { - "post": { + }, + "delete": { "tags": [ - "disks" + "silos" ], - "summary": "Stop importing blocks into disk", - "description": "Stop the process of importing blocks into a disk", - "operationId": "disk_bulk_write_import_stop", + "summary": "Delete certificate", + "description": "Permanently delete a 
certificate. This operation cannot be undone.", + "operationId": "certificate_delete", "parameters": [ { "in": "path", - "name": "disk", - "description": "Name or ID of the disk", + "name": "certificate", + "description": "Name or ID of the certificate", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "responses": { "204": { - "description": "resource updated" + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -2127,186 +2265,32 @@ } } }, - "/v1/disks/{disk}/finalize": { - "post": { + "/v1/disks": { + "get": { "tags": [ "disks" ], - "summary": "Confirm disk block import completion", - "operationId": "disk_finalize_import", + "summary": "List disks", + "operationId": "disk_list", "parameters": [ { - "in": "path", - "name": "disk", - "description": "Name or ID of the disk", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FinalizeDisk" - } - } - }, - "required": true - }, - "responses": { - "204": { - "description": "resource updated" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/disks/{disk}/metrics/{metric}": { - "get": { - "tags": [ - "disks" - ], - "summary": "Fetch disk metrics", - "operationId": "disk_metrics_list", - "parameters": [ - { - "in": "path", - "name": "disk", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "path", - "name": "metric", - "required": true, - "schema": { - "$ref": "#/components/schemas/DiskMetricName" - } - }, - { - "in": "query", - "name": "end_time", - "description": "An exclusive end time of metrics.", - "schema": { - "type": "string", - "format": "date-time" - } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "order", - "description": "Query result order", - "schema": { - "$ref": "#/components/schemas/PaginationOrder" - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, - { - "in": "query", - "name": "start_time", - "description": "An inclusive start time of metrics.", - "schema": { - "type": "string", - "format": "date-time" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MeasurementResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - "x-dropshot-pagination": { - "required": [ - "end_time", - 
"start_time" - ] - } - } - }, - "/v1/floating-ips": { - "get": { - "tags": [ - "floating-ips" - ], - "summary": "List floating IPs", - "operationId": "floating_ip_list", - "parameters": [ - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" } }, { @@ -2331,7 +2315,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FloatingIpResultsPage" + "$ref": "#/components/schemas/DiskResultsPage" } } } @@ -2351,10 +2335,10 @@ }, "post": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Create floating IP", - "operationId": "floating_ip_create", + "summary": "Create a disk", + "operationId": "disk_create", "parameters": [ { "in": "query", @@ -2370,7 +2354,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FloatingIpCreate" + "$ref": "#/components/schemas/DiskCreate" } } }, @@ -2382,7 +2366,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FloatingIp" + "$ref": "#/components/schemas/Disk" } } } @@ -2396,18 +2380,18 @@ } } }, - "/v1/floating-ips/{floating_ip}": { + "/v1/disks/{disk}": { "get": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Fetch floating IP", - "operationId": "floating_ip_view", + "summary": "Fetch disk", + "operationId": "disk_view", "parameters": [ { "in": "path", - "name": "floating_ip", - "description": "Name or ID of the floating IP", + "name": "disk", + "description": "Name or ID of the disk", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2428,7 +2412,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FloatingIp" + "$ref": "#/components/schemas/Disk" } } } @@ -2441,17 +2425,17 @@ } } }, - "put": { + "delete": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Update floating IP", - "operationId": "floating_ip_update", + "summary": "Delete disk", + "operationId": "disk_delete", "parameters": [ { "in": "path", - "name": "floating_ip", - "description": "Name or ID of the floating IP", + "name": "disk", + "description": "Name or ID of the disk", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2466,26 +2450,9 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FloatingIpUpdate" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FloatingIp" - } - } - } + "204": { + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -2494,18 +2461,20 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/disks/{disk}/bulk-write": { + "post": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Delete floating IP", - "operationId": "floating_ip_delete", + "summary": "Import blocks into disk", + "operationId": "disk_bulk_write_import", 
"parameters": [ { "in": "path", - "name": "floating_ip", - "description": "Name or ID of the floating IP", + "name": "disk", + "description": "Name or ID of the disk", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2520,9 +2489,19 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ImportBlocksBulkWrite" + } + } + }, + "required": true + }, "responses": { "204": { - "description": "successful deletion" + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -2533,19 +2512,19 @@ } } }, - "/v1/floating-ips/{floating_ip}/attach": { + "/v1/disks/{disk}/bulk-write-start": { "post": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Attach floating IP", - "description": "Attach floating IP to an instance or other resource.", - "operationId": "floating_ip_attach", - "parameters": [ + "summary": "Start importing blocks into disk", + "description": "Start the process of importing blocks into a disk", + "operationId": "disk_bulk_write_import_start", + "parameters": [ { "in": "path", - "name": "floating_ip", - "description": "Name or ID of the floating IP", + "name": "disk", + "description": "Name or ID of the disk", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2560,26 +2539,9 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FloatingIpAttach" - } - } - }, - "required": true - }, "responses": { - "202": { - "description": "successfully enqueued operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FloatingIp" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -2590,18 +2552,19 @@ } } }, - "/v1/floating-ips/{floating_ip}/detach": { + "/v1/disks/{disk}/bulk-write-stop": { "post": { "tags": [ - "floating-ips" + "disks" ], - "summary": "Detach floating IP", - "operationId": "floating_ip_detach", + "summary": "Stop importing blocks into disk", + "description": "Stop the process of importing blocks into a disk", + "operationId": "disk_bulk_write_import_stop", "parameters": [ { "in": "path", - "name": "floating_ip", - "description": "Name or ID of the floating IP", + "name": "disk", + "description": "Name or ID of the disk", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2617,15 +2580,8 @@ } ], "responses": { - "202": { - "description": "successfully enqueued operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FloatingIp" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -2636,81 +2592,131 @@ } } }, - "/v1/groups": { - "get": { + "/v1/disks/{disk}/finalize": { + "post": { "tags": [ - "silos" + "disks" ], - "summary": "List groups", - "operationId": "group_list", + "summary": "Confirm disk block import completion", + "operationId": "disk_finalize_import", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "in": "path", + "name": "disk", + "description": "Name or ID of the disk", + "required": true, "schema": { - "nullable": true, - "type": "string" + 
"$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "sort_by", + "name": "project", + "description": "Name or ID of the project", "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GroupResultsPage" - } + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FinalizeDisk" } } }, + "required": true + }, + "responses": { + "204": { + "description": "resource updated" + }, "4XX": { "$ref": "#/components/responses/Error" }, "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/groups/{group_id}": { + "/v1/disks/{disk}/metrics/{metric}": { "get": { "tags": [ - "silos" + "disks" ], - "summary": "Fetch group", - "operationId": "group_view", + "summary": "Fetch disk metrics", + "operationId": "disk_metrics_list", "parameters": [ { "in": "path", - "name": "group_id", - "description": "ID of the group", + "name": "disk", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "path", + "name": "metric", "required": true, + "schema": { + "$ref": "#/components/schemas/DiskMetricName" + } + }, + { + "in": "query", + "name": "end_time", + "description": "An exclusive end time of metrics.", "schema": { "type": "string", - "format": "uuid" + "format": "date-time" + } + }, + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "order", + "description": "Query result order", + "schema": { + "$ref": "#/components/schemas/PaginationOrder" + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "start_time", + "description": "An inclusive start time of metrics.", + "schema": { + "type": "string", + "format": "date-time" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" } } ], @@ -2720,7 +2726,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Group" + "$ref": "#/components/schemas/MeasurementResultsPage" } } } @@ -2731,17 +2737,22 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [ + "end_time", + "start_time" + ] } } }, - "/v1/images": { + "/v1/floating-ips": { "get": { "tags": [ - "images" + "floating-ips" ], - "summary": "List images", - "description": "List images which are global or scoped to the specified project. 
The images are returned sorted by creation date, with the most recent images appearing first.", - "operationId": "image_list", + "summary": "List floating IPs", + "operationId": "floating_ip_list", "parameters": [ { "in": "query", @@ -2785,7 +2796,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ImageResultsPage" + "$ref": "#/components/schemas/FloatingIpResultsPage" } } } @@ -2798,21 +2809,23 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "project" + ] } }, "post": { "tags": [ - "images" + "floating-ips" ], - "summary": "Create image", - "description": "Create a new image in a project.", - "operationId": "image_create", + "summary": "Create floating IP", + "operationId": "floating_ip_create", "parameters": [ { "in": "query", "name": "project", "description": "Name or ID of the project", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -2822,7 +2835,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ImageCreate" + "$ref": "#/components/schemas/FloatingIpCreate" } } }, @@ -2834,7 +2847,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Image" + "$ref": "#/components/schemas/FloatingIp" } } } @@ -2848,19 +2861,18 @@ } } }, - "/v1/images/{image}": { + "/v1/floating-ips/{floating_ip}": { "get": { "tags": [ - "images" + "floating-ips" ], - "summary": "Fetch image", - "description": "Fetch the details for a specific image in a project.", - "operationId": "image_view", + "summary": "Fetch floating IP", + "operationId": "floating_ip_view", "parameters": [ { "in": "path", - "name": "image", - "description": "Name or ID of the image", + "name": "floating_ip", + "description": "Name or ID of the floating IP", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2881,7 +2893,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Image" + "$ref": "#/components/schemas/FloatingIp" } } } @@ -2894,18 +2906,17 @@ } } }, - "delete": { + "put": { "tags": [ - "images" + "floating-ips" ], - "summary": "Delete image", - "description": "Permanently delete an image from a project. This operation cannot be undone. 
Any instances in the project using the image will continue to run, however new instances can not be created with this image.", - "operationId": "image_delete", + "summary": "Update floating IP", + "operationId": "floating_ip_update", "parameters": [ { "in": "path", - "name": "image", - "description": "Name or ID of the image", + "name": "floating_ip", + "description": "Name or ID of the floating IP", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2920,32 +2931,86 @@ } } ], - "responses": { - "204": { - "description": "successful deletion" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FloatingIpUpdate" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FloatingIp" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "delete": { + "tags": [ + "floating-ips" + ], + "summary": "Delete floating IP", + "operationId": "floating_ip_delete", + "parameters": [ + { + "in": "path", + "name": "floating_ip", + "description": "Name or ID of the floating IP", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { "$ref": "#/components/responses/Error" } } } }, - "/v1/images/{image}/demote": { + "/v1/floating-ips/{floating_ip}/attach": { "post": { "tags": [ - "images" + "floating-ips" ], - "summary": "Demote silo image", - "description": "Demote silo image to be visible only to a specified project", - "operationId": "image_demote", + "summary": "Attach floating IP", + "description": "Attach floating IP to an instance or other resource.", + "operationId": "floating_ip_attach", "parameters": [ { "in": "path", - "name": "image", - "description": "Name or ID of the image", + "name": "floating_ip", + "description": "Name or ID of the floating IP", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -2955,19 +3020,28 @@ "in": "query", "name": "project", "description": "Name or ID of the project", - "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FloatingIpAttach" + } + } + }, + "required": true + }, "responses": { "202": { "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Image" + "$ref": "#/components/schemas/FloatingIp" } } } @@ -2981,19 +3055,18 @@ } } }, - "/v1/images/{image}/promote": { + "/v1/floating-ips/{floating_ip}/detach": { "post": { "tags": [ - "images" + "floating-ips" ], - "summary": "Promote project image", - "description": "Promote project image to be visible to all projects in the silo", - "operationId": "image_promote", + "summary": "Detach floating IP", + "operationId": "floating_ip_detach", "parameters": [ { "in": "path", - "name": "image", - "description": "Name or ID of the image", + "name": "floating_ip", + "description": "Name or ID of the floating IP", 
"required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -3014,7 +3087,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Image" + "$ref": "#/components/schemas/FloatingIp" } } } @@ -3028,13 +3101,13 @@ } } }, - "/v1/instances": { + "/v1/groups": { "get": { "tags": [ - "instances" + "silos" ], - "summary": "List instances", - "operationId": "instance_list", + "summary": "List groups", + "operationId": "group_list", "parameters": [ { "in": "query", @@ -3056,19 +3129,11 @@ "type": "string" } }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -3078,7 +3143,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceResultsPage" + "$ref": "#/components/schemas/GroupResultsPage" } } } @@ -3091,45 +3156,36 @@ } }, "x-dropshot-pagination": { - "required": [ - "project" - ] + "required": [] } - }, - "post": { + } + }, + "/v1/groups/{group_id}": { + "get": { "tags": [ - "instances" + "silos" ], - "summary": "Create instance", - "operationId": "instance_create", + "summary": "Fetch group", + "operationId": "group_view", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "group_id", + "description": "ID of the group", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "uuid" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InstanceCreate" - } - } - }, - "required": true - }, "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/Group" } } } @@ -3143,14 +3199,35 @@ } } }, - "/v1/instances/{instance}": { + "/v1/images": { "get": { "tags": [ - "instances" + "images" ], - "summary": "Fetch instance", - "operationId": "instance_view", + "summary": "List images", + "description": "List images which are global or scoped to the specified project. 
The images are returned sorted by creation date, with the most recent images appearing first.", + "operationId": "image_list", "parameters": [ + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, { "in": "query", "name": "project", @@ -3160,12 +3237,10 @@ } }, { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -3175,7 +3250,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/ImageResultsPage" } } } @@ -3186,14 +3261,18 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } }, - "put": { + "post": { "tags": [ - "instances" + "images" ], - "summary": "Update instance", - "operationId": "instance_update", + "summary": "Create image", + "description": "Create a new image in a project.", + "operationId": "image_create", "parameters": [ { "in": "query", @@ -3202,34 +3281,25 @@ "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceUpdate" + "$ref": "#/components/schemas/ImageCreate" } } }, "required": true }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/Image" } } } @@ -3241,14 +3311,26 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/images/{image}": { + "get": { "tags": [ - "instances" + "images" ], - "summary": "Delete instance", - "operationId": "instance_delete", + "summary": "Fetch image", + "description": "Fetch the details for a specific image in a project.", + "operationId": "image_view", "parameters": [ + { + "in": "path", + "name": "image", + "description": "Name or ID of the image", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "project", @@ -3256,15 +3338,51 @@ "schema": { "$ref": "#/components/schemas/NameOrId" } - }, + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Image" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "delete": { + "tags": [ + "images" + ], + "summary": "Delete image", + "description": "Permanently delete an image from a project. This operation cannot be undone. 
Any instances in the project using the image will continue to run, however new instances can not be created with this image.", + "operationId": "image_delete", + "parameters": [ { "in": "path", - "name": "instance", - "description": "Name or ID of the instance", + "name": "image", + "description": "Name or ID of the image", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { @@ -3280,53 +3398,28 @@ } } }, - "/v1/instances/{instance}/affinity-groups": { - "get": { + "/v1/images/{image}/demote": { + "post": { "tags": [ - "hidden" + "images" ], - "summary": "List affinity groups containing instance", - "operationId": "instance_affinity_group_list", + "summary": "Demote silo image", + "description": "Demote silo image to be visible only to a specified project", + "operationId": "image_demote", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "in": "path", + "name": "image", + "description": "Name or ID of the image", + "required": true, "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", "name": "project", "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -3334,12 +3427,12 @@ } ], "responses": { - "200": { - "description": "successful operation", + "202": { + "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AffinityGroupResultsPage" + "$ref": "#/components/schemas/Image" } } } @@ -3350,38 +3443,25 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/instances/{instance}/anti-affinity-groups": { - "get": { + "/v1/images/{image}/promote": { + "post": { "tags": [ - "instances" + "images" ], - "summary": "List anti-affinity groups containing instance", - "operationId": "instance_anti_affinity_group_list", + "summary": "Promote project image", + "description": "Promote project image to be visible to all projects in the silo", + "operationId": "image_promote", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "in": "path", + "name": "image", + "description": "Name or ID of the image", + "required": true, "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrId" } }, { @@ -3391,31 +3471,15 @@ "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": 
"#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "responses": { - "200": { - "description": "successful operation", + "202": { + "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AntiAffinityGroupResultsPage" + "$ref": "#/components/schemas/Image" } } } @@ -3426,19 +3490,16 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/instances/{instance}/disks": { + "/v1/instances": { "get": { "tags": [ "instances" ], - "summary": "List disks for instance", - "operationId": "instance_disk_list", + "summary": "List instances", + "operationId": "instance_list", "parameters": [ { "in": "query", @@ -3474,15 +3535,6 @@ "schema": { "$ref": "#/components/schemas/NameOrIdSortMode" } - }, - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "responses": { @@ -3491,7 +3543,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DiskResultsPage" + "$ref": "#/components/schemas/InstanceResultsPage" } } } @@ -3504,31 +3556,23 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "project" + ] } - } - }, - "/v1/instances/{instance}/disks/attach": { + }, "post": { "tags": [ "instances" ], - "summary": "Attach disk to instance", - "operationId": "instance_disk_attach", + "summary": "Create instance", + "operationId": "instance_create", "parameters": [ - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "project", "description": "Name or ID of the project", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -3538,19 +3582,19 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DiskPath" + "$ref": "#/components/schemas/InstanceCreate" } } }, "required": true }, "responses": { - "202": { - "description": "successfully enqueued operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Disk" + "$ref": "#/components/schemas/Instance" } } } @@ -3564,49 +3608,39 @@ } } }, - "/v1/instances/{instance}/disks/detach": { - "post": { + "/v1/instances/{instance}": { + "get": { "tags": [ "instances" ], - "summary": "Detach disk from instance", - "operationId": "instance_disk_detach", + "summary": "Fetch instance", + "operationId": "instance_view", "parameters": [ { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DiskPath" - } - } - }, - "required": true - }, "responses": { - "202": { - "description": "successfully enqueued 
operation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Disk" + "$ref": "#/components/schemas/Instance" } } } @@ -3618,15 +3652,13 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/instances/{instance}/external-ips": { - "get": { + }, + "put": { "tags": [ "instances" ], - "summary": "List external IP addresses", - "operationId": "instance_external_ip_list", + "summary": "Update instance", + "operationId": "instance_update", "parameters": [ { "in": "query", @@ -3646,69 +3678,23 @@ } } ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExternalIpResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/instances/{instance}/external-ips/ephemeral": { - "post": { - "tags": [ - "instances" - ], - "summary": "Allocate and attach ephemeral IP to instance", - "operationId": "instance_ephemeral_ip_attach", - "parameters": [ - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EphemeralIpCreate" + "$ref": "#/components/schemas/InstanceUpdate" } } }, "required": true }, "responses": { - "202": { - "description": "successfully enqueued operation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExternalIp" + "$ref": "#/components/schemas/Instance" } } } @@ -3725,22 +3711,22 @@ "tags": [ "instances" ], - "summary": "Detach and deallocate ephemeral IP from instance", - "operationId": "instance_ephemeral_ip_detach", + "summary": "Delete instance", + "operationId": "instance_delete", "parameters": [ { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -3759,14 +3745,34 @@ } } }, - "/v1/instances/{instance}/reboot": { - "post": { + "/v1/instances/{instance}/affinity-groups": { + "get": { "tags": [ - "instances" + "hidden" ], - "summary": "Reboot an instance", - "operationId": "instance_reboot", + "summary": "List affinity groups containing instance", + "operationId": "instance_affinity_group_list", "parameters": [ + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, { "in": "query", "name": "project", @@ -3775,6 +3781,13 @@ "$ref": "#/components/schemas/NameOrId" } }, + { + 
"in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + }, { "in": "path", "name": "instance", @@ -3786,12 +3799,12 @@ } ], "responses": { - "202": { - "description": "successfully enqueued operation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/AffinityGroupResultsPage" } } } @@ -3802,63 +3815,60 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/instances/{instance}/serial-console": { + "/v1/instances/{instance}/anti-affinity-groups": { "get": { "tags": [ "instances" ], - "summary": "Fetch instance serial console", - "operationId": "instance_serial_console", + "summary": "List anti-affinity groups containing instance", + "operationId": "instance_anti_affinity_group_list", "parameters": [ { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "from_start", - "description": "Character index in the serial buffer from which to read, counting the bytes output since instance start. If this is not provided, `most_recent` must be provided, and if this *is* provided, `most_recent` must *not* be provided.", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { "nullable": true, - "type": "integer", - "format": "uint64", - "minimum": 0 + "type": "string" } }, { "in": "query", - "name": "max_bytes", - "description": "Maximum number of bytes of buffered serial console contents to return. If the requested range runs to the end of the available buffer, the data returned will be shorter than `max_bytes`.", + "name": "project", + "description": "Name or ID of the project", "schema": { - "nullable": true, - "type": "integer", - "format": "uint64", - "minimum": 0 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "most_recent", - "description": "Character index in the serial buffer from which to read, counting *backward* from the most recently buffered data retrieved from the instance. 
(See note on `from_start` about mutual exclusivity)", + "name": "sort_by", "schema": { - "nullable": true, - "type": "integer", - "format": "uint64", - "minimum": 0 + "$ref": "#/components/schemas/NameOrIdSortMode" } }, { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -3870,7 +3880,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceSerialConsoleData" + "$ref": "#/components/schemas/AntiAffinityGroupResultsPage" } } } @@ -3881,77 +3891,20 @@ "5XX": { "$ref": "#/components/responses/Error" } - } - } - }, - "/v1/instances/{instance}/serial-console/stream": { - "get": { - "tags": [ - "instances" - ], - "summary": "Stream instance serial console", - "operationId": "instance_serial_console_stream", - "parameters": [ - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "most_recent", - "description": "Character index in the serial buffer from which to read, counting *backward* from the most recently buffered data retrieved from the instance.", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint64", - "minimum": 0 - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "default": { - "description": "", - "content": { - "*/*": { - "schema": {} - } - } - } }, - "x-dropshot-websocket": {} + "x-dropshot-pagination": { + "required": [] + } } }, - "/v1/instances/{instance}/ssh-public-keys": { + "/v1/instances/{instance}/disks": { "get": { "tags": [ "instances" ], - "summary": "List SSH public keys for instance", - "description": "List SSH public keys injected via cloud-init during instance creation. 
Note that this list is a snapshot in time and will not reflect updates made after the instance is created.", - "operationId": "instance_ssh_public_key_list", + "summary": "List disks for instance", + "operationId": "instance_disk_list", "parameters": [ - { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "limit", @@ -3986,6 +3939,15 @@ "schema": { "$ref": "#/components/schemas/NameOrIdSortMode" } + }, + { + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { @@ -3994,7 +3956,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SshKeyResultsPage" + "$ref": "#/components/schemas/DiskResultsPage" } } } @@ -4011,39 +3973,49 @@ } } }, - "/v1/instances/{instance}/start": { + "/v1/instances/{instance}/disks/attach": { "post": { "tags": [ "instances" ], - "summary": "Boot instance", - "operationId": "instance_start", + "summary": "Attach disk to instance", + "operationId": "instance_disk_attach", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DiskPath" + } + } + }, + "required": true + }, "responses": { "202": { "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/Disk" } } } @@ -4057,39 +4029,49 @@ } } }, - "/v1/instances/{instance}/stop": { + "/v1/instances/{instance}/disks/detach": { "post": { "tags": [ "instances" ], - "summary": "Stop instance", - "operationId": "instance_stop", + "summary": "Detach disk from instance", + "operationId": "instance_disk_detach", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "path", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, + "in": "query", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DiskPath" + } + } + }, + "required": true + }, "responses": { "202": { "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Instance" + "$ref": "#/components/schemas/Disk" } } } @@ -4103,61 +4085,27 @@ } } }, - "/v1/internet-gateway-ip-addresses": { + "/v1/instances/{instance}/external-ips": { "get": { "tags": [ - "vpcs" + "instances" ], - "summary": "List IP addresses attached to internet gateway", - "operationId": "internet_gateway_ip_address_list", + "summary": "List external 
IP addresses", + "operationId": "instance_external_ip_list", "parameters": [ - { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, { "in": "query", "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4169,7 +4117,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayIpAddressResultsPage" + "$ref": "#/components/schemas/ExternalIpResultsPage" } } } @@ -4180,24 +4128,21 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [ - "gateway" - ] } - }, + } + }, + "/v1/instances/{instance}/external-ips/ephemeral": { "post": { "tags": [ - "vpcs" + "instances" ], - "summary": "Attach IP address to internet gateway", - "operationId": "internet_gateway_ip_address_create", + "summary": "Allocate and attach ephemeral IP to instance", + "operationId": "instance_ephemeral_ip_attach", "parameters": [ { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -4206,15 +4151,7 @@ { "in": "query", "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4224,19 +4161,19 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayIpAddressCreate" + "$ref": "#/components/schemas/EphemeralIpCreate" } } }, "required": true }, "responses": { - "201": { - "description": "successful creation", + "202": { + "description": "successfully enqueued operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayIpAddress" + "$ref": "#/components/schemas/ExternalIp" } } } @@ -4248,20 +4185,18 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/internet-gateway-ip-addresses/{address}": { + }, "delete": { "tags": [ - "vpcs" + "instances" ], - "summary": "Detach IP address from internet gateway", - "operationId": "internet_gateway_ip_address_delete", + "summary": "Detach and deallocate ephemeral IP from instance", + "operationId": 
"instance_ephemeral_ip_detach", "parameters": [ { "in": "path", - "name": "address", - "description": "Name or ID of the IP address", + "name": "instance", + "description": "Name or ID of the instance", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -4269,40 +4204,62 @@ }, { "in": "query", - "name": "cascade", - "description": "Also delete routes targeting this gateway element.", - "schema": { - "type": "boolean" - } - }, - { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/instances/{instance}/reboot": { + "post": { + "tags": [ + "instances" + ], + "summary": "Reboot an instance", + "operationId": "instance_reboot", + "parameters": [ { "in": "query", "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], "responses": { - "204": { - "description": "successful deletion" + "202": { + "description": "successfully enqueued operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Instance" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -4313,61 +4270,60 @@ } } }, - "/v1/internet-gateway-ip-pools": { + "/v1/instances/{instance}/serial-console": { "get": { "tags": [ - "vpcs" + "instances" ], - "summary": "List IP pools attached to internet gateway", - "operationId": "internet_gateway_ip_pool_list", + "summary": "Fetch instance serial console", + "operationId": "instance_serial_console", "parameters": [ { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "name": "from_start", + "description": "Character index in the serial buffer from which to read, counting the bytes output since instance start. If this is not provided, `most_recent` must be provided, and if this *is* provided, `most_recent` must *not* be provided.", "schema": { "nullable": true, "type": "integer", - "format": "uint32", - "minimum": 1 + "format": "uint64", + "minimum": 0 } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "max_bytes", + "description": "Maximum number of bytes of buffered serial console contents to return. 
If the requested range runs to the end of the available buffer, the data returned will be shorter than `max_bytes`.", "schema": { "nullable": true, - "type": "string" + "type": "integer", + "format": "uint64", + "minimum": 0 } }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "most_recent", + "description": "Character index in the serial buffer from which to read, counting *backward* from the most recently buffered data retrieved from the instance. (See note on `from_start` about mutual exclusivity)", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint64", + "minimum": 0 } }, { "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "name": "project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4379,7 +4335,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayIpPoolResultsPage" + "$ref": "#/components/schemas/InstanceSerialConsoleData" } } } @@ -4390,24 +4346,21 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [ - "gateway" - ] } - }, - "post": { + } + }, + "/v1/instances/{instance}/serial-console/stream": { + "get": { "tags": [ - "vpcs" + "instances" ], - "summary": "Attach IP pool to internet gateway", - "operationId": "internet_gateway_ip_pool_create", + "summary": "Stream instance serial console", + "operationId": "instance_serial_console_stream", "parameters": [ { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -4415,38 +4368,98 @@ }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "most_recent", + "description": "Character index in the serial buffer from which to read, counting *backward* from the most recently buffered data retrieved from the instance.", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint64", + "minimum": 0 } }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "name": "project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InternetGatewayIpPoolCreate" + "responses": { + "default": { + "description": "", + "content": { + "*/*": { + "schema": {} } } - }, - "required": true + } }, + "x-dropshot-websocket": {} + } + }, + "/v1/instances/{instance}/ssh-public-keys": { + "get": { + "tags": [ + "instances" + ], + "summary": "List SSH public keys for instance", + "description": "List SSH public keys injected via cloud-init during instance creation. 
Note that this list is a snapshot in time and will not reflect updates made after the instance is created.", + "operationId": "instance_ssh_public_key_list", + "parameters": [ + { + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + } + ], "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayIpPool" + "$ref": "#/components/schemas/SshKeyResultsPage" } } } @@ -4457,62 +4470,94 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/internet-gateway-ip-pools/{pool}": { - "delete": { + "/v1/instances/{instance}/start": { + "post": { "tags": [ - "vpcs" + "instances" ], - "summary": "Detach IP pool from internet gateway", - "operationId": "internet_gateway_ip_pool_delete", + "summary": "Boot instance", + "operationId": "instance_start", "parameters": [ - { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", - "name": "cascade", - "description": "Also delete routes targeting this gateway element.", + "name": "project", + "description": "Name or ID of the project", "schema": { - "type": "boolean" + "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "gateway", - "description": "Name or ID of the internet gateway", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + } + ], + "responses": { + "202": { + "description": "successfully enqueued operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Instance" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/instances/{instance}/stop": { + "post": { + "tags": [ + "instances" + ], + "summary": "Stop instance", + "operationId": "instance_stop", + "parameters": [ { "in": "query", "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "in": "path", + "name": "instance", + "description": "Name or ID of the instance", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], "responses": { - "204": { - "description": "successful deletion" + "202": { + "description": 
"successfully enqueued operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Instance" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -4523,14 +4568,22 @@ } } }, - "/v1/internet-gateways": { + "/v1/internet-gateway-ip-addresses": { "get": { "tags": [ "vpcs" ], - "summary": "List internet gateways", - "operationId": "internet_gateway_list", + "summary": "List IP addresses attached to internet gateway", + "operationId": "internet_gateway_ip_address_list", "parameters": [ + { + "in": "query", + "name": "gateway", + "description": "Name or ID of the internet gateway", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "limit", @@ -4569,7 +4622,7 @@ { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4581,7 +4634,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayResultsPage" + "$ref": "#/components/schemas/InternetGatewayIpAddressResultsPage" } } } @@ -4595,7 +4648,7 @@ }, "x-dropshot-pagination": { "required": [ - "vpc" + "gateway" ] } }, @@ -4603,9 +4656,18 @@ "tags": [ "vpcs" ], - "summary": "Create VPC internet gateway", - "operationId": "internet_gateway_create", + "summary": "Attach IP address to internet gateway", + "operationId": "internet_gateway_ip_address_create", "parameters": [ + { + "in": "query", + "name": "gateway", + "description": "Name or ID of the internet gateway", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "project", @@ -4617,8 +4679,7 @@ { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", - "required": true, + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4628,7 +4689,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGatewayCreate" + "$ref": "#/components/schemas/InternetGatewayIpAddressCreate" } } }, @@ -4640,7 +4701,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InternetGateway" + "$ref": "#/components/schemas/InternetGatewayIpAddress" } } } @@ -4654,18 +4715,18 @@ } } }, - "/v1/internet-gateways/{gateway}": { - "get": { + "/v1/internet-gateway-ip-addresses/{address}": { + "delete": { "tags": [ "vpcs" ], - "summary": "Fetch internet gateway", - "operationId": "internet_gateway_view", + "summary": "Detach IP address from internet gateway", + "operationId": "internet_gateway_ip_address_delete", "parameters": [ { "in": "path", - "name": "gateway", - "description": "Name or ID of the gateway", + "name": "address", + "description": "Name or ID of the IP address", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -4673,64 +4734,20 @@ }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "cascade", + "description": "Also delete routes targeting this gateway element.", "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "boolean" } }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - 
"application/json": { - "schema": { - "$ref": "#/components/schemas/InternetGateway" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "delete": { - "tags": [ - "vpcs" - ], - "summary": "Delete internet gateway", - "operationId": "internet_gateway_delete", - "parameters": [ - { - "in": "path", "name": "gateway", - "description": "Name or ID of the gateway", - "required": true, + "description": "Name or ID of the internet gateway", "schema": { "$ref": "#/components/schemas/NameOrId" } }, - { - "in": "query", - "name": "cascade", - "description": "Also delete routes targeting this gateway.", - "schema": { - "type": "boolean" - } - }, { "in": "query", "name": "project", @@ -4742,7 +4759,7 @@ { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -4761,14 +4778,22 @@ } } }, - "/v1/ip-pools": { + "/v1/internet-gateway-ip-pools": { "get": { "tags": [ - "projects" + "vpcs" ], - "summary": "List IP pools", - "operationId": "project_ip_pool_list", + "summary": "List IP pools attached to internet gateway", + "operationId": "internet_gateway_ip_pool_list", "parameters": [ + { + "in": "query", + "name": "gateway", + "description": "Name or ID of the internet gateway", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "limit", @@ -4789,12 +4814,28 @@ "type": "string" } }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "sort_by", "schema": { "$ref": "#/components/schemas/NameOrIdSortMode" } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { @@ -4803,7 +4844,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloIpPoolResultsPage" + "$ref": "#/components/schemas/InternetGatewayIpPoolResultsPage" } } } @@ -4816,35 +4857,61 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "gateway" + ] } - } - }, - "/v1/ip-pools/{pool}": { - "get": { + }, + "post": { "tags": [ - "projects" + "vpcs" ], - "summary": "Fetch IP pool", - "operationId": "project_ip_pool_view", + "summary": "Attach IP pool to internet gateway", + "operationId": "internet_gateway_ip_pool_create", "parameters": [ { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "in": "query", + "name": "gateway", + "description": "Name or ID of the internet gateway", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InternetGatewayIpPoolCreate" + } + } + }, + "required": true + }, "responses": { - "200": { - "description": 
"successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloIpPool" + "$ref": "#/components/schemas/InternetGatewayIpPool" } } } @@ -4858,83 +4925,59 @@ } } }, - "/v1/login/{silo_name}/local": { - "post": { + "/v1/internet-gateway-ip-pools/{pool}": { + "delete": { "tags": [ - "login" + "vpcs" ], - "summary": "Authenticate a user via username and password", - "operationId": "login_local", + "summary": "Detach IP pool from internet gateway", + "operationId": "internet_gateway_ip_pool_delete", "parameters": [ { "in": "path", - "name": "silo_name", + "name": "pool", + "description": "Name or ID of the IP pool", "required": true, "schema": { - "$ref": "#/components/schemas/Name" + "$ref": "#/components/schemas/NameOrId" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UsernamePasswordCredentials" - } + }, + { + "in": "query", + "name": "cascade", + "description": "Also delete routes targeting this gateway element.", + "schema": { + "type": "boolean" } }, - "required": true - }, - "responses": { - "204": { - "description": "resource updated" + { + "in": "query", + "name": "gateway", + "description": "Name or ID of the internet gateway", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } }, - "4XX": { - "$ref": "#/components/responses/Error" + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } }, - "5XX": { - "$ref": "#/components/responses/Error" + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC, only required if `gateway` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } - } - } - }, - "/v1/logout": { - "post": { - "tags": [ - "hidden" ], - "summary": "Log user out of web console by deleting session on client and server", - "operationId": "logout", "responses": { "204": { - "description": "resource updated" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/me": { - "get": { - "tags": [ - "session" - ], - "summary": "Fetch user for current session", - "operationId": "current_user_view", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CurrentUser" - } - } - } + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -4945,13 +4988,13 @@ } } }, - "/v1/me/groups": { + "/v1/internet-gateways": { "get": { "tags": [ - "session" + "vpcs" ], - "summary": "Fetch current user's groups", - "operationId": "current_user_groups", + "summary": "List internet gateways", + "operationId": "internet_gateway_list", "parameters": [ { "in": "query", @@ -4975,69 +5018,25 @@ }, { "in": "query", - "name": "sort_by", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "$ref": "#/components/schemas/IdSortMode" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GroupResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - 
"x-dropshot-pagination": { - "required": [] - } - } - }, - "/v1/me/ssh-keys": { - "get": { - "tags": [ - "session" - ], - "summary": "List SSH public keys", - "description": "Lists SSH public keys for the currently authenticated user.", - "operationId": "current_user_ssh_key_list", - "parameters": [ - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "sort_by", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrIdSortMode" } }, { "in": "query", - "name": "sort_by", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], @@ -5047,7 +5046,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SshKeyResultsPage" + "$ref": "#/components/schemas/InternetGatewayResultsPage" } } } @@ -5060,21 +5059,41 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "vpc" + ] } }, "post": { "tags": [ - "session" + "vpcs" + ], + "summary": "Create VPC internet gateway", + "operationId": "internet_gateway_create", + "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } ], - "summary": "Create SSH public key", - "description": "Create an SSH public key for the currently authenticated user.", - "operationId": "current_user_ssh_key_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SshKeyCreate" + "$ref": "#/components/schemas/InternetGatewayCreate" } } }, @@ -5086,7 +5105,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SshKey" + "$ref": "#/components/schemas/InternetGateway" } } } @@ -5100,23 +5119,38 @@ } } }, - "/v1/me/ssh-keys/{ssh_key}": { + "/v1/internet-gateways/{gateway}": { "get": { "tags": [ - "session" + "vpcs" ], - "summary": "Fetch SSH public key", - "description": "Fetch SSH public key associated with the currently authenticated user.", - "operationId": "current_user_ssh_key_view", + "summary": "Fetch internet gateway", + "operationId": "internet_gateway_view", "parameters": [ { "in": "path", - "name": "ssh_key", - "description": "Name or ID of the SSH key", + "name": "gateway", + "description": "Name or ID of the gateway", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { @@ -5125,7 +5159,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SshKey" + "$ref": "#/components/schemas/InternetGateway" } } } @@ -5140,117 +5174,48 @@ }, "delete": { "tags": [ - 
"session" + "vpcs" ], - "summary": "Delete SSH public key", - "description": "Delete an SSH public key associated with the currently authenticated user.", - "operationId": "current_user_ssh_key_delete", + "summary": "Delete internet gateway", + "operationId": "internet_gateway_delete", "parameters": [ { "in": "path", - "name": "ssh_key", - "description": "Name or ID of the SSH key", + "name": "gateway", + "description": "Name or ID of the gateway", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - } - ], - "responses": { - "204": { - "description": "successful deletion" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/metrics/{metric_name}": { - "get": { - "tags": [ - "metrics" - ], - "summary": "View metrics", - "description": "View CPU, memory, or storage utilization metrics at the silo or project level.", - "operationId": "silo_metric", - "parameters": [ - { - "in": "path", - "name": "metric_name", - "required": true, - "schema": { - "$ref": "#/components/schemas/SystemMetricName" - } - }, - { - "in": "query", - "name": "end_time", - "description": "An exclusive end time of metrics.", - "schema": { - "type": "string", - "format": "date-time" - } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "order", - "description": "Query result order", - "schema": { - "$ref": "#/components/schemas/PaginationOrder" - } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "cascade", + "description": "Also delete routes targeting this gateway.", "schema": { - "nullable": true, - "type": "string" + "type": "boolean" } }, { "in": "query", - "name": "start_time", - "description": "An inclusive start time of metrics.", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "type": "string", - "format": "date-time" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MeasurementResultsPage" - } - } - } + "204": { + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -5258,31 +5223,17 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [ - "end_time", - "start_time" - ] } } }, - "/v1/network-interfaces": { + "/v1/ip-pools": { "get": { "tags": [ - "instances" + "projects" ], - "summary": "List network interfaces", - "operationId": "instance_network_interface_list", + "summary": "List IP pools", + "operationId": "project_ip_pool_list", "parameters": [ - { - "in": "query", - "name": "instance", - "description": "Name or ID of the instance", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "limit", @@ -5303,14 +5254,6 @@ "type": "string" } }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - 
"schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "sort_by", @@ -5325,7 +5268,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterfaceResultsPage" + "$ref": "#/components/schemas/SiloIpPoolResultsPage" } } } @@ -5338,98 +5281,26 @@ } }, "x-dropshot-pagination": { - "required": [ - "instance" - ] - } - }, - "post": { - "tags": [ - "instances" - ], - "summary": "Create network interface", - "operationId": "instance_network_interface_create", - "parameters": [ - { - "in": "query", - "name": "instance", - "description": "Name or ID of the instance", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterfaceCreate" - } - } - }, - "required": true - }, - "responses": { - "201": { - "description": "successful creation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterface" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } + "required": [] } } }, - "/v1/network-interfaces/{interface}": { + "/v1/ip-pools/{pool}": { "get": { "tags": [ - "instances" + "projects" ], - "summary": "Fetch network interface", - "operationId": "instance_network_interface_view", + "summary": "Fetch IP pool", + "operationId": "project_ip_pool_view", "parameters": [ { "in": "path", - "name": "interface", - "description": "Name or ID of the network interface", + "name": "pool", + "description": "Name or ID of the IP pool", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "instance", - "description": "Name or ID of the instance", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], "responses": { @@ -5438,7 +5309,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterface" + "$ref": "#/components/schemas/SiloIpPool" } } } @@ -5450,37 +5321,22 @@ "$ref": "#/components/responses/Error" } } - }, - "put": { + } + }, + "/v1/login/{silo_name}/local": { + "post": { "tags": [ - "instances" + "login" ], - "summary": "Update network interface", - "operationId": "instance_network_interface_update", + "summary": "Authenticate a user via username and password", + "operationId": "login_local", "parameters": [ { "in": "path", - "name": "interface", - "description": "Name or ID of the network interface", + "name": "silo_name", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "instance", - "description": "Name or ID of the instance", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/Name" } } ], @@ -5488,68 +5344,15 @@ "content": { 
"application/json": { "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterfaceUpdate" + "$ref": "#/components/schemas/UsernamePasswordCredentials" } } }, "required": true }, - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterface" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "delete": { - "tags": [ - "instances" - ], - "summary": "Delete network interface", - "description": "Note that the primary interface for an instance cannot be deleted if there are any secondary interfaces. A new primary interface must be designated first. The primary interface can be deleted if there are no secondary interfaces.", - "operationId": "instance_network_interface_delete", - "parameters": [ - { - "in": "path", - "name": "interface", - "description": "Name or ID of the network interface", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "instance", - "description": "Name or ID of the instance", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], "responses": { "204": { - "description": "successful deletion" + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -5560,24 +5363,16 @@ } } }, - "/v1/ping": { - "get": { + "/v1/logout": { + "post": { "tags": [ - "system/status" + "hidden" ], - "summary": "Ping API", - "description": "Always responds with Ok if it responds at all.", - "operationId": "ping", + "summary": "Log user out of web console by deleting session on client and server", + "operationId": "logout", "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Ping" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -5588,55 +5383,20 @@ } } }, - "/v1/policy": { + "/v1/me": { "get": { "tags": [ - "silos" - ], - "summary": "Fetch current silo's IAM policy", - "operationId": "policy_view", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "put": { - "tags": [ - "silos" + "session" ], - "summary": "Update current silo's IAM policy", - "operationId": "policy_update", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" - } - } - }, - "required": true - }, + "summary": "Fetch user for current session", + "operationId": "current_user_view", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" + "$ref": "#/components/schemas/CurrentUser" } } } @@ -5650,13 +5410,13 @@ } } }, - "/v1/projects": { + "/v1/me/groups": { "get": { "tags": [ - "projects" + "session" ], - "summary": "List projects", - "operationId": "project_list", + "summary": "Fetch current user's groups", + "operationId": 
"current_user_groups", "parameters": [ { "in": "query", @@ -5682,7 +5442,7 @@ "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -5692,7 +5452,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ProjectResultsPage" + "$ref": "#/components/schemas/GroupResultsPage" } } } @@ -5707,30 +5467,52 @@ "x-dropshot-pagination": { "required": [] } - }, - "post": { - "tags": [ - "projects" + } + }, + "/v1/me/ssh-keys": { + "get": { + "tags": [ + "session" ], - "summary": "Create project", - "operationId": "project_create", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProjectCreate" - } + "summary": "List SSH public keys", + "description": "Lists SSH public keys for the currently authenticated user.", + "operationId": "current_user_ssh_key_list", + "parameters": [ + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, - "required": true - }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + } + ], "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Project" + "$ref": "#/components/schemas/SshKeyResultsPage" } } } @@ -5741,34 +5523,35 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } - } - }, - "/v1/projects/{project}": { - "get": { + }, + "post": { "tags": [ - "projects" + "session" ], - "summary": "Fetch project", - "operationId": "project_view", - "parameters": [ - { - "in": "path", - "name": "project", - "description": "Name or ID of the project", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" + "summary": "Create SSH public key", + "description": "Create an SSH public key for the currently authenticated user.", + "operationId": "current_user_ssh_key_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SshKeyCreate" + } } - } - ], + }, + "required": true + }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Project" + "$ref": "#/components/schemas/SshKey" } } } @@ -5780,41 +5563,34 @@ "$ref": "#/components/responses/Error" } } - }, - "put": { + } + }, + "/v1/me/ssh-keys/{ssh_key}": { + "get": { "tags": [ - "projects" + "session" ], - "summary": "Update a project", - "operationId": "project_update", + "summary": "Fetch SSH public key", + "description": "Fetch SSH public key associated with the currently authenticated user.", + "operationId": "current_user_ssh_key_view", "parameters": [ { "in": "path", - "name": "project", - "description": "Name or ID of the project", + "name": "ssh_key", + "description": "Name or ID of the SSH key", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - 
"$ref": "#/components/schemas/ProjectUpdate" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Project" + "$ref": "#/components/schemas/SshKey" } } } @@ -5829,15 +5605,16 @@ }, "delete": { "tags": [ - "projects" + "session" ], - "summary": "Delete project", - "operationId": "project_delete", + "summary": "Delete SSH public key", + "description": "Delete an SSH public key associated with the currently authenticated user.", + "operationId": "current_user_ssh_key_delete", "parameters": [ { "in": "path", - "name": "project", - "description": "Name or ID of the project", + "name": "ssh_key", + "description": "Name or ID of the SSH key", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -5857,77 +5634,85 @@ } } }, - "/v1/projects/{project}/policy": { + "/v1/metrics/{metric_name}": { "get": { "tags": [ - "projects" + "metrics" ], - "summary": "Fetch project's IAM policy", - "operationId": "project_policy_view", + "summary": "View metrics", + "description": "View CPU, memory, or storage utilization metrics at the silo or project level.", + "operationId": "silo_metric", "parameters": [ { "in": "path", - "name": "project", - "description": "Name or ID of the project", + "name": "metric_name", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/SystemMetricName" } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProjectRolePolicy" - } - } + }, + { + "in": "query", + "name": "end_time", + "description": "An exclusive end time of metrics.", + "schema": { + "type": "string", + "format": "date-time" } }, - "4XX": { - "$ref": "#/components/responses/Error" + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "put": { - "tags": [ - "projects" - ], - "summary": "Update project's IAM policy", - "operationId": "project_policy_update", - "parameters": [ { - "in": "path", + "in": "query", + "name": "order", + "description": "Query result order", + "schema": { + "$ref": "#/components/schemas/PaginationOrder" + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "start_time", + "description": "An inclusive start time of metrics.", + "schema": { + "type": "string", + "format": "date-time" + } + }, + { + "in": "query", "name": "project", "description": "Name or ID of the project", - "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProjectRolePolicy" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ProjectRolePolicy" + "$ref": "#/components/schemas/MeasurementResultsPage" } } } @@ -5938,17 +5723,31 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [ + "end_time", + "start_time" + ] } } }, - 
"/v1/snapshots": { + "/v1/network-interfaces": { "get": { "tags": [ - "snapshots" + "instances" ], - "summary": "List snapshots", - "operationId": "snapshot_list", + "summary": "List network interfaces", + "operationId": "instance_network_interface_list", "parameters": [ + { + "in": "query", + "name": "instance", + "description": "Name or ID of the instance", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "limit", @@ -5972,7 +5771,7 @@ { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -5991,7 +5790,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SnapshotResultsPage" + "$ref": "#/components/schemas/InstanceNetworkInterfaceResultsPage" } } } @@ -6005,33 +5804,40 @@ }, "x-dropshot-pagination": { "required": [ - "project" + "instance" ] } }, "post": { "tags": [ - "snapshots" + "instances" ], - "summary": "Create snapshot", - "description": "Creates a point-in-time snapshot from a disk.", - "operationId": "snapshot_create", + "summary": "Create network interface", + "operationId": "instance_network_interface_create", "parameters": [ { "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "instance", + "description": "Name or ID of the instance", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SnapshotCreate" + "$ref": "#/components/schemas/InstanceNetworkInterfaceCreate" } } }, @@ -6043,7 +5849,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Snapshot" + "$ref": "#/components/schemas/InstanceNetworkInterface" } } } @@ -6057,27 +5863,35 @@ } } }, - "/v1/snapshots/{snapshot}": { + "/v1/network-interfaces/{interface}": { "get": { "tags": [ - "snapshots" + "instances" ], - "summary": "Fetch snapshot", - "operationId": "snapshot_view", + "summary": "Fetch network interface", + "operationId": "instance_network_interface_view", "parameters": [ { "in": "path", - "name": "snapshot", - "description": "Name or ID of the snapshot", + "name": "interface", + "description": "Name or ID of the network interface", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, + { + "in": "query", + "name": "instance", + "description": "Name or ID of the instance", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -6089,7 +5903,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Snapshot" + "$ref": "#/components/schemas/InstanceNetworkInterface" } } } @@ -6102,34 +5916,59 @@ } } }, - "delete": { + "put": { "tags": [ - "snapshots" + "instances" ], - "summary": "Delete snapshot", - "operationId": "snapshot_delete", + "summary": "Update network interface", + "operationId": "instance_network_interface_update", "parameters": [ { "in": "path", - "name": "snapshot", - 
"description": "Name or ID of the snapshot", + "name": "interface", + "description": "Name or ID of the network interface", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, + { + "in": "query", + "name": "instance", + "description": "Name or ID of the instance", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InstanceNetworkInterfaceUpdate" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "successful deletion" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InstanceNetworkInterface" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -6138,51 +5977,69 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/hardware/disks": { - "get": { + }, + "delete": { "tags": [ - "system/hardware" + "instances" ], - "summary": "List physical disks", - "operationId": "physical_disk_list", + "summary": "Delete network interface", + "description": "Note that the primary interface for an instance cannot be deleted if there are any secondary interfaces. A new primary interface must be designated first. The primary interface can be deleted if there are no secondary interfaces.", + "operationId": "instance_network_interface_delete", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "in": "path", + "name": "interface", + "description": "Name or ID of the network interface", + "required": true, "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "instance", + "description": "Name or ID of the instance", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "sort_by", + "name": "project", + "description": "Name or ID of the project, only required if `instance` is provided as a `Name`", "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/ping": { + "get": { + "tags": [ + "system/status" + ], + "summary": "Ping API", + "description": "Always responds with Ok if it responds at all.", + "operationId": "ping", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PhysicalDiskResultsPage" + "$ref": "#/components/schemas/Ping" } } } @@ -6193,38 +6050,58 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/system/hardware/disks/{disk_id}": { + "/v1/policy": { "get": { "tags": [ - "system/hardware" + "silos" ], - "summary": "Get a physical disk", - "operationId": "physical_disk_view", - "parameters": [ - { - "in": "path", - "name": 
"disk_id", - "description": "ID of the physical disk", - "required": true, - "schema": { - "type": "string", - "format": "uuid" + "summary": "Fetch current silo's IAM policy", + "operationId": "policy_view", + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloRolePolicy" + } + } } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" } + } + }, + "put": { + "tags": [ + "silos" ], + "summary": "Update current silo's IAM policy", + "operationId": "policy_update", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloRolePolicy" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PhysicalDisk" + "$ref": "#/components/schemas/SiloRolePolicy" } } } @@ -6238,42 +6115,14 @@ } } }, - "/v1/system/hardware/rack-switch-port/{rack_id}/{switch_location}/{port}/lldp/neighbors": { + "/v1/projects": { "get": { "tags": [ - "system/networking" + "projects" ], - "summary": "Fetch the LLDP neighbors seen on a switch port", - "operationId": "networking_switch_port_lldp_neighbors", + "summary": "List projects", + "operationId": "project_list", "parameters": [ - { - "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - }, - { - "in": "path", - "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "in": "path", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - }, { "in": "query", "name": "limit", @@ -6298,7 +6147,7 @@ "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -6308,7 +6157,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LldpNeighborResultsPage" + "$ref": "#/components/schemas/ProjectResultsPage" } } } @@ -6323,51 +6172,30 @@ "x-dropshot-pagination": { "required": [] } - } - }, - "/v1/system/hardware/racks": { - "get": { + }, + "post": { "tags": [ - "system/hardware" + "projects" ], - "summary": "List racks", - "operationId": "rack_list", - "parameters": [ - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" + "summary": "Create project", + "operationId": "project_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProjectCreate" + } } }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/IdSortMode" - } - } - ], + "required": true + }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/RackResultsPage" + "$ref": "#/components/schemas/Project" } } } @@ -6378,28 +6206,24 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/system/hardware/racks/{rack_id}": { + "/v1/projects/{project}": { "get": { "tags": [ - "system/hardware" + "projects" ], - "summary": "Fetch rack", - "operationId": "rack_view", + "summary": "Fetch project", + "operationId": "project_view", "parameters": [ { "in": "path", - "name": "rack_id", - "description": "ID of the rack", + "name": "project", + "description": "Name or ID of the project", "required": true, "schema": { - "type": "string", - "format": "uuid" + "$ref": "#/components/schemas/NameOrId" } } ], @@ -6409,7 +6233,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Rack" + "$ref": "#/components/schemas/Project" } } } @@ -6421,51 +6245,41 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/hardware/sleds": { - "get": { + }, + "put": { "tags": [ - "system/hardware" + "projects" ], - "summary": "List sleds", - "operationId": "sled_list", + "summary": "Update a project", + "operationId": "project_update", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, - { - "in": "query", - "name": "sort_by", + "in": "path", + "name": "project", + "description": "Name or ID of the project", + "required": true, "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProjectUpdate" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SledResultsPage" + "$ref": "#/components/schemas/Project" } } } @@ -6476,34 +6290,63 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } }, - "post": { + "delete": { "tags": [ - "system/hardware" + "projects" ], - "summary": "Add sled to initialized rack", - "operationId": "sled_add", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UninitializedSledId" - } + "summary": "Delete project", + "operationId": "project_delete", + "parameters": [ + { + "in": "path", + "name": "project", + "description": "Name or ID of the project", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" } + } + ], + "responses": { + "204": { + "description": "successful deletion" }, - "required": true - }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/projects/{project}/policy": { + "get": { + "tags": [ + "projects" + ], + "summary": "Fetch project's IAM policy", + "operationId": "project_policy_view", + "parameters": [ + { + "in": "path", + "name": "project", + "description": "Name or ID of the project", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], "responses": { - "201": { - "description": "successful creation", + "200": { + 
"description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SledId" + "$ref": "#/components/schemas/ProjectRolePolicy" } } } @@ -6515,34 +6358,41 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/hardware/sleds/{sled_id}": { - "get": { + }, + "put": { "tags": [ - "system/hardware" + "projects" ], - "summary": "Fetch sled", - "operationId": "sled_view", + "summary": "Update project's IAM policy", + "operationId": "project_policy_update", "parameters": [ { "in": "path", - "name": "sled_id", - "description": "ID of the sled", + "name": "project", + "description": "Name or ID of the project", "required": true, "schema": { - "type": "string", - "format": "uuid" + "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProjectRolePolicy" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Sled" + "$ref": "#/components/schemas/ProjectRolePolicy" } } } @@ -6556,24 +6406,14 @@ } } }, - "/v1/system/hardware/sleds/{sled_id}/disks": { + "/v1/snapshots": { "get": { "tags": [ - "system/hardware" + "snapshots" ], - "summary": "List physical disks attached to sleds", - "operationId": "sled_physical_disk_list", + "summary": "List snapshots", + "operationId": "snapshot_list", "parameters": [ - { - "in": "path", - "name": "sled_id", - "description": "ID of the sled", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, { "in": "query", "name": "limit", @@ -6594,11 +6434,19 @@ "type": "string" } }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -6608,7 +6456,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PhysicalDiskResultsPage" + "$ref": "#/components/schemas/SnapshotResultsPage" } } } @@ -6621,28 +6469,150 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "project" + ] } - } - }, - "/v1/system/hardware/sleds/{sled_id}/instances": { - "get": { + }, + "post": { "tags": [ - "system/hardware" + "snapshots" ], - "summary": "List instances running on given sled", - "operationId": "sled_instance_list", + "summary": "Create snapshot", + "description": "Creates a point-in-time snapshot from a disk.", + "operationId": "snapshot_create", + "parameters": [ + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SnapshotCreate" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Snapshot" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/snapshots/{snapshot}": { + "get": { + "tags": [ + "snapshots" + ], + "summary": "Fetch snapshot", + "operationId": "snapshot_view", "parameters": [ { "in": "path", - "name": "sled_id", - "description": 
"ID of the sled", + "name": "snapshot", + "description": "Name or ID of the snapshot", "required": true, "schema": { - "type": "string", - "format": "uuid" + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Snapshot" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "delete": { + "tags": [ + "snapshots" + ], + "summary": "Delete snapshot", + "operationId": "snapshot_delete", + "parameters": [ + { + "in": "path", + "name": "snapshot", + "description": "Name or ID of the snapshot", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" } + } + ], + "responses": { + "204": { + "description": "successful deletion" }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/system/hardware/disks": { + "get": { + "tags": [ + "system/hardware" + ], + "summary": "List physical disks", + "operationId": "physical_disk_list", + "parameters": [ { "in": "query", "name": "limit", @@ -6677,7 +6647,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SledInstanceResultsPage" + "$ref": "#/components/schemas/PhysicalDiskResultsPage" } } } @@ -6694,18 +6664,18 @@ } } }, - "/v1/system/hardware/sleds/{sled_id}/provision-policy": { - "put": { + "/v1/system/hardware/disks/{disk_id}": { + "get": { "tags": [ "system/hardware" ], - "summary": "Set sled provision policy", - "operationId": "sled_set_provision_policy", + "summary": "Get a physical disk", + "operationId": "physical_disk_view", "parameters": [ { "in": "path", - "name": "sled_id", - "description": "ID of the sled", + "name": "disk_id", + "description": "ID of the physical disk", "required": true, "schema": { "type": "string", @@ -6713,23 +6683,13 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SledProvisionPolicyParams" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SledProvisionPolicyResponse" + "$ref": "#/components/schemas/PhysicalDisk" } } } @@ -6743,14 +6703,42 @@ } } }, - "/v1/system/hardware/sleds-uninitialized": { + "/v1/system/hardware/rack-switch-port/{rack_id}/{switch_location}/{port}/lldp/neighbors": { "get": { "tags": [ - "system/hardware" + "system/networking" ], - "summary": "List uninitialized sleds", - "operationId": "sled_list_uninitialized", + "summary": "Fetch the LLDP neighbors seen on a switch port", + "operationId": "networking_switch_port_lldp_neighbors", "parameters": [ + { + "in": "path", + "name": "port", + "description": "A name to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" + } + }, + { + "in": "path", + "name": "rack_id", + "description": "A rack id to use when selecting switch ports.", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "in": "path", + "name": 
"switch_location", + "description": "A switch location to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" + } + }, { "in": "query", "name": "limit", @@ -6770,6 +6758,13 @@ "nullable": true, "type": "string" } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/IdSortMode" + } } ], "responses": { @@ -6778,7 +6773,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UninitializedSledResultsPage" + "$ref": "#/components/schemas/LldpNeighborResultsPage" } } } @@ -6795,13 +6790,13 @@ } } }, - "/v1/system/hardware/switch-port": { + "/v1/system/hardware/racks": { "get": { "tags": [ "system/hardware" ], - "summary": "List switch ports", - "operationId": "networking_switch_port_list", + "summary": "List racks", + "operationId": "rack_list", "parameters": [ { "in": "query", @@ -6829,16 +6824,6 @@ "schema": { "$ref": "#/components/schemas/IdSortMode" } - }, - { - "in": "query", - "name": "switch_port_id", - "description": "An optional switch port id to use when listing switch ports.", - "schema": { - "nullable": true, - "type": "string", - "format": "uuid" - } } ], "responses": { @@ -6847,7 +6832,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortResultsPage" + "$ref": "#/components/schemas/RackResultsPage" } } } @@ -6864,41 +6849,23 @@ } } }, - "/v1/system/hardware/switch-port/{port}/lldp/config": { + "/v1/system/hardware/racks/{rack_id}": { "get": { "tags": [ - "system/networking" + "system/hardware" ], - "summary": "Fetch the LLDP configuration for a switch port", - "operationId": "networking_switch_port_lldp_config_view", + "summary": "Fetch rack", + "operationId": "rack_view", "parameters": [ { "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - }, - { - "in": "query", "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", + "description": "ID of the rack", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "in": "query", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } } ], "responses": { @@ -6907,7 +6874,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LldpLinkConfig" + "$ref": "#/components/schemas/Rack" } } } @@ -6919,116 +6886,92 @@ "$ref": "#/components/responses/Error" } } - }, - "post": { - "tags": [ - "system/networking" + } + }, + "/v1/system/hardware/sleds": { + "get": { + "tags": [ + "system/hardware" ], - "summary": "Update the LLDP configuration for a switch port", - "operationId": "networking_switch_port_lldp_config_update", + "summary": "List sleds", + "operationId": "sled_list", "parameters": [ { - "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/Name" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", - "required": true, + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent 
page", "schema": { - "type": "string", - "format": "uuid" + "nullable": true, + "type": "string" } }, { "in": "query", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, + "name": "sort_by", "schema": { - "$ref": "#/components/schemas/Name" + "$ref": "#/components/schemas/IdSortMode" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LldpLinkConfig" + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SledResultsPage" + } } } }, - "required": true - }, - "responses": { - "204": { - "description": "resource updated" - }, "4XX": { "$ref": "#/components/responses/Error" }, "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } - } - }, - "/v1/system/hardware/switch-port/{port}/settings": { + }, "post": { "tags": [ "system/hardware" ], - "summary": "Apply switch port settings", - "operationId": "networking_switch_port_apply_settings", - "parameters": [ - { - "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - }, - { - "in": "query", - "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "in": "query", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - } - ], + "summary": "Add sled to initialized rack", + "operationId": "sled_add", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortApplySettings" + "$ref": "#/components/schemas/UninitializedSledId" } } }, "required": true }, "responses": { - "204": { - "description": "resource updated" + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SledId" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -7037,46 +6980,37 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/system/hardware/sleds/{sled_id}": { + "get": { "tags": [ "system/hardware" ], - "summary": "Clear switch port settings", - "operationId": "networking_switch_port_clear_settings", + "summary": "Fetch sled", + "operationId": "sled_view", "parameters": [ { "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } - }, - { - "in": "query", - "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", + "name": "sled_id", + "description": "ID of the sled", "required": true, "schema": { "type": "string", "format": "uuid" } - }, - { - "in": "query", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, - "schema": { - "$ref": "#/components/schemas/Name" - } } ], "responses": { - "204": { - "description": "resource updated" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Sled" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -7087,40 +7021,49 @@ } } }, - 
"/v1/system/hardware/switch-port/{port}/status": { + "/v1/system/hardware/sleds/{sled_id}/disks": { "get": { "tags": [ "system/hardware" ], - "summary": "Get switch port status", - "operationId": "networking_switch_port_status", + "summary": "List physical disks attached to sleds", + "operationId": "sled_physical_disk_list", "parameters": [ { "in": "path", - "name": "port", - "description": "A name to use when selecting switch ports.", + "name": "sled_id", + "description": "ID of the sled", "required": true, "schema": { - "$ref": "#/components/schemas/Name" + "type": "string", + "format": "uuid" } }, { "in": "query", - "name": "rack_id", - "description": "A rack id to use when selecting switch ports.", - "required": true, + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "type": "string", - "format": "uuid" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "switch_location", - "description": "A switch location to use when selecting switch ports.", - "required": true, + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/Name" + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -7130,7 +7073,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchLinkState" + "$ref": "#/components/schemas/PhysicalDiskResultsPage" } } } @@ -7141,17 +7084,30 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/system/hardware/switches": { + "/v1/system/hardware/sleds/{sled_id}/instances": { "get": { "tags": [ "system/hardware" ], - "summary": "List switches", - "operationId": "switch_list", + "summary": "List instances running on given sled", + "operationId": "sled_instance_list", "parameters": [ + { + "in": "path", + "name": "sled_id", + "description": "ID of the sled", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, { "in": "query", "name": "limit", @@ -7186,7 +7142,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchResultsPage" + "$ref": "#/components/schemas/SledInstanceResultsPage" } } } @@ -7203,18 +7159,18 @@ } } }, - "/v1/system/hardware/switches/{switch_id}": { - "get": { + "/v1/system/hardware/sleds/{sled_id}/provision-policy": { + "put": { "tags": [ "system/hardware" ], - "summary": "Fetch switch", - "operationId": "switch_view", + "summary": "Set sled provision policy", + "operationId": "sled_set_provision_policy", "parameters": [ { "in": "path", - "name": "switch_id", - "description": "ID of the switch", + "name": "sled_id", + "description": "ID of the sled", "required": true, "schema": { "type": "string", @@ -7222,13 +7178,23 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SledProvisionPolicyParams" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Switch" + "$ref": "#/components/schemas/SledProvisionPolicyResponse" } } } @@ -7242,14 +7208,13 @@ } } }, - "/v1/system/identity-providers": { + "/v1/system/hardware/sleds-uninitialized": { "get": { "tags": [ - "system/silos" + "system/hardware" ], - "summary": "List identity 
providers for silo", - "description": "List identity providers for silo by silo name or ID.", - "operationId": "silo_identity_provider_list", + "summary": "List uninitialized sleds", + "operationId": "sled_list_uninitialized", "parameters": [ { "in": "query", @@ -7270,21 +7235,6 @@ "nullable": true, "type": "string" } - }, - { - "in": "query", - "name": "silo", - "description": "Name or ID of the silo", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } } ], "responses": { @@ -7293,7 +7243,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IdentityProviderResultsPage" + "$ref": "#/components/schemas/UninitializedSledResultsPage" } } } @@ -7306,48 +7256,63 @@ } }, "x-dropshot-pagination": { - "required": [ - "silo" - ] + "required": [] } } }, - "/v1/system/identity-providers/local/users": { - "post": { + "/v1/system/hardware/switch-port": { + "get": { "tags": [ - "system/silos" + "system/hardware" ], - "summary": "Create user", - "description": "Users can only be created in Silos with `provision_type` == `Fixed`. Otherwise, Silo users are just-in-time (JIT) provisioned when a user first logs in using an external Identity Provider.", - "operationId": "local_idp_user_create", + "summary": "List switch ports", + "operationId": "networking_switch_port_list", "parameters": [ { "in": "query", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UserCreate" - } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" } }, - "required": true - }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/IdSortMode" + } + }, + { + "in": "query", + "name": "switch_port_id", + "description": "An optional switch port id to use when listing switch ports.", + "schema": { + "nullable": true, + "type": "string", + "format": "uuid" + } + } + ], "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/User" + "$ref": "#/components/schemas/SwitchPortResultsPage" } } } @@ -7358,21 +7323,33 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/system/identity-providers/local/users/{user_id}": { - "delete": { + "/v1/system/hardware/switch-port/{port}/lldp/config": { + "get": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Delete user", - "operationId": "local_idp_user_delete", + "summary": "Fetch the LLDP configuration for a switch port", + "operationId": "networking_switch_port_lldp_config_view", "parameters": [ { "in": "path", - "name": "user_id", - "description": "The user's internal ID", + "name": "port", + "description": "A name to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" + } + }, + { + "in": "query", + "name": "rack_id", + "description": "A rack id to 
use when selecting switch ports.", "required": true, "schema": { "type": "string", @@ -7381,17 +7358,24 @@ }, { "in": "query", - "name": "silo", - "description": "Name or ID of the silo", + "name": "switch_location", + "description": "A switch location to use when selecting switch ports.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/Name" } } ], "responses": { - "204": { - "description": "successful deletion" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LldpLinkConfig" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -7400,21 +7384,27 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/identity-providers/local/users/{user_id}/set-password": { + }, "post": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Set or invalidate user's password", - "description": "Passwords can only be updated for users in Silos with identity mode `LocalOnly`.", - "operationId": "local_idp_user_set_password", + "summary": "Update the LLDP configuration for a switch port", + "operationId": "networking_switch_port_lldp_config_update", "parameters": [ { "in": "path", - "name": "user_id", - "description": "The user's internal ID", + "name": "port", + "description": "A name to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" + } + }, + { + "in": "query", + "name": "rack_id", + "description": "A rack id to use when selecting switch ports.", "required": true, "schema": { "type": "string", @@ -7423,11 +7413,11 @@ }, { "in": "query", - "name": "silo", - "description": "Name or ID of the silo", + "name": "switch_location", + "description": "A switch location to use when selecting switch ports.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/Name" } } ], @@ -7435,7 +7425,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserPassword" + "$ref": "#/components/schemas/LldpLinkConfig" } } }, @@ -7454,21 +7444,40 @@ } } }, - "/v1/system/identity-providers/saml": { + "/v1/system/hardware/switch-port/{port}/settings": { "post": { "tags": [ - "system/silos" + "system/hardware" ], - "summary": "Create SAML identity provider", - "operationId": "saml_identity_provider_create", + "summary": "Apply switch port settings", + "operationId": "networking_switch_port_apply_settings", "parameters": [ + { + "in": "path", + "name": "port", + "description": "A name to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" + } + }, { "in": "query", - "name": "silo", - "description": "Name or ID of the silo", + "name": "rack_id", + "description": "A rack id to use when selecting switch ports.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "uuid" + } + }, + { + "in": "query", + "name": "switch_location", + "description": "A switch location to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" } } ], @@ -7476,22 +7485,15 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SamlIdentityProviderCreate" + "$ref": "#/components/schemas/SwitchPortApplySettings" } } }, "required": true }, "responses": { - "201": { - "description": "successful creation", - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/SamlIdentityProvider" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -7500,44 +7502,46 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/identity-providers/saml/{provider}": { - "get": { + }, + "delete": { "tags": [ - "system/silos" + "system/hardware" ], - "summary": "Fetch SAML identity provider", - "operationId": "saml_identity_provider_view", + "summary": "Clear switch port settings", + "operationId": "networking_switch_port_clear_settings", "parameters": [ { "in": "path", - "name": "provider", - "description": "Name or ID of the SAML identity provider", + "name": "port", + "description": "A name to use when selecting switch ports.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/Name" } }, { "in": "query", - "name": "silo", - "description": "Name or ID of the silo", + "name": "rack_id", + "description": "A rack id to use when selecting switch ports.", + "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "uuid" + } + }, + { + "in": "query", + "name": "switch_location", + "description": "A switch location to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" } } ], "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SamlIdentityProvider" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -7548,39 +7552,40 @@ } } }, - "/v1/system/ip-pools": { + "/v1/system/hardware/switch-port/{port}/status": { "get": { "tags": [ - "system/ip-pools" + "system/hardware" ], - "summary": "List IP pools", - "operationId": "ip_pool_list", + "summary": "Get switch port status", + "operationId": "networking_switch_port_status", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "in": "path", + "name": "port", + "description": "A name to use when selecting switch ports.", + "required": true, + "schema": { + "$ref": "#/components/schemas/Name" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "rack_id", + "description": "A rack id to use when selecting switch ports.", + "required": true, "schema": { - "nullable": true, - "type": "string" + "type": "string", + "format": "uuid" } }, { "in": "query", - "name": "sort_by", + "name": "switch_location", + "description": "A switch location to use when selecting switch ports.", + "required": true, "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" + "$ref": "#/components/schemas/Name" } } ], @@ -7590,45 +7595,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - "x-dropshot-pagination": { - "required": [] - } - }, - "post": { - "tags": [ - "system/ip-pools" - ], - "summary": "Create IP pool", - "operationId": "ip_pool_create", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPoolCreate" - } - } - }, - "required": true - }, - "responses": { - 
"201": { - "description": "successful creation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPool" + "$ref": "#/components/schemas/SwitchLinkState" } } } @@ -7642,21 +7609,39 @@ } } }, - "/v1/system/ip-pools/{pool}": { + "/v1/system/hardware/switches": { "get": { "tags": [ - "system/ip-pools" + "system/hardware" ], - "summary": "Fetch IP pool", - "operationId": "ip_pool_view", + "summary": "List switches", + "operationId": "switch_list", "parameters": [ { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -7666,7 +7651,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPool" + "$ref": "#/components/schemas/SwitchResultsPage" } } } @@ -7677,42 +7662,38 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } - }, - "put": { + } + }, + "/v1/system/hardware/switches/{switch_id}": { + "get": { "tags": [ - "system/ip-pools" + "system/hardware" ], - "summary": "Update IP pool", - "operationId": "ip_pool_update", + "summary": "Fetch switch", + "operationId": "switch_view", "parameters": [ { "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "name": "switch_id", + "description": "ID of the switch", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "uuid" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPoolUpdate" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPool" + "$ref": "#/components/schemas/Switch" } } } @@ -7724,55 +7705,17 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { - "tags": [ - "system/ip-pools" - ], - "summary": "Delete IP pool", - "operationId": "ip_pool_delete", - "parameters": [ - { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "204": { - "description": "successful deletion" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } } }, - "/v1/system/ip-pools/{pool}/ranges": { + "/v1/system/identity-providers": { "get": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "List ranges for IP pool", - "description": "Ranges are ordered by their first address.", - "operationId": "ip_pool_range_list", + "summary": "List identity providers for silo", + "description": "List identity providers for silo by silo name or ID.", + "operationId": "silo_identity_provider_list", "parameters": [ - { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", 
"name": "limit", @@ -7792,6 +7735,21 @@ "nullable": true, "type": "string" } + }, + { + "in": "query", + "name": "silo", + "description": "Name or ID of the silo", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } } ], "responses": { @@ -7800,7 +7758,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolRangeResultsPage" + "$ref": "#/components/schemas/IdentityProviderResultsPage" } } } @@ -7813,23 +7771,25 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "silo" + ] } } }, - "/v1/system/ip-pools/{pool}/ranges/add": { + "/v1/system/identity-providers/local/users": { "post": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "Add range to IP pool", - "description": "IPv6 ranges are not allowed yet.", - "operationId": "ip_pool_range_add", + "summary": "Create user", + "description": "Users can only be created in Silos with `provision_type` == `Fixed`. Otherwise, Silo users are just-in-time (JIT) provisioned when a user first logs in using an external Identity Provider.", + "operationId": "local_idp_user_create", "parameters": [ { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "in": "query", + "name": "silo", + "description": "Name or ID of the silo", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -7840,7 +7800,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpRange" + "$ref": "#/components/schemas/UserCreate" } } }, @@ -7852,7 +7812,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolRange" + "$ref": "#/components/schemas/User" } } } @@ -7866,37 +7826,37 @@ } } }, - "/v1/system/ip-pools/{pool}/ranges/remove": { - "post": { + "/v1/system/identity-providers/local/users/{user_id}": { + "delete": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "Remove range from IP pool", - "operationId": "ip_pool_range_remove", + "summary": "Delete user", + "operationId": "local_idp_user_delete", "parameters": [ { "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "name": "user_id", + "description": "The user's internal ID", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + }, + { + "in": "query", + "name": "silo", + "description": "Name or ID of the silo", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpRange" - } - } - }, - "required": true - }, "responses": { "204": { - "description": "resource updated" + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -7907,85 +7867,29 @@ } } }, - "/v1/system/ip-pools/{pool}/silos": { - "get": { + "/v1/system/identity-providers/local/users/{user_id}/set-password": { + "post": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "List IP pool's linked silos", - "operationId": "ip_pool_silo_list", + "summary": "Set or invalidate user's password", + "description": "Passwords can only be updated for users in Silos with identity mode `LocalOnly`.", + "operationId": "local_idp_user_set_password", "parameters": [ { "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "name": "user_id", + "description": "The user's internal ID", "required": true, "schema": { - "$ref": 
"#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" + "type": "string", + "format": "uuid" } }, { "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/IdSortMode" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPoolSiloLinkResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - "x-dropshot-pagination": { - "required": [] - } - }, - "post": { - "tags": [ - "system/ip-pools" - ], - "summary": "Link IP pool to silo", - "description": "Users in linked silos can allocate external IPs from this pool for their instances. A silo can have at most one default pool. IPs are allocated from the default pool when users ask for one without specifying a pool.", - "operationId": "ip_pool_silo_link", - "parameters": [ - { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", + "name": "silo", + "description": "Name or ID of the silo", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -7996,22 +7900,15 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolLinkSilo" + "$ref": "#/components/schemas/UserPassword" } } }, "required": true }, "responses": { - "201": { - "description": "successful creation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPoolSiloLink" - } - } - } + "204": { + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -8022,26 +7919,18 @@ } } }, - "/v1/system/ip-pools/{pool}/silos/{silo}": { - "put": { + "/v1/system/identity-providers/saml": { + "post": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "Make IP pool default for silo", - "description": "When a user asks for an IP (e.g., at instance create time) without specifying a pool, the IP comes from the default pool if a default is configured. 
When a pool is made the default for a silo, any existing default will remain linked to the silo, but will no longer be the default.", - "operationId": "ip_pool_silo_update", + "summary": "Create SAML identity provider", + "operationId": "saml_identity_provider_create", "parameters": [ { - "in": "path", - "name": "pool", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "path", + "in": "query", "name": "silo", + "description": "Name or ID of the silo", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -8052,19 +7941,19 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolSiloUpdate" + "$ref": "#/components/schemas/SamlIdentityProviderCreate" } } }, "required": true }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolSiloLink" + "$ref": "#/components/schemas/SamlIdentityProvider" } } } @@ -8076,58 +7965,29 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/system/identity-providers/saml/{provider}": { + "get": { "tags": [ - "system/ip-pools" + "system/silos" ], - "summary": "Unlink IP pool from silo", - "description": "Will fail if there are any outstanding IPs allocated in the silo.", - "operationId": "ip_pool_silo_unlink", + "summary": "Fetch SAML identity provider", + "operationId": "saml_identity_provider_view", "parameters": [ { "in": "path", - "name": "pool", + "name": "provider", + "description": "Name or ID of the SAML identity provider", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, { - "in": "path", + "in": "query", "name": "silo", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "204": { - "description": "resource updated" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/system/ip-pools/{pool}/utilization": { - "get": { - "tags": [ - "system/ip-pools" - ], - "summary": "Fetch IP pool utilization", - "operationId": "ip_pool_utilization_view", - "parameters": [ - { - "in": "path", - "name": "pool", - "description": "Name or ID of the IP pool", - "required": true, + "description": "Name or ID of the silo", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -8139,34 +7999,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolUtilization" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/system/ip-pools-service": { - "get": { - "tags": [ - "system/ip-pools" - ], - "summary": "Fetch Oxide service IP pool", - "operationId": "ip_pool_service_view", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpPool" + "$ref": "#/components/schemas/SamlIdentityProvider" } } } @@ -8180,14 +8013,13 @@ } } }, - "/v1/system/ip-pools-service/ranges": { + "/v1/system/ip-pools": { "get": { "tags": [ "system/ip-pools" ], - "summary": "List IP ranges for the Oxide service pool", - "description": "Ranges are ordered by their first address.", - "operationId": "ip_pool_service_range_list", + "summary": "List IP pools", + "operationId": "ip_pool_list", "parameters": [ { "in": "query", @@ -8208,6 +8040,13 @@ 
"nullable": true, "type": "string" } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } } ], "responses": { @@ -8216,7 +8055,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolRangeResultsPage" + "$ref": "#/components/schemas/IpPoolResultsPage" } } } @@ -8231,21 +8070,18 @@ "x-dropshot-pagination": { "required": [] } - } - }, - "/v1/system/ip-pools-service/ranges/add": { + }, "post": { "tags": [ "system/ip-pools" ], - "summary": "Add IP range to Oxide service pool", - "description": "IPv6 ranges are not allowed yet.", - "operationId": "ip_pool_service_range_add", + "summary": "Create IP pool", + "operationId": "ip_pool_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpRange" + "$ref": "#/components/schemas/IpPoolCreate" } } }, @@ -8257,7 +8093,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/IpPoolRange" + "$ref": "#/components/schemas/IpPool" } } } @@ -8271,26 +8107,34 @@ } } }, - "/v1/system/ip-pools-service/ranges/remove": { - "post": { + "/v1/system/ip-pools/{pool}": { + "get": { "tags": [ "system/ip-pools" ], - "summary": "Remove IP range from Oxide service pool", - "operationId": "ip_pool_service_range_remove", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/IpRange" - } - } - }, - "required": true - }, - "responses": { - "204": { - "description": "resource updated" + "summary": "Fetch IP pool", + "operationId": "ip_pool_view", + "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpPool" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -8299,87 +8143,41 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/metrics/{metric_name}": { - "get": { + }, + "put": { "tags": [ - "system/metrics" + "system/ip-pools" ], - "summary": "View metrics", - "description": "View CPU, memory, or storage utilization metrics at the fleet or silo level.", - "operationId": "system_metric", + "summary": "Update IP pool", + "operationId": "ip_pool_update", "parameters": [ { "in": "path", - "name": "metric_name", + "name": "pool", + "description": "Name or ID of the IP pool", "required": true, - "schema": { - "$ref": "#/components/schemas/SystemMetricName" - } - }, - { - "in": "query", - "name": "end_time", - "description": "An exclusive end time of metrics.", - "schema": { - "type": "string", - "format": "date-time" - } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "order", - "description": "Query result order", - "schema": { - "$ref": "#/components/schemas/PaginationOrder" - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, - { - "in": "query", - "name": "start_time", - "description": "An inclusive start time of metrics.", - "schema": { - "type": "string", - "format": "date-time" - } - }, - { - "in": 
"query", - "name": "silo", - "description": "Name or ID of the silo", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpPoolUpdate" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MeasurementResultsPage" + "$ref": "#/components/schemas/IpPool" } } } @@ -8390,23 +8188,56 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [ - "end_time", - "start_time" - ] + } + }, + "delete": { + "tags": [ + "system/ip-pools" + ], + "summary": "Delete IP pool", + "operationId": "ip_pool_delete", + "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } } } }, - "/v1/system/networking/address-lot": { + "/v1/system/ip-pools/{pool}/ranges": { "get": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "List address lots", - "operationId": "networking_address_lot_list", + "summary": "List ranges for IP pool", + "description": "Ranges are ordered by their first address.", + "operationId": "ip_pool_range_list", "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "limit", @@ -8426,13 +8257,6 @@ "nullable": true, "type": "string" } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } } ], "responses": { @@ -8441,7 +8265,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddressLotResultsPage" + "$ref": "#/components/schemas/IpPoolRangeResultsPage" } } } @@ -8456,18 +8280,32 @@ "x-dropshot-pagination": { "required": [] } - }, + } + }, + "/v1/system/ip-pools/{pool}/ranges/add": { "post": { "tags": [ - "system/networking" + "system/ip-pools" + ], + "summary": "Add range to IP pool", + "description": "IPv6 ranges are not allowed yet.", + "operationId": "ip_pool_range_add", + "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } ], - "summary": "Create address lot", - "operationId": "networking_address_lot_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddressLotCreate" + "$ref": "#/components/schemas/IpRange" } } }, @@ -8479,7 +8317,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddressLotCreateResponse" + "$ref": "#/components/schemas/IpPoolRange" } } } @@ -8493,27 +8331,37 @@ } } }, - "/v1/system/networking/address-lot/{address_lot}": { - "delete": { + "/v1/system/ip-pools/{pool}/ranges/remove": { + "post": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Delete address lot", - "operationId": "networking_address_lot_delete", + "summary": "Remove range from IP pool", + "operationId": "ip_pool_range_remove", "parameters": [ { "in": "path", - "name": "address_lot", - "description": "Name or ID of the address 
lot", + "name": "pool", + "description": "Name or ID of the IP pool", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpRange" + } + } + }, + "required": true + }, "responses": { "204": { - "description": "successful deletion" + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -8524,18 +8372,18 @@ } } }, - "/v1/system/networking/address-lot/{address_lot}/blocks": { + "/v1/system/ip-pools/{pool}/silos": { "get": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "List blocks in address lot", - "operationId": "networking_address_lot_block_list", + "summary": "List IP pool's linked silos", + "operationId": "ip_pool_silo_list", "parameters": [ { "in": "path", - "name": "address_lot", - "description": "Name or ID of the address lot", + "name": "pool", + "description": "Name or ID of the IP pool", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -8575,7 +8423,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AddressLotBlockResultsPage" + "$ref": "#/components/schemas/IpPoolSiloLinkResultsPage" } } } @@ -8590,57 +8438,42 @@ "x-dropshot-pagination": { "required": [] } - } - }, - "/v1/system/networking/allow-list": { - "get": { + }, + "post": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Get user-facing services IP allowlist", - "operationId": "networking_allow_list_view", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AllowList" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" + "summary": "Link IP pool to silo", + "description": "Users in linked silos can allocate external IPs from this pool for their instances. A silo can have at most one default pool. IPs are allocated from the default pool when users ask for one without specifying a pool.", + "operationId": "ip_pool_silo_link", + "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } - } - }, - "put": { - "tags": [ - "system/networking" ], - "summary": "Update user-facing services IP allowlist", - "operationId": "networking_allow_list_update", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AllowListUpdate" + "$ref": "#/components/schemas/IpPoolLinkSilo" } } }, "required": true }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AllowList" + "$ref": "#/components/schemas/IpPoolSiloLink" } } } @@ -8654,23 +8487,86 @@ } } }, - "/v1/system/networking/bfd-disable": { - "post": { + "/v1/system/ip-pools/{pool}/silos/{silo}": { + "put": { "tags": [ - "system/networking" + "system/ip-pools" + ], + "summary": "Make IP pool default for silo", + "description": "When a user asks for an IP (e.g., at instance create time) without specifying a pool, the IP comes from the default pool if a default is configured. 
When a pool is made the default for a silo, any existing default will remain linked to the silo, but will no longer be the default.", + "operationId": "ip_pool_silo_update", + "parameters": [ + { + "in": "path", + "name": "pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "path", + "name": "silo", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } ], - "summary": "Disable a BFD session", - "operationId": "networking_bfd_disable", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BfdSessionDisable" + "$ref": "#/components/schemas/IpPoolSiloUpdate" } } }, "required": true }, + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpPoolSiloLink" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "delete": { + "tags": [ + "system/ip-pools" + ], + "summary": "Unlink IP pool from silo", + "description": "Will fail if there are any outstanding IPs allocated in the silo.", + "operationId": "ip_pool_silo_unlink", + "parameters": [ + { + "in": "path", + "name": "pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "path", + "name": "silo", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], "responses": { "204": { "description": "resource updated" @@ -8684,26 +8580,34 @@ } } }, - "/v1/system/networking/bfd-enable": { - "post": { + "/v1/system/ip-pools/{pool}/utilization": { + "get": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Enable a BFD session", - "operationId": "networking_bfd_enable", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BfdSessionEnable" - } + "summary": "Fetch IP pool utilization", + "operationId": "ip_pool_utilization_view", + "parameters": [ + { + "in": "path", + "name": "pool", + "description": "Name or ID of the IP pool", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" } - }, - "required": true - }, + } + ], "responses": { - "204": { - "description": "resource updated" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpPoolUtilization" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -8714,24 +8618,20 @@ } } }, - "/v1/system/networking/bfd-status": { + "/v1/system/ip-pools-service": { "get": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Get BFD status", - "operationId": "networking_bfd_status", + "summary": "Fetch Oxide service IP pool", + "operationId": "ip_pool_service_view", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "title": "Array_of_BfdStatus", - "type": "array", - "items": { - "$ref": "#/components/schemas/BfdStatus" - } + "$ref": "#/components/schemas/IpPool" } } } @@ -8745,13 +8645,14 @@ } } }, - "/v1/system/networking/bgp": { + "/v1/system/ip-pools-service/ranges": { "get": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "List BGP configurations", - "operationId": "networking_bgp_config_list", + "summary": "List IP ranges for the Oxide service pool", + "description": "Ranges are ordered by their first address.", + "operationId": 
"ip_pool_service_range_list", "parameters": [ { "in": "query", @@ -8772,13 +8673,6 @@ "nullable": true, "type": "string" } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } } ], "responses": { @@ -8787,7 +8681,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpConfigResultsPage" + "$ref": "#/components/schemas/IpPoolRangeResultsPage" } } } @@ -8802,18 +8696,21 @@ "x-dropshot-pagination": { "required": [] } - }, + } + }, + "/v1/system/ip-pools-service/ranges/add": { "post": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Create new BGP configuration", - "operationId": "networking_bgp_config_create", + "summary": "Add IP range to Oxide service pool", + "description": "IPv6 ranges are not allowed yet.", + "operationId": "ip_pool_service_range_add", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpConfigCreate" + "$ref": "#/components/schemas/IpRange" } } }, @@ -8825,7 +8722,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpConfig" + "$ref": "#/components/schemas/IpPoolRange" } } } @@ -8837,24 +8734,25 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/system/ip-pools-service/ranges/remove": { + "post": { "tags": [ - "system/networking" + "system/ip-pools" ], - "summary": "Delete BGP configuration", - "operationId": "networking_bgp_config_delete", - "parameters": [ - { - "in": "query", - "name": "name_or_id", - "description": "A name or id to use when selecting BGP config.", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" + "summary": "Remove IP range from Oxide service pool", + "operationId": "ip_pool_service_range_remove", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IpRange" + } } - } - ], + }, + "required": true + }, "responses": { "204": { "description": "resource updated" @@ -8868,14 +8766,32 @@ } } }, - "/v1/system/networking/bgp-announce-set": { + "/v1/system/metrics/{metric_name}": { "get": { "tags": [ - "system/networking" + "system/metrics" ], - "summary": "List BGP announce sets", - "operationId": "networking_bgp_announce_set_list", + "summary": "View metrics", + "description": "View CPU, memory, or storage utilization metrics at the fleet or silo level.", + "operationId": "system_metric", "parameters": [ + { + "in": "path", + "name": "metric_name", + "required": true, + "schema": { + "$ref": "#/components/schemas/SystemMetricName" + } + }, + { + "in": "query", + "name": "end_time", + "description": "An exclusive end time of metrics.", + "schema": { + "type": "string", + "format": "date-time" + } + }, { "in": "query", "name": "limit", @@ -8889,11 +8805,91 @@ }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "order", + "description": "Query result order", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/PaginationOrder" + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "start_time", + "description": "An inclusive start time of metrics.", + "schema": { + "type": "string", + "format": "date-time" + } + }, + { + "in": "query", + "name": "silo", + "description": "Name 
or ID of the silo", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/MeasurementResultsPage" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + }, + "x-dropshot-pagination": { + "required": [ + "end_time", + "start_time" + ] + } + } + }, + "/v1/system/networking/address-lot": { + "get": { + "tags": [ + "system/networking" + ], + "summary": "List address lots", + "operationId": "networking_address_lot_list", + "parameters": [ + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" } }, { @@ -8910,11 +8906,7 @@ "content": { "application/json": { "schema": { - "title": "Array_of_BgpAnnounceSet", - "type": "array", - "items": { - "$ref": "#/components/schemas/BgpAnnounceSet" - } + "$ref": "#/components/schemas/AddressLotResultsPage" } } } @@ -8930,18 +8922,17 @@ "required": [] } }, - "put": { + "post": { "tags": [ "system/networking" ], - "summary": "Update BGP announce set", - "description": "If the announce set exists, this endpoint replaces the existing announce set with the one specified.", - "operationId": "networking_bgp_announce_set_update", + "summary": "Create address lot", + "operationId": "networking_address_lot_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpAnnounceSetCreate" + "$ref": "#/components/schemas/AddressLotCreate" } } }, @@ -8953,7 +8944,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpAnnounceSet" + "$ref": "#/components/schemas/AddressLotCreateResponse" } } } @@ -8967,18 +8958,18 @@ } } }, - "/v1/system/networking/bgp-announce-set/{announce_set}": { + "/v1/system/networking/address-lot/{address_lot}": { "delete": { "tags": [ "system/networking" ], - "summary": "Delete BGP announce set", - "operationId": "networking_bgp_announce_set_delete", + "summary": "Delete address lot", + "operationId": "networking_address_lot_delete", "parameters": [ { "in": "path", - "name": "announce_set", - "description": "Name or ID of the announce set", + "name": "address_lot", + "description": "Name or ID of the address lot", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -8987,7 +8978,7 @@ ], "responses": { "204": { - "description": "resource updated" + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -8998,22 +8989,49 @@ } } }, - "/v1/system/networking/bgp-announce-set/{announce_set}/announcement": { + "/v1/system/networking/address-lot/{address_lot}/blocks": { "get": { "tags": [ "system/networking" ], - "summary": "Get originated routes for a specified BGP announce set", - "operationId": "networking_bgp_announcement_list", + "summary": "List blocks in address lot", + "operationId": "networking_address_lot_block_list", "parameters": [ { "in": "path", - "name": "announce_set", - "description": "Name or ID of the announce set", + "name": "address_lot", + "description": "Name or ID of the address lot", "required": true, "schema": { "$ref": 
"#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/IdSortMode" + } } ], "responses": { @@ -9022,11 +9040,7 @@ "content": { "application/json": { "schema": { - "title": "Array_of_BgpAnnouncement", - "type": "array", - "items": { - "$ref": "#/components/schemas/BgpAnnouncement" - } + "$ref": "#/components/schemas/AddressLotBlockResultsPage" } } } @@ -9037,23 +9051,26 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/system/networking/bgp-exported": { + "/v1/system/networking/allow-list": { "get": { "tags": [ "system/networking" ], - "summary": "Get BGP exported routes", - "operationId": "networking_bgp_exported", + "summary": "Get user-facing services IP allowlist", + "operationId": "networking_allow_list_view", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BgpExported" + "$ref": "#/components/schemas/AllowList" } } } @@ -9065,35 +9082,30 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/networking/bgp-message-history": { - "get": { + }, + "put": { "tags": [ "system/networking" ], - "summary": "Get BGP router message history", - "operationId": "networking_bgp_message_history", - "parameters": [ - { - "in": "query", - "name": "asn", - "description": "The ASN to filter on. Required.", - "required": true, - "schema": { - "type": "integer", - "format": "uint32", - "minimum": 0 + "summary": "Update user-facing services IP allowlist", + "operationId": "networking_allow_list_update", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AllowListUpdate" + } } - } - ], + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AggregateBgpMessageHistory" + "$ref": "#/components/schemas/AllowList" } } } @@ -9107,41 +9119,27 @@ } } }, - "/v1/system/networking/bgp-routes-ipv4": { - "get": { + "/v1/system/networking/bfd-disable": { + "post": { "tags": [ "system/networking" ], - "summary": "Get imported IPv4 BGP routes", - "operationId": "networking_bgp_imported_routes_ipv4", - "parameters": [ - { - "in": "query", - "name": "asn", - "description": "The ASN to filter on. 
Required.", - "required": true, - "schema": { - "type": "integer", - "format": "uint32", - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "title": "Array_of_BgpImportedRouteIpv4", - "type": "array", - "items": { - "$ref": "#/components/schemas/BgpImportedRouteIpv4" - } - } + "summary": "Disable a BFD session", + "operationId": "networking_bfd_disable", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BfdSessionDisable" } } }, + "required": true + }, + "responses": { + "204": { + "description": "resource updated" + }, "4XX": { "$ref": "#/components/responses/Error" }, @@ -9151,23 +9149,53 @@ } } }, - "/v1/system/networking/bgp-status": { - "get": { + "/v1/system/networking/bfd-enable": { + "post": { "tags": [ "system/networking" ], - "summary": "Get BGP peer status", - "operationId": "networking_bgp_status", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "title": "Array_of_BgpPeerStatus", - "type": "array", - "items": { - "$ref": "#/components/schemas/BgpPeerStatus" + "summary": "Enable a BFD session", + "operationId": "networking_bfd_enable", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BfdSessionEnable" + } + } + }, + "required": true + }, + "responses": { + "204": { + "description": "resource updated" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/system/networking/bfd-status": { + "get": { + "tags": [ + "system/networking" + ], + "summary": "Get BFD status", + "operationId": "networking_bfd_status", + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "title": "Array_of_BfdStatus", + "type": "array", + "items": { + "$ref": "#/components/schemas/BfdStatus" } } } @@ -9182,13 +9210,13 @@ } } }, - "/v1/system/networking/loopback-address": { + "/v1/system/networking/bgp": { "get": { "tags": [ "system/networking" ], - "summary": "List loopback addresses", - "operationId": "networking_loopback_address_list", + "summary": "List BGP configurations", + "operationId": "networking_bgp_config_list", "parameters": [ { "in": "query", @@ -9214,7 +9242,7 @@ "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -9224,7 +9252,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LoopbackAddressResultsPage" + "$ref": "#/components/schemas/BgpConfigResultsPage" } } } @@ -9244,13 +9272,13 @@ "tags": [ "system/networking" ], - "summary": "Create loopback address", - "operationId": "networking_loopback_address_create", + "summary": "Create new BGP configuration", + "operationId": "networking_bgp_config_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LoopbackAddressCreate" + "$ref": "#/components/schemas/BgpConfigCreate" } } }, @@ -9262,7 +9290,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LoopbackAddress" + "$ref": "#/components/schemas/BgpConfig" } } } @@ -9274,60 +9302,27 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/networking/loopback-address/{rack_id}/{switch_location}/{address}/{subnet_mask}": { + }, "delete": { 
"tags": [ "system/networking" ], - "summary": "Delete loopback address", - "operationId": "networking_loopback_address_delete", + "summary": "Delete BGP configuration", + "operationId": "networking_bgp_config_delete", "parameters": [ { - "in": "path", - "name": "address", - "description": "The IP address and subnet mask to use when selecting the loopback address.", - "required": true, - "schema": { - "type": "string", - "format": "ip" - } - }, - { - "in": "path", - "name": "rack_id", - "description": "The rack to use when selecting the loopback address.", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "in": "path", - "name": "subnet_mask", - "description": "The IP address and subnet mask to use when selecting the loopback address.", - "required": true, - "schema": { - "type": "integer", - "format": "uint8", - "minimum": 0 - } - }, - { - "in": "path", - "name": "switch_location", - "description": "The switch location to use when selecting the loopback address.", + "in": "query", + "name": "name_or_id", + "description": "A name or id to use when selecting BGP config.", "required": true, "schema": { - "$ref": "#/components/schemas/Name" + "$ref": "#/components/schemas/NameOrId" } } ], "responses": { "204": { - "description": "successful deletion" + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -9338,13 +9333,13 @@ } } }, - "/v1/system/networking/switch-port-settings": { + "/v1/system/networking/bgp-announce-set": { "get": { "tags": [ "system/networking" ], - "summary": "List switch port settings", - "operationId": "networking_switch_port_settings_list", + "summary": "List BGP announce sets", + "operationId": "networking_bgp_announce_set_list", "parameters": [ { "in": "query", @@ -9366,14 +9361,6 @@ "type": "string" } }, - { - "in": "query", - "name": "port_settings", - "description": "An optional name or id to use when selecting port settings.", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "sort_by", @@ -9388,7 +9375,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortSettingsResultsPage" + "title": "Array_of_BgpAnnounceSet", + "type": "array", + "items": { + "$ref": "#/components/schemas/BgpAnnounceSet" + } } } } @@ -9404,17 +9395,18 @@ "required": [] } }, - "post": { + "put": { "tags": [ "system/networking" ], - "summary": "Create switch port settings", - "operationId": "networking_switch_port_settings_create", + "summary": "Update BGP announce set", + "description": "If the announce set exists, this endpoint replaces the existing announce set with the one specified.", + "operationId": "networking_bgp_announce_set_update", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortSettingsCreate" + "$ref": "#/components/schemas/BgpAnnounceSetCreate" } } }, @@ -9426,7 +9418,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortSettingsView" + "$ref": "#/components/schemas/BgpAnnounceSet" } } } @@ -9438,18 +9430,21 @@ "$ref": "#/components/responses/Error" } } - }, + } + }, + "/v1/system/networking/bgp-announce-set/{announce_set}": { "delete": { "tags": [ "system/networking" ], - "summary": "Delete switch port settings", - "operationId": "networking_switch_port_settings_delete", + "summary": "Delete BGP announce set", + "operationId": "networking_bgp_announce_set_delete", "parameters": [ { - "in": "query", - "name": "port_settings", - 
"description": "An optional name or id to use when selecting port settings.", + "in": "path", + "name": "announce_set", + "description": "Name or ID of the announce set", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -9457,7 +9452,7 @@ ], "responses": { "204": { - "description": "successful deletion" + "description": "resource updated" }, "4XX": { "$ref": "#/components/responses/Error" @@ -9468,18 +9463,18 @@ } } }, - "/v1/system/networking/switch-port-settings/{port}": { + "/v1/system/networking/bgp-announce-set/{announce_set}/announcement": { "get": { "tags": [ "system/networking" ], - "summary": "Get information about switch port", - "operationId": "networking_switch_port_settings_view", + "summary": "Get originated routes for a specified BGP announce set", + "operationId": "networking_bgp_announcement_list", "parameters": [ { "in": "path", - "name": "port", - "description": "A name or id to use when selecting switch port settings info objects.", + "name": "announce_set", + "description": "Name or ID of the announce set", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -9492,7 +9487,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SwitchPortSettingsView" + "title": "Array_of_BgpAnnouncement", + "type": "array", + "items": { + "$ref": "#/components/schemas/BgpAnnouncement" + } } } } @@ -9506,20 +9505,20 @@ } } }, - "/v1/system/policy": { + "/v1/system/networking/bgp-exported": { "get": { "tags": [ - "policy" + "system/networking" ], - "summary": "Fetch top-level IAM policy", - "operationId": "system_policy_view", + "summary": "Get BGP exported routes", + "operationId": "networking_bgp_exported", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FleetRolePolicy" + "$ref": "#/components/schemas/BgpExported" } } } @@ -9531,30 +9530,35 @@ "$ref": "#/components/responses/Error" } } - }, - "put": { + } + }, + "/v1/system/networking/bgp-message-history": { + "get": { "tags": [ - "policy" + "system/networking" ], - "summary": "Update top-level IAM policy", - "operationId": "system_policy_update", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FleetRolePolicy" - } + "summary": "Get BGP router message history", + "operationId": "networking_bgp_message_history", + "parameters": [ + { + "in": "query", + "name": "asn", + "description": "The ASN to filter on. Required.", + "required": true, + "schema": { + "type": "integer", + "format": "uint32", + "minimum": 0 } - }, - "required": true - }, + } + ], "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FleetRolePolicy" + "$ref": "#/components/schemas/AggregateBgpMessageHistory" } } } @@ -9568,32 +9572,23 @@ } } }, - "/v1/system/roles": { + "/v1/system/networking/bgp-routes-ipv4": { "get": { "tags": [ - "roles" + "system/networking" ], - "summary": "List built-in roles", - "operationId": "role_list", + "summary": "Get imported IPv4 BGP routes", + "operationId": "networking_bgp_imported_routes_ipv4", "parameters": [ { "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "name": "asn", + "description": "The ASN to filter on. 
Required.", + "required": true, "schema": { - "nullable": true, "type": "integer", "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" + "minimum": 0 } } ], @@ -9603,7 +9598,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RoleResultsPage" + "title": "Array_of_BgpImportedRouteIpv4", + "type": "array", + "items": { + "$ref": "#/components/schemas/BgpImportedRouteIpv4" + } } } } @@ -9614,37 +9613,27 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/system/roles/{role_name}": { + "/v1/system/networking/bgp-status": { "get": { "tags": [ - "roles" - ], - "summary": "Fetch built-in role", - "operationId": "role_view", - "parameters": [ - { - "in": "path", - "name": "role_name", - "description": "The built-in role's unique name.", - "required": true, - "schema": { - "type": "string" - } - } + "system/networking" ], + "summary": "Get BGP peer status", + "operationId": "networking_bgp_status", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Role" + "title": "Array_of_BgpPeerStatus", + "type": "array", + "items": { + "$ref": "#/components/schemas/BgpPeerStatus" + } } } } @@ -9658,13 +9647,13 @@ } } }, - "/v1/system/silo-quotas": { + "/v1/system/networking/loopback-address": { "get": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Lists resource quotas for all silos", - "operationId": "system_quotas_list", + "summary": "List loopback addresses", + "operationId": "networking_loopback_address_list", "parameters": [ { "in": "query", @@ -9700,67 +9689,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloQuotasResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - "x-dropshot-pagination": { - "required": [] - } - } - }, - "/v1/system/silos": { - "get": { - "tags": [ - "system/silos" - ], - "summary": "List silos", - "description": "Lists silos that are discoverable based on the current permissions.", - "operationId": "silo_list", - "parameters": [ - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, - { - "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloResultsPage" + "$ref": "#/components/schemas/LoopbackAddressResultsPage" } } } @@ -9778,15 +9707,15 @@ }, "post": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Create a silo", - "operationId": "silo_create", + "summary": "Create loopback address", + "operationId": "networking_loopback_address_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloCreate" + "$ref": "#/components/schemas/LoopbackAddressCreate" } 
} }, @@ -9798,7 +9727,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Silo" + "$ref": "#/components/schemas/LoopbackAddress" } } } @@ -9812,59 +9741,52 @@ } } }, - "/v1/system/silos/{silo}": { - "get": { + "/v1/system/networking/loopback-address/{rack_id}/{switch_location}/{address}/{subnet_mask}": { + "delete": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Fetch silo", - "description": "Fetch silo by name or ID.", - "operationId": "silo_view", + "summary": "Delete loopback address", + "operationId": "networking_loopback_address_delete", "parameters": [ { "in": "path", - "name": "silo", - "description": "Name or ID of the silo", + "name": "address", + "description": "The IP address and subnet mask to use when selecting the loopback address.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "ip" } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Silo" - } - } + }, + { + "in": "path", + "name": "rack_id", + "description": "The rack to use when selecting the loopback address.", + "required": true, + "schema": { + "type": "string", + "format": "uuid" } }, - "4XX": { - "$ref": "#/components/responses/Error" + { + "in": "path", + "name": "subnet_mask", + "description": "The IP address and subnet mask to use when selecting the loopback address.", + "required": true, + "schema": { + "type": "integer", + "format": "uint8", + "minimum": 0 + } }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "delete": { - "tags": [ - "system/silos" - ], - "summary": "Delete a silo", - "description": "Delete a silo by name or ID.", - "operationId": "silo_delete", - "parameters": [ { "in": "path", - "name": "silo", - "description": "Name or ID of the silo", + "name": "switch_location", + "description": "The switch location to use when selecting the loopback address.", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/Name" } } ], @@ -9881,24 +9803,14 @@ } } }, - "/v1/system/silos/{silo}/ip-pools": { + "/v1/system/networking/switch-port-settings": { "get": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "List IP pools linked to silo", - "description": "Linked IP pools are available to users in the specified silo. A silo can have at most one default pool. 
IPs are allocated from the default pool when users ask for one without specifying a pool.", - "operationId": "silo_ip_pool_list", + "summary": "List switch port settings", + "operationId": "networking_switch_port_settings_list", "parameters": [ - { - "in": "path", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "limit", @@ -9919,6 +9831,14 @@ "type": "string" } }, + { + "in": "query", + "name": "port_settings", + "description": "An optional name or id to use when selecting port settings.", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "sort_by", @@ -9933,7 +9853,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloIpPoolResultsPage" + "$ref": "#/components/schemas/SwitchPortSettingsResultsPage" } } } @@ -9948,33 +9868,30 @@ "x-dropshot-pagination": { "required": [] } - } - }, - "/v1/system/silos/{silo}/policy": { - "get": { + }, + "post": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Fetch silo IAM policy", - "operationId": "silo_policy_view", - "parameters": [ - { - "in": "path", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" + "summary": "Create switch port settings", + "operationId": "networking_switch_port_settings_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SwitchPortSettingsCreate" + } } - } - ], + }, + "required": true + }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" + "$ref": "#/components/schemas/SwitchPortSettingsView" } } } @@ -9987,43 +9904,25 @@ } } }, - "put": { + "delete": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Update silo IAM policy", - "operationId": "silo_policy_update", + "summary": "Delete switch port settings", + "operationId": "networking_switch_port_settings_delete", "parameters": [ { - "in": "path", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, + "in": "query", + "name": "port_settings", + "description": "An optional name or id to use when selecting port settings.", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloRolePolicy" - } - } - } + "204": { + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -10034,18 +9933,18 @@ } } }, - "/v1/system/silos/{silo}/quotas": { + "/v1/system/networking/switch-port-settings/{port}": { "get": { "tags": [ - "system/silos" + "system/networking" ], - "summary": "Fetch resource quotas for silo", - "operationId": "silo_quotas_view", + "summary": "Get information about switch port", + "operationId": "networking_switch_port_settings_view", "parameters": [ { "in": "path", - "name": "silo", - "description": "Name or ID of the silo", + "name": "port", + "description": "A name or id to use when selecting switch port settings info objects.", "required": true, "schema": { "$ref": 
"#/components/schemas/NameOrId" @@ -10058,7 +9957,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloQuotas" + "$ref": "#/components/schemas/SwitchPortSettingsView" } } } @@ -10070,42 +9969,22 @@ "$ref": "#/components/responses/Error" } } - }, - "put": { + } + }, + "/v1/system/policy": { + "get": { "tags": [ - "system/silos" - ], - "summary": "Update resource quotas for silo", - "description": "If a quota value is not specified, it will remain unchanged.", - "operationId": "silo_quotas_update", - "parameters": [ - { - "in": "path", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } + "policy" ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SiloQuotasUpdate" - } - } - }, - "required": true - }, + "summary": "Fetch top-level IAM policy", + "operationId": "system_policy_view", "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloQuotas" + "$ref": "#/components/schemas/FleetRolePolicy" } } } @@ -10117,21 +9996,18 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/system/timeseries/query": { - "post": { + }, + "put": { "tags": [ - "system/metrics" + "policy" ], - "summary": "Run timeseries query", - "description": "Queries are written in OxQL.", - "operationId": "system_timeseries_query", + "summary": "Update top-level IAM policy", + "operationId": "system_policy_update", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TimeseriesQuery" + "$ref": "#/components/schemas/FleetRolePolicy" } } }, @@ -10143,7 +10019,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OxqlQueryResult" + "$ref": "#/components/schemas/FleetRolePolicy" } } } @@ -10157,13 +10033,13 @@ } } }, - "/v1/system/timeseries/schemas": { + "/v1/system/roles": { "get": { "tags": [ - "system/metrics" + "roles" ], - "summary": "List timeseries schemas", - "operationId": "system_timeseries_schema_list", + "summary": "List built-in roles", + "operationId": "role_list", "parameters": [ { "in": "query", @@ -10192,7 +10068,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TimeseriesSchemaResultsPage" + "$ref": "#/components/schemas/RoleResultsPage" } } } @@ -10209,57 +10085,31 @@ } } }, - "/v1/system/update/target-release": { + "/v1/system/roles/{role_name}": { "get": { "tags": [ - "hidden" + "roles" ], - "summary": "Get the current target release of the rack's system software", - "description": "This may not correspond to the actual software running on the rack at the time of request; it is instead the release that the rack reconfigurator should be moving towards as a goal state. 
After some number of planning and execution phases, the software running on the rack should eventually correspond to the release described here.", - "operationId": "target_release_view", - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TargetRelease" - } - } + "summary": "Fetch built-in role", + "operationId": "role_view", + "parameters": [ + { + "in": "path", + "name": "role_name", + "description": "The built-in role's unique name.", + "required": true, + "schema": { + "type": "string" } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" } - } - }, - "put": { - "tags": [ - "hidden" ], - "summary": "Set the current target release of the rack's system software", - "description": "The rack reconfigurator will treat the software specified here as a goal state for the rack's software, and attempt to asynchronously update to that release.", - "operationId": "target_release_update", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SetTargetReleaseParams" - } - } - }, - "required": true - }, "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TargetRelease" + "$ref": "#/components/schemas/Role" } } } @@ -10273,13 +10123,13 @@ } } }, - "/v1/system/users": { + "/v1/system/silo-quotas": { "get": { "tags": [ "system/silos" ], - "summary": "List built-in (system) users in silo", - "operationId": "silo_user_list", + "summary": "Lists resource quotas for all silos", + "operationId": "system_quotas_list", "parameters": [ { "in": "query", @@ -10301,14 +10151,6 @@ "type": "string" } }, - { - "in": "query", - "name": "silo", - "description": "Name or ID of the silo", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "sort_by", @@ -10323,7 +10165,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserResultsPage" + "$ref": "#/components/schemas/SiloQuotasResultsPage" } } } @@ -10336,67 +10178,18 @@ } }, "x-dropshot-pagination": { - "required": [ - "silo" - ] - } - } - }, - "/v1/system/users/{user_id}": { - "get": { - "tags": [ - "system/silos" - ], - "summary": "Fetch built-in (system) user", - "operationId": "silo_user_view", - "parameters": [ - { - "in": "path", - "name": "user_id", - "description": "The user's internal ID", - "required": true, - "schema": { - "type": "string", - "format": "uuid" - } - }, - { - "in": "query", - "name": "silo", - "description": "Name or ID of the silo", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/User" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } + "required": [] } } }, - "/v1/system/users-builtin": { + "/v1/system/silos": { "get": { "tags": [ "system/silos" ], - "summary": "List built-in users", - "operationId": "user_builtin_list", + "summary": "List silos", + "description": "Lists silos that are discoverable based on the current permissions.", + "operationId": "silo_list", "parameters": [ { "in": "query", @@ -10422,7 +10215,7 @@ "in": "query", "name": 
"sort_by", "schema": { - "$ref": "#/components/schemas/NameSortMode" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -10432,7 +10225,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserBuiltinResultsPage" + "$ref": "#/components/schemas/SiloResultsPage" } } } @@ -10447,32 +10240,69 @@ "x-dropshot-pagination": { "required": [] } - } - }, - "/v1/system/users-builtin/{user}": { - "get": { + }, + "post": { "tags": [ "system/silos" ], - "summary": "Fetch built-in user", - "operationId": "user_builtin_view", - "parameters": [ - { - "in": "path", - "name": "user", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" + "summary": "Create a silo", + "operationId": "silo_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloCreate" + } } - } - ], + }, + "required": true + }, + "responses": { + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Silo" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/system/silos/{silo}": { + "get": { + "tags": [ + "system/silos" + ], + "summary": "Fetch silo", + "description": "Fetch silo by name or ID.", + "operationId": "silo_view", + "parameters": [ + { + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserBuiltin" + "$ref": "#/components/schemas/Silo" } } } @@ -10484,16 +10314,56 @@ "$ref": "#/components/responses/Error" } } + }, + "delete": { + "tags": [ + "system/silos" + ], + "summary": "Delete a silo", + "description": "Delete a silo by name or ID.", + "operationId": "silo_delete", + "parameters": [ + { + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } } }, - "/v1/system/utilization/silos": { + "/v1/system/silos/{silo}/ip-pools": { "get": { "tags": [ "system/silos" ], - "summary": "List current utilization state for all silos", - "operationId": "silo_utilization_list", + "summary": "List IP pools linked to silo", + "description": "Linked IP pools are available to users in the specified silo. A silo can have at most one default pool. 
IPs are allocated from the default pool when users ask for one without specifying a pool.", + "operationId": "silo_ip_pool_list", "parameters": [ + { + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "limit", @@ -10528,7 +10398,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloUtilizationResultsPage" + "$ref": "#/components/schemas/SiloIpPoolResultsPage" } } } @@ -10545,13 +10415,13 @@ } } }, - "/v1/system/utilization/silos/{silo}": { + "/v1/system/silos/{silo}/policy": { "get": { "tags": [ "system/silos" ], - "summary": "Fetch current utilization for given silo", - "operationId": "silo_utilization_view", + "summary": "Fetch silo IAM policy", + "operationId": "silo_policy_view", "parameters": [ { "in": "path", @@ -10569,7 +10439,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SiloUtilization" + "$ref": "#/components/schemas/SiloRolePolicy" } } } @@ -10581,21 +10451,18 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/timeseries/query": { - "post": { + }, + "put": { "tags": [ - "hidden" + "system/silos" ], - "summary": "Run project-scoped timeseries query", - "description": "Queries are written in OxQL. Project must be specified by name or ID in URL query parameter. The OxQL query will only return timeseries data from the specified project.", - "operationId": "timeseries_query", + "summary": "Update silo IAM policy", + "operationId": "silo_policy_update", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -10606,7 +10473,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TimeseriesQuery" + "$ref": "#/components/schemas/SiloRolePolicy" } } }, @@ -10618,7 +10485,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OxqlQueryResult" + "$ref": "#/components/schemas/SiloRolePolicy" } } } @@ -10632,58 +10499,78 @@ } } }, - "/v1/users": { + "/v1/system/silos/{silo}/quotas": { "get": { "tags": [ - "silos" + "system/silos" ], - "summary": "List users", - "operationId": "user_list", + "summary": "Fetch resource quotas for silo", + "operationId": "silo_quotas_view", "parameters": [ { - "in": "query", - "name": "group", + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, "schema": { - "nullable": true, - "type": "string", - "format": "uuid" + "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloQuotas" + } + } } }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } + "4XX": { + "$ref": "#/components/responses/Error" }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "put": { + "tags": [ + "system/silos" + ], + "summary": "Update resource quotas for silo", + "description": 
"If a quota value is not specified, it will remain unchanged.", + "operationId": "silo_quotas_update", + "parameters": [ { - "in": "query", - "name": "sort_by", + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, "schema": { - "$ref": "#/components/schemas/IdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloQuotasUpdate" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UserResultsPage" + "$ref": "#/components/schemas/SiloQuotas" } } } @@ -10694,26 +10581,34 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } } }, - "/v1/utilization": { - "get": { + "/v1/system/timeseries/query": { + "post": { "tags": [ - "silos" + "system/metrics" ], - "summary": "Fetch resource utilization for user's current silo", - "operationId": "utilization_view", + "summary": "Run timeseries query", + "description": "Queries are written in OxQL.", + "operationId": "system_timeseries_query", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TimeseriesQuery" + } + } + }, + "required": true + }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Utilization" + "$ref": "#/components/schemas/OxqlQueryResult" } } } @@ -10727,29 +10622,32 @@ } } }, - "/v1/vpc-firewall-rules": { + "/v1/system/timeseries/schemas": { "get": { "tags": [ - "vpcs" + "system/metrics" ], - "summary": "List firewall rules", - "operationId": "vpc_firewall_rules_view", + "summary": "List timeseries schemas", + "operationId": "system_timeseries_schema_list", "parameters": [ { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "required": true, + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" } } ], @@ -10759,7 +10657,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcFirewallRules" + "$ref": "#/components/schemas/TimeseriesSchemaResultsPage" } } } @@ -10770,51 +10668,63 @@ "5XX": { "$ref": "#/components/responses/Error" } - } - }, - "put": { + }, + "x-dropshot-pagination": { + "required": [] + } + } + }, + "/v1/system/update/target-release": { + "get": { "tags": [ - "vpcs" + "hidden" ], - "summary": "Replace firewall rules", - "description": "The maximum number of rules per VPC is 1024.\n\nTargets are used to specify the set of instances to which a firewall rule applies. You can target instances directly by name, or specify a VPC, VPC subnet, IP, or IP subnet, which will apply the rule to traffic going to all matching instances. Targets are additive: the rule applies to instances matching ANY target. The maximum number of targets is 256.\n\nFilters reduce the scope of a firewall rule. 
Without filters, the rule applies to all packets to the targets (or from the targets, if it's an outbound rule). With multiple filters, the rule applies only to packets matching ALL filters. The maximum number of each type of filter is 256.", - "operationId": "vpc_firewall_rules_update", - "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" + "summary": "Get the current target release of the rack's system software", + "description": "This may not correspond to the actual software running on the rack at the time of request; it is instead the release that the rack reconfigurator should be moving towards as a goal state. After some number of planning and execution phases, the software running on the rack should eventually correspond to the release described here.", + "operationId": "target_release_view", + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TargetRelease" + } + } } }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" } + } + }, + "put": { + "tags": [ + "hidden" ], + "summary": "Set the current target release of the rack's system software", + "description": "The rack reconfigurator will treat the software specified here as a goal state for the rack's software, and attempt to asynchronously update to that release.", + "operationId": "target_release_update", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcFirewallRuleUpdateParams" + "$ref": "#/components/schemas/SetTargetReleaseParams" } } }, "required": true }, "responses": { - "200": { - "description": "successful operation", + "201": { + "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcFirewallRules" + "$ref": "#/components/schemas/TargetRelease" } } } @@ -10828,14 +10738,13 @@ } } }, - "/v1/vpc-router-routes": { + "/v1/system/users": { "get": { "tags": [ - "vpcs" + "system/silos" ], - "summary": "List routes", - "description": "List the routes associated with a router in a particular VPC.", - "operationId": "vpc_router_route_list", + "summary": "List built-in (system) users in silo", + "operationId": "silo_user_list", "parameters": [ { "in": "query", @@ -10859,16 +10768,8 @@ }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "router", - "description": "Name or ID of the router", + "name": "silo", + "description": "Name or ID of the silo", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -10877,15 +10778,7 @@ "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -10895,7 +10788,7 @@ "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/RouterRouteResultsPage" + "$ref": "#/components/schemas/UserResultsPage" } } } @@ -10909,60 +10802,46 @@ }, "x-dropshot-pagination": { "required": [ - "router" + "silo" ] } - }, - "post": { + } + }, + "/v1/system/users/{user_id}": { + "get": { "tags": [ - "vpcs" + "system/silos" ], - "summary": "Create route", - "operationId": "vpc_router_route_create", + "summary": "Fetch built-in (system) user", + "operationId": "silo_user_view", "parameters": [ { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "router", - "description": "Name or ID of the router", + "in": "path", + "name": "user_id", + "description": "The user's internal ID", "required": true, "schema": { - "$ref": "#/components/schemas/NameOrId" + "type": "string", + "format": "uuid" } }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RouterRouteCreate" - } - } - }, - "required": true - }, "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RouterRoute" + "$ref": "#/components/schemas/User" } } } @@ -10976,45 +10855,39 @@ } } }, - "/v1/vpc-router-routes/{route}": { + "/v1/system/users-builtin": { "get": { "tags": [ - "vpcs" + "system/silos" ], - "summary": "Fetch route", - "operationId": "vpc_router_route_view", + "summary": "List built-in users", + "operationId": "user_builtin_list", "parameters": [ - { - "in": "path", - "name": "route", - "description": "Name or ID of the route", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "router", - "description": "Name or ID of the router", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" } }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", + "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/NameSortMode" } } ], @@ -11024,7 +10897,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RouterRoute" + "$ref": "#/components/schemas/UserBuiltinResultsPage" } } } @@ -11035,66 +10908,36 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } - }, - "put": { + } + }, + "/v1/system/users-builtin/{user}": { + "get": { "tags": [ - "vpcs" + "system/silos" ], - "summary": "Update route", - "operationId": "vpc_router_route_update", + "summary": "Fetch built-in user", + 
"operationId": "user_builtin_view", "parameters": [ { "in": "path", - "name": "route", - "description": "Name or ID of the route", + "name": "user", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "router", - "description": "Name or ID of the router", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RouterRouteUpdate" - } - } - }, - "required": true - }, "responses": { "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RouterRoute" + "$ref": "#/components/schemas/UserBuiltin" } } } @@ -11106,51 +10949,144 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/system/utilization/silos": { + "get": { "tags": [ - "vpcs" + "system/silos" ], - "summary": "Delete route", - "operationId": "vpc_router_route_delete", + "summary": "List current utilization state for all silos", + "operationId": "silo_utilization_list", "parameters": [ { - "in": "path", - "name": "route", - "description": "Name or ID of the route", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { - "$ref": "#/components/schemas/NameOrId" + "nullable": true, + "type": "string" } }, { "in": "query", - "name": "router", - "description": "Name or ID of the router", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloUtilizationResultsPage" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + }, + "x-dropshot-pagination": { + "required": [] + } + } + }, + "/v1/system/utilization/silos/{silo}": { + "get": { + "tags": [ + "system/silos" + ], + "summary": "Fetch current utilization for given silo", + "operationId": "silo_utilization_view", + "parameters": [ + { + "in": "path", + "name": "silo", + "description": "Name or ID of the silo", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiloUtilization" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/timeseries/query": { + "post": { + "tags": [ + "hidden" + ], + "summary": "Run project-scoped timeseries query", 
+ "description": "Queries are written in OxQL. Project must be specified by name or ID in URL query parameter. The OxQL query will only return timeseries data from the specified project.", + "operationId": "timeseries_query", + "parameters": [ { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", + "name": "project", + "description": "Name or ID of the project", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TimeseriesQuery" + } + } + }, + "required": true + }, "responses": { - "204": { - "description": "successful deletion" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OxqlQueryResult" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -11161,14 +11097,23 @@ } } }, - "/v1/vpc-routers": { + "/v1/users": { "get": { "tags": [ - "vpcs" + "silos" ], - "summary": "List routers", - "operationId": "vpc_router_list", + "summary": "List users", + "operationId": "user_list", "parameters": [ + { + "in": "query", + "name": "group", + "schema": { + "nullable": true, + "type": "string", + "format": "uuid" + } + }, { "in": "query", "name": "limit", @@ -11189,27 +11134,11 @@ "type": "string" } }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "sort_by", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "schema": { - "$ref": "#/components/schemas/NameOrId" + "$ref": "#/components/schemas/IdSortMode" } } ], @@ -11219,7 +11148,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcRouterResultsPage" + "$ref": "#/components/schemas/UserResultsPage" } } } @@ -11232,53 +11161,24 @@ } }, "x-dropshot-pagination": { - "required": [ - "vpc" - ] + "required": [] } - }, - "post": { + } + }, + "/v1/utilization": { + "get": { "tags": [ - "vpcs" - ], - "summary": "Create VPC router", - "operationId": "vpc_router_create", - "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } + "silos" ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/VpcRouterCreate" - } - } - }, - "required": true - }, + "summary": "Fetch resource utilization for user's current silo", + "operationId": "utilization_view", "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcRouter" + "$ref": "#/components/schemas/Utilization" } } } @@ -11292,23 +11192,14 @@ } } }, - "/v1/vpc-routers/{router}": { + "/v1/vpc-firewall-rules": { "get": { "tags": [ "vpcs" ], - "summary": "Fetch router", - "operationId": "vpc_router_view", + "summary": "List firewall rules", + "operationId": "vpc_firewall_rules_view", "parameters": [ - { 
- "in": "path", - "name": "router", - "description": "Name or ID of the router", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "project", @@ -11321,6 +11212,7 @@ "in": "query", "name": "vpc", "description": "Name or ID of the VPC", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11332,7 +11224,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcRouter" + "$ref": "#/components/schemas/VpcFirewallRules" } } } @@ -11349,18 +11241,10 @@ "tags": [ "vpcs" ], - "summary": "Update router", - "operationId": "vpc_router_update", + "summary": "Replace firewall rules", + "description": "The maximum number of rules per VPC is 1024.\n\nTargets are used to specify the set of instances to which a firewall rule applies. You can target instances directly by name, or specify a VPC, VPC subnet, IP, or IP subnet, which will apply the rule to traffic going to all matching instances. Targets are additive: the rule applies to instances matching ANY target. The maximum number of targets is 256.\n\nFilters reduce the scope of a firewall rule. Without filters, the rule applies to all packets to the targets (or from the targets, if it's an outbound rule). With multiple filters, the rule applies only to packets matching ALL filters. The maximum number of each type of filter is 256.", + "operationId": "vpc_firewall_rules_update", "parameters": [ - { - "in": "path", - "name": "router", - "description": "Name or ID of the router", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "project", @@ -11373,6 +11257,7 @@ "in": "query", "name": "vpc", "description": "Name or ID of the VPC", + "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11382,7 +11267,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcRouterUpdate" + "$ref": "#/components/schemas/VpcFirewallRuleUpdateParams" } } }, @@ -11394,7 +11279,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcRouter" + "$ref": "#/components/schemas/VpcFirewallRules" } } } @@ -11406,60 +11291,16 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/vpc-router-routes": { + "get": { "tags": [ "vpcs" ], - "summary": "Delete router", - "operationId": "vpc_router_delete", - "parameters": [ - { - "in": "path", - "name": "router", - "description": "Name or ID of the router", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "project", - "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - } - ], - "responses": { - "204": { - "description": "successful deletion" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/vpc-subnets": { - "get": { - "tags": [ - "vpcs" - ], - "summary": "List subnets", - "operationId": "vpc_subnet_list", + "summary": "List routes", + "description": "List the routes associated with a router in a particular VPC.", + "operationId": "vpc_router_route_list", "parameters": [ { "in": "query", @@ -11489,6 +11330,14 @@ "$ref": "#/components/schemas/NameOrId" } }, + { + "in": 
"query", + "name": "router", + "description": "Name or ID of the router", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "sort_by", @@ -11499,7 +11348,7 @@ { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11511,7 +11360,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnetResultsPage" + "$ref": "#/components/schemas/RouterRouteResultsPage" } } } @@ -11525,7 +11374,7 @@ }, "x-dropshot-pagination": { "required": [ - "vpc" + "router" ] } }, @@ -11533,8 +11382,8 @@ "tags": [ "vpcs" ], - "summary": "Create subnet", - "operationId": "vpc_subnet_create", + "summary": "Create route", + "operationId": "vpc_router_route_create", "parameters": [ { "in": "query", @@ -11546,19 +11395,27 @@ }, { "in": "query", - "name": "vpc", - "description": "Name or ID of the VPC", + "name": "router", + "description": "Name or ID of the router", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnetCreate" + "$ref": "#/components/schemas/RouterRouteCreate" } } }, @@ -11570,7 +11427,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnet" + "$ref": "#/components/schemas/RouterRoute" } } } @@ -11584,18 +11441,18 @@ } } }, - "/v1/vpc-subnets/{subnet}": { + "/v1/vpc-router-routes/{route}": { "get": { "tags": [ "vpcs" ], - "summary": "Fetch subnet", - "operationId": "vpc_subnet_view", + "summary": "Fetch route", + "operationId": "vpc_router_route_view", "parameters": [ { "in": "path", - "name": "subnet", - "description": "Name or ID of the subnet", + "name": "route", + "description": "Name or ID of the route", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -11609,10 +11466,18 @@ "$ref": "#/components/schemas/NameOrId" } }, + { + "in": "query", + "name": "router", + "description": "Name or ID of the router", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11624,7 +11489,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnet" + "$ref": "#/components/schemas/RouterRoute" } } } @@ -11641,13 +11506,13 @@ "tags": [ "vpcs" ], - "summary": "Update subnet", - "operationId": "vpc_subnet_update", + "summary": "Update route", + "operationId": "vpc_router_route_update", "parameters": [ { "in": "path", - "name": "subnet", - "description": "Name or ID of the subnet", + "name": "route", + "description": "Name or ID of the route", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -11661,10 +11526,18 @@ "$ref": "#/components/schemas/NameOrId" } }, + { + "in": "query", + "name": "router", + "description": "Name or ID of the router", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the 
VPC, only required if `router` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11674,7 +11547,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnetUpdate" + "$ref": "#/components/schemas/RouterRouteUpdate" } } }, @@ -11686,7 +11559,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcSubnet" + "$ref": "#/components/schemas/RouterRoute" } } } @@ -11703,13 +11576,13 @@ "tags": [ "vpcs" ], - "summary": "Delete subnet", - "operationId": "vpc_subnet_delete", + "summary": "Delete route", + "operationId": "vpc_router_route_delete", "parameters": [ { "in": "path", - "name": "subnet", - "description": "Name or ID of the subnet", + "name": "route", + "description": "Name or ID of the route", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -11723,10 +11596,18 @@ "$ref": "#/components/schemas/NameOrId" } }, + { + "in": "query", + "name": "router", + "description": "Name or ID of the router", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, { "in": "query", "name": "vpc", - "description": "Name or ID of the VPC", + "description": "Name or ID of the VPC, only required if `router` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -11745,23 +11626,14 @@ } } }, - "/v1/vpc-subnets/{subnet}/network-interfaces": { + "/v1/vpc-routers": { "get": { "tags": [ "vpcs" ], - "summary": "List network interfaces", - "operationId": "vpc_subnet_list_network_interfaces", + "summary": "List routers", + "operationId": "vpc_router_list", "parameters": [ - { - "in": "path", - "name": "subnet", - "description": "Name or ID of the subnet", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, { "in": "query", "name": "limit", @@ -11812,7 +11684,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/InstanceNetworkInterfaceResultsPage" + "$ref": "#/components/schemas/VpcRouterResultsPage" } } } @@ -11825,89 +11697,30 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "vpc" + ] } - } - }, - "/v1/vpcs": { - "get": { + }, + "post": { "tags": [ "vpcs" ], - "summary": "List VPCs", - "operationId": "vpc_list", + "summary": "Create VPC router", + "operationId": "vpc_router_create", "parameters": [ - { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", - "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 - } - }, - { - "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", - "schema": { - "nullable": true, - "type": "string" - } - }, { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "sort_by", - "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/VpcResultsPage" - } - } - } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - }, - "x-dropshot-pagination": { - "required": [ - "project" - ] - } - }, - "post": { - "tags": [ - "vpcs" - ], - "summary": "Create VPC", - 
"operationId": "vpc_create", - "parameters": [ - { - "in": "query", - "name": "project", - "description": "Name or ID of the project", + "name": "vpc", + "description": "Name or ID of the VPC", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -11918,7 +11731,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcCreate" + "$ref": "#/components/schemas/VpcRouterCreate" } } }, @@ -11930,7 +11743,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Vpc" + "$ref": "#/components/schemas/VpcRouter" } } } @@ -11944,18 +11757,18 @@ } } }, - "/v1/vpcs/{vpc}": { + "/v1/vpc-routers/{router}": { "get": { "tags": [ "vpcs" ], - "summary": "Fetch VPC", - "operationId": "vpc_view", + "summary": "Fetch router", + "operationId": "vpc_router_view", "parameters": [ { "in": "path", - "name": "vpc", - "description": "Name or ID of the VPC", + "name": "router", + "description": "Name or ID of the router", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -11964,19 +11777,27 @@ { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { "$ref": "#/components/schemas/NameOrId" } - } - ], - "responses": { - "200": { + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Vpc" + "$ref": "#/components/schemas/VpcRouter" } } } @@ -11993,13 +11814,13 @@ "tags": [ "vpcs" ], - "summary": "Update a VPC", - "operationId": "vpc_update", + "summary": "Update router", + "operationId": "vpc_router_update", "parameters": [ { "in": "path", - "name": "vpc", - "description": "Name or ID of the VPC", + "name": "router", + "description": "Name or ID of the router", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -12008,7 +11829,15 @@ { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -12018,7 +11847,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/VpcUpdate" + "$ref": "#/components/schemas/VpcRouterUpdate" } } }, @@ -12030,7 +11859,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Vpc" + "$ref": "#/components/schemas/VpcRouter" } } } @@ -12047,13 +11876,13 @@ "tags": [ "vpcs" ], - "summary": "Delete VPC", - "operationId": "vpc_delete", + "summary": "Delete router", + "operationId": "vpc_router_delete", "parameters": [ { "in": "path", - "name": "vpc", - "description": "Name or ID of the VPC", + "name": "router", + "description": "Name or ID of the router", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -12062,7 +11891,15 @@ { "in": "query", "name": "project", - "description": "Name or ID of the project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or 
ID of the VPC", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -12081,76 +11918,55 @@ } } }, - "/v1/webhooks/deliveries": { + "/v1/vpc-subnets": { "get": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "List delivery attempts to webhook receiver", - "description": "Optional query parameters to this endpoint may be used to filter deliveries by state. If none of the `failed`, `pending` or `delivered` query parameters are present, all deliveries are returned. If one or more of these parameters are provided, only those which are set to \"true\" are included in the response.", - "operationId": "webhook_delivery_list", + "summary": "List subnets", + "operationId": "vpc_subnet_list", "parameters": [ { "in": "query", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", - "required": true, - "schema": { - "$ref": "#/components/schemas/NameOrId" - } - }, - { - "in": "query", - "name": "delivered", - "description": "If true, include deliveries which have succeeded.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. If NO state filter parameters are set, then all deliveries are included.", - "schema": { - "nullable": true, - "type": "boolean" - } - }, - { - "in": "query", - "name": "failed", - "description": "If true, include deliveries which have failed permanently.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. If NO state filter parameters are set, then all deliveries are included.\n\nA delivery fails permanently when the retry limit of three total attempts is reached without a successful delivery.", + "name": "limit", + "description": "Maximum number of items returned by a single call", "schema": { "nullable": true, - "type": "boolean" + "type": "integer", + "format": "uint32", + "minimum": 1 } }, { "in": "query", - "name": "pending", - "description": "If true, include deliveries which are currently in progress.\n\nIf any of the \"pending\", \"failed\", or \"delivered\" query parameters are set to true, only deliveries matching those state(s) will be included in the response. 
If NO state filter parameters are set, then all deliveries are included.\n\nA delivery is considered \"pending\" if it has not yet been sent at all, or if a delivery attempt has failed but the delivery has retries remaining.", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", "schema": { "nullable": true, - "type": "boolean" + "type": "string" } }, { "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "sort_by", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrIdSortMode" } }, { "in": "query", - "name": "sort_by", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { - "$ref": "#/components/schemas/TimeAndIdSortMode" + "$ref": "#/components/schemas/NameOrId" } } ], @@ -12160,7 +11976,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookDeliveryResultsPage" + "$ref": "#/components/schemas/VpcSubnetResultsPage" } } } @@ -12173,45 +11989,53 @@ } }, "x-dropshot-pagination": { - "required": [] + "required": [ + "vpc" + ] } - } - }, - "/v1/webhooks/deliveries/{event_id}/resend": { + }, "post": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Request re-delivery of webhook event", - "operationId": "webhook_delivery_resend", + "summary": "Create subnet", + "operationId": "vpc_subnet_create", "parameters": [ { - "in": "path", - "name": "event_id", - "description": "UUID of the event", - "required": true, + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "type": "string", - "format": "uuid" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", + "name": "vpc", + "description": "Name or ID of the VPC", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VpcSubnetCreate" + } + } + }, + "required": true + }, "responses": { "201": { "description": "successful creation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookDeliveryId" + "$ref": "#/components/schemas/VpcSubnet" } } } @@ -12225,40 +12049,37 @@ } } }, - "/v1/webhooks/event-classes": { + "/v1/vpc-subnets/{subnet}": { "get": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "List webhook event classes", - "operationId": "webhook_event_class_list", + "summary": "Fetch subnet", + "operationId": "vpc_subnet_view", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "in": "path", + "name": "subnet", + "description": "Name or ID of the subnet", + "required": true, "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": 
"project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "filter", - "description": "An optional glob pattern for filtering event class names.\n\nIf provided, only event classes which match this glob pattern will be included in the response.", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { - "$ref": "#/components/schemas/WebhookSubscription" + "$ref": "#/components/schemas/NameOrId" } } ], @@ -12268,7 +12089,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EventClassResultsPage" + "$ref": "#/components/schemas/VpcSubnet" } } } @@ -12279,93 +12100,58 @@ "5XX": { "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] } - } - }, - "/v1/webhooks/receivers": { - "get": { + }, + "put": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "List webhook receivers", - "operationId": "webhook_receiver_list", + "summary": "Update subnet", + "operationId": "vpc_subnet_update", "parameters": [ { - "in": "query", - "name": "limit", - "description": "Maximum number of items returned by a single call", + "in": "path", + "name": "subnet", + "description": "Name or ID of the subnet", + "required": true, "schema": { - "nullable": true, - "type": "integer", - "format": "uint32", - "minimum": 1 + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "page_token", - "description": "Token returned by previous call to retrieve the subsequent page", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", "schema": { - "nullable": true, - "type": "string" + "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "sort_by", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { - "$ref": "#/components/schemas/NameOrIdSortMode" - } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/WebhookReceiverResultsPage" - } - } + "$ref": "#/components/schemas/NameOrId" } - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" } - }, - "x-dropshot-pagination": { - "required": [] - } - }, - "post": { - "tags": [ - "system/webhooks" ], - "summary": "Create webhook receiver", - "operationId": "webhook_receiver_create", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookCreate" + "$ref": "#/components/schemas/VpcSubnetUpdate" } } }, "required": true }, "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookReceiver" + "$ref": "#/components/schemas/VpcSubnet" } } } @@ -12377,76 +12163,43 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/webhooks/receivers/{receiver}": { - "get": { + }, + "delete": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Fetch webhook receiver", - "operationId": "webhook_receiver_view", + "summary": "Delete subnet", + "operationId": "vpc_subnet_delete", "parameters": [ { "in": "path", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", + "name": "subnet", + "description": "Name or ID of the subnet", "required": true, 
"schema": { "$ref": "#/components/schemas/NameOrId" } - } - ], - "responses": { - "200": { - "description": "successful operation", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/WebhookReceiver" - } - } - } }, - "4XX": { - "$ref": "#/components/responses/Error" + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - }, - "put": { - "tags": [ - "system/webhooks" - ], - "summary": "Update webhook receiver", - "description": "Note that receiver secrets are NOT added or removed using this endpoint. Instead, use the `/v1/webhooks/{secrets}/?receiver={receiver}` endpoint to add and remove secrets.", - "operationId": "webhook_receiver_update", - "parameters": [ { - "in": "path", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", - "required": true, + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", "schema": { "$ref": "#/components/schemas/NameOrId" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/WebhookReceiverUpdate" - } - } - }, - "required": true - }, "responses": { "204": { - "description": "resource updated" + "description": "successful deletion" }, "4XX": { "$ref": "#/components/responses/Error" @@ -12455,27 +12208,79 @@ "$ref": "#/components/responses/Error" } } - }, - "delete": { + } + }, + "/v1/vpc-subnets/{subnet}/network-interfaces": { + "get": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Delete webhook receiver", - "operationId": "webhook_receiver_delete", + "summary": "List network interfaces", + "operationId": "vpc_subnet_list_network_interfaces", "parameters": [ { "in": "path", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", + "name": "subnet", + "description": "Name or ID of the subnet", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project, only required if `vpc` is provided as a `Name`", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "sort_by", + "schema": { + "$ref": "#/components/schemas/NameOrIdSortMode" + } + }, + { + "in": "query", + "name": "vpc", + "description": "Name or ID of the VPC", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "responses": { - "204": { - "description": "successful deletion" + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InstanceNetworkInterfaceResultsPage" + } + } + } }, "4XX": { "$ref": "#/components/responses/Error" @@ -12483,33 +12288,53 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [] } } }, - "/v1/webhooks/receivers/{receiver}/probe": { - "post": { + "/v1/vpcs": { + "get": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": 
"Send liveness probe to webhook receiver", - "description": "This endpoint synchronously sends a liveness probe request to the selected webhook receiver. The response message describes the outcome of the probe request: either the response from the receiver endpoint, or an indication of why the probe failed.\n\nNote that the response status is `200 OK` as long as a probe request was able to be sent to the receiver endpoint. If the receiver responds with another status code, including an error, this will be indicated by the response body, *not* the status of the response.\n\nThe `resend` query parameter can be used to request re-delivery of failed events if the liveness probe succeeds. If it is set to true and the webhook receiver responds to the probe request with a `2xx` status code, any events for which delivery to this receiver has failed will be queued for re-delivery.", - "operationId": "webhook_receiver_probe", + "summary": "List VPCs", + "operationId": "vpc_list", "parameters": [ { - "in": "path", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", - "required": true, + "in": "query", + "name": "limit", + "description": "Maximum number of items returned by a single call", + "schema": { + "nullable": true, + "type": "integer", + "format": "uint32", + "minimum": 1 + } + }, + { + "in": "query", + "name": "page_token", + "description": "Token returned by previous call to retrieve the subsequent page", + "schema": { + "nullable": true, + "type": "string" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } }, { "in": "query", - "name": "resend", - "description": "If true, resend all events that have not been delivered successfully if the probe request succeeds.", + "name": "sort_by", "schema": { - "type": "boolean" + "$ref": "#/components/schemas/NameOrIdSortMode" } } ], @@ -12519,7 +12344,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookProbeResult" + "$ref": "#/components/schemas/VpcResultsPage" } } } @@ -12530,21 +12355,24 @@ "5XX": { "$ref": "#/components/responses/Error" } + }, + "x-dropshot-pagination": { + "required": [ + "project" + ] } - } - }, - "/v1/webhooks/receivers/{receiver}/subscriptions": { + }, "post": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Add webhook receiver subscription", - "operationId": "webhook_receiver_subscription_add", + "summary": "Create VPC", + "operationId": "vpc_create", "parameters": [ { - "in": "path", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", + "in": "query", + "name": "project", + "description": "Name or ID of the project", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" @@ -12555,7 +12383,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookSubscriptionCreate" + "$ref": "#/components/schemas/VpcCreate" } } }, @@ -12567,7 +12395,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookSubscriptionCreated" + "$ref": "#/components/schemas/Vpc" } } } @@ -12581,59 +12409,27 @@ } } }, - "/v1/webhooks/receivers/{receiver}/subscriptions/{subscription}": { - "delete": { + "/v1/vpcs/{vpc}": { + "get": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Remove webhook receiver subscription", - "operationId": "webhook_receiver_subscription_remove", + "summary": "Fetch VPC", + "operationId": "vpc_view", "parameters": [ { "in": "path", - 
"name": "receiver", - "description": "The name or ID of the webhook receiver.", + "name": "vpc", + "description": "Name or ID of the VPC", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } }, - { - "in": "path", - "name": "subscription", - "description": "The event class subscription itself.", - "required": true, - "schema": { - "$ref": "#/components/schemas/WebhookSubscription" - } - } - ], - "responses": { - "204": { - "description": "successful deletion" - }, - "4XX": { - "$ref": "#/components/responses/Error" - }, - "5XX": { - "$ref": "#/components/responses/Error" - } - } - } - }, - "/v1/webhooks/secrets": { - "get": { - "tags": [ - "system/webhooks" - ], - "summary": "List webhook receiver secret IDs", - "operationId": "webhook_secrets_list", - "parameters": [ { "in": "query", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", - "required": true, + "name": "project", + "description": "Name or ID of the project", "schema": { "$ref": "#/components/schemas/NameOrId" } @@ -12645,7 +12441,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookSecrets" + "$ref": "#/components/schemas/Vpc" } } } @@ -12658,40 +12454,48 @@ } } }, - "post": { + "put": { "tags": [ - "system/webhooks" + "vpcs" ], - "summary": "Add secret to webhook receiver", - "operationId": "webhook_secrets_add", + "summary": "Update a VPC", + "operationId": "vpc_update", "parameters": [ { - "in": "query", - "name": "receiver", - "description": "The name or ID of the webhook receiver.", + "in": "path", + "name": "vpc", + "description": "Name or ID of the VPC", "required": true, "schema": { "$ref": "#/components/schemas/NameOrId" } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookSecretCreate" + "$ref": "#/components/schemas/VpcUpdate" } } }, "required": true }, "responses": { - "201": { - "description": "successful creation", + "200": { + "description": "successful operation", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WebhookSecret" + "$ref": "#/components/schemas/Vpc" } } } @@ -12703,12 +12507,212 @@ "$ref": "#/components/responses/Error" } } - } - }, - "/v1/webhooks/secrets/{secret_id}": { + }, "delete": { "tags": [ - "system/webhooks" + "vpcs" + ], + "summary": "Delete VPC", + "operationId": "vpc_delete", + "parameters": [ + { + "in": "path", + "name": "vpc", + "description": "Name or ID of the VPC", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + }, + { + "in": "query", + "name": "project", + "description": "Name or ID of the project", + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "204": { + "description": "successful deletion" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/webhook-receivers": { + "post": { + "tags": [ + "system/alerts" + ], + "summary": "Create webhook receiver", + "operationId": "webhook_receiver_create", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WebhookCreate" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/WebhookReceiver" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/webhook-receivers/{receiver}": { + "put": { + "tags": [ + "system/alerts" + ], + "summary": "Update webhook receiver", + "description": "Note that receiver secrets are NOT added or removed using this endpoint. Instead, use the `/v1/webhooks/{secrets}/?receiver={receiver}` endpoint to add and remove secrets.", + "operationId": "webhook_receiver_update", + "parameters": [ + { + "in": "path", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WebhookReceiverUpdate" + } + } + }, + "required": true + }, + "responses": { + "204": { + "description": "resource updated" + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/webhook-secrets": { + "get": { + "tags": [ + "system/alerts" + ], + "summary": "List webhook receiver secret IDs", + "operationId": "webhook_secrets_list", + "parameters": [ + { + "in": "query", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WebhookSecrets" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + }, + "post": { + "tags": [ + "system/alerts" + ], + "summary": "Add secret to webhook receiver", + "operationId": "webhook_secrets_add", + "parameters": [ + { + "in": "query", + "name": "receiver", + "description": "The name or ID of the webhook receiver.", + "required": true, + "schema": { + "$ref": "#/components/schemas/NameOrId" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WebhookSecretCreate" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "successful creation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WebhookSecret" + } + } + } + }, + "4XX": { + "$ref": "#/components/responses/Error" + }, + "5XX": { + "$ref": "#/components/responses/Error" + } + } + } + }, + "/v1/webhook-secrets/{secret_id}": { + "delete": { + "tags": [ + "system/alerts" ], "summary": "Remove secret from webhook receiver", "operationId": "webhook_secrets_delete", @@ -12959,66 +12963,522 @@ } }, "required": [ - "blocks", - "lot" + "blocks", + "lot" + ] + }, + "AddressLotKind": { + "description": "The kind associated with an address lot.", + "oneOf": [ + { + "description": "Infrastructure address lots are used for network infrastructure like addresses assigned to rack switches.", + "type": "string", + "enum": [ + "infra" + ] + }, + { + "description": "Pool address lots are used by IP pools.", + "type": "string", + "enum": [ + "pool" + ] + } + ] + }, + "AddressLotResultsPage": { + "description": "A single page of results", + "type": "object", + "properties": { + "items": { + "description": "list of items on this page of results", + "type": "array", + "items": { + "$ref": "#/components/schemas/AddressLot" + } + }, + 
"next_page": { + "nullable": true, + "description": "token used to fetch the next page of results (if any)", + "type": "string" + } + }, + "required": [ + "items" + ] + }, + "AffinityGroup": { + "description": "View of an Affinity Group", + "type": "object", + "properties": { + "description": { + "description": "human-readable free-form text about a resource", + "type": "string" + }, + "failure_domain": { + "$ref": "#/components/schemas/FailureDomain" + }, + "id": { + "description": "unique, immutable, system-controlled identifier for each resource", + "type": "string", + "format": "uuid" + }, + "name": { + "description": "unique, mutable, user-controlled identifier for each resource", + "allOf": [ + { + "$ref": "#/components/schemas/Name" + } + ] + }, + "policy": { + "$ref": "#/components/schemas/AffinityPolicy" + }, + "project_id": { + "type": "string", + "format": "uuid" + }, + "time_created": { + "description": "timestamp when this resource was created", + "type": "string", + "format": "date-time" + }, + "time_modified": { + "description": "timestamp when this resource was last modified", + "type": "string", + "format": "date-time" + } + }, + "required": [ + "description", + "failure_domain", + "id", + "name", + "policy", + "project_id", + "time_created", + "time_modified" + ] + }, + "AffinityGroupCreate": { + "description": "Create-time parameters for an `AffinityGroup`", + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "failure_domain": { + "$ref": "#/components/schemas/FailureDomain" + }, + "name": { + "$ref": "#/components/schemas/Name" + }, + "policy": { + "$ref": "#/components/schemas/AffinityPolicy" + } + }, + "required": [ + "description", + "failure_domain", + "name", + "policy" + ] + }, + "AffinityGroupMember": { + "description": "A member of an Affinity Group\n\nMembership in a group is not exclusive - members may belong to multiple affinity / anti-affinity groups.\n\nAffinity Groups can contain up to 32 members.", + "oneOf": [ + { + "description": "An instance belonging to this group\n\nInstances can belong to up to 16 affinity groups.", + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "instance" + ] + }, + "value": { + "type": "object", + "properties": { + "id": { + "$ref": "#/components/schemas/TypedUuidForInstanceKind" + }, + "name": { + "$ref": "#/components/schemas/Name" + }, + "run_state": { + "$ref": "#/components/schemas/InstanceState" + } + }, + "required": [ + "id", + "name", + "run_state" + ] + } + }, + "required": [ + "type", + "value" + ] + } + ] + }, + "AffinityGroupMemberResultsPage": { + "description": "A single page of results", + "type": "object", + "properties": { + "items": { + "description": "list of items on this page of results", + "type": "array", + "items": { + "$ref": "#/components/schemas/AffinityGroupMember" + } + }, + "next_page": { + "nullable": true, + "description": "token used to fetch the next page of results (if any)", + "type": "string" + } + }, + "required": [ + "items" + ] + }, + "AffinityGroupResultsPage": { + "description": "A single page of results", + "type": "object", + "properties": { + "items": { + "description": "list of items on this page of results", + "type": "array", + "items": { + "$ref": "#/components/schemas/AffinityGroup" + } + }, + "next_page": { + "nullable": true, + "description": "token used to fetch the next page of results (if any)", + "type": "string" + } + }, + "required": [ + "items" + ] + }, + "AffinityGroupUpdate": { + "description": 
"Updateable properties of an `AffinityGroup`", + "type": "object", + "properties": { + "description": { + "nullable": true, + "type": "string" + }, + "name": { + "nullable": true, + "allOf": [ + { + "$ref": "#/components/schemas/Name" + } + ] + } + } + }, + "AffinityPolicy": { + "description": "Affinity policy used to describe \"what to do when a request cannot be satisfied\"\n\nUsed for both Affinity and Anti-Affinity Groups", + "oneOf": [ + { + "description": "If the affinity request cannot be satisfied, allow it anyway.\n\nThis enables a \"best-effort\" attempt to satisfy the affinity policy.", + "type": "string", + "enum": [ + "allow" + ] + }, + { + "description": "If the affinity request cannot be satisfied, fail explicitly.", + "type": "string", + "enum": [ + "fail" + ] + } + ] + }, + "AggregateBgpMessageHistory": { + "description": "BGP message history for rack switches.", + "type": "object", + "properties": { + "switch_histories": { + "description": "BGP history organized by switch.", + "type": "array", + "items": { + "$ref": "#/components/schemas/SwitchBgpHistory" + } + } + }, + "required": [ + "switch_histories" + ] + }, + "AlertClass": { + "description": "An alert class.", + "type": "object", + "properties": { + "description": { + "description": "A description of what this alert class represents.", + "type": "string" + }, + "name": { + "description": "The name of the alert class.", + "type": "string" + } + }, + "required": [ + "description", + "name" + ] + }, + "AlertClassResultsPage": { + "description": "A single page of results", + "type": "object", + "properties": { + "items": { + "description": "list of items on this page of results", + "type": "array", + "items": { + "$ref": "#/components/schemas/AlertClass" + } + }, + "next_page": { + "nullable": true, + "description": "token used to fetch the next page of results (if any)", + "type": "string" + } + }, + "required": [ + "items" + ] + }, + "AlertDelivery": { + "description": "A delivery of a webhook event.", + "type": "object", + "properties": { + "alert_class": { + "description": "The event class.", + "type": "string" + }, + "alert_id": { + "description": "The UUID of the event.", + "allOf": [ + { + "$ref": "#/components/schemas/TypedUuidForAlertKind" + } + ] + }, + "attempts": { + "description": "Individual attempts to deliver this webhook event, and their outcomes.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertDeliveryAttempts" + } + ] + }, + "id": { + "description": "The UUID of this delivery attempt.", + "type": "string", + "format": "uuid" + }, + "receiver_id": { + "description": "The UUID of the alert receiver that this event was delivered to.", + "allOf": [ + { + "$ref": "#/components/schemas/TypedUuidForAlertReceiverKind" + } + ] + }, + "state": { + "description": "The state of this delivery.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertDeliveryState" + } + ] + }, + "time_started": { + "description": "The time at which this delivery began (i.e. 
the event was dispatched to the receiver).", + "type": "string", + "format": "date-time" + }, + "trigger": { + "description": "Why this delivery was performed.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertDeliveryTrigger" + } + ] + } + }, + "required": [ + "alert_class", + "alert_id", + "attempts", + "id", + "receiver_id", + "state", + "time_started", + "trigger" + ] + }, + "AlertDeliveryAttempts": { + "description": "A list of attempts to deliver an alert to a receiver.\n\nThe type of the delivery attempt model depends on the receiver type, as it may contain information specific to that delivery mechanism. For example, webhook delivery attempts contain the HTTP status code of the webhook request.", + "oneOf": [ + { + "description": "A list of attempts to deliver an alert to a webhook receiver.", + "type": "object", + "properties": { + "webhook": { + "type": "array", + "items": { + "$ref": "#/components/schemas/WebhookDeliveryAttempt" + } + } + }, + "required": [ + "webhook" + ], + "additionalProperties": false + } + ] + }, + "AlertDeliveryId": { + "type": "object", + "properties": { + "delivery_id": { + "type": "string", + "format": "uuid" + } + }, + "required": [ + "delivery_id" + ] + }, + "AlertDeliveryResultsPage": { + "description": "A single page of results", + "type": "object", + "properties": { + "items": { + "description": "list of items on this page of results", + "type": "array", + "items": { + "$ref": "#/components/schemas/AlertDelivery" + } + }, + "next_page": { + "nullable": true, + "description": "token used to fetch the next page of results (if any)", + "type": "string" + } + }, + "required": [ + "items" ] }, - "AddressLotKind": { - "description": "The kind associated with an address lot.", + "AlertDeliveryState": { + "description": "The state of a webhook delivery attempt.", "oneOf": [ { - "description": "Infrastructure address lots are used for network infrastructure like addresses assigned to rack switches.", + "description": "The webhook event has not yet been delivered successfully.\n\nEither no delivery attempts have yet been performed, or the delivery has failed at least once but has retries remaining.", "type": "string", "enum": [ - "infra" + "pending" ] }, { - "description": "Pool address lots are used by IP pools.", + "description": "The webhook event has been delivered successfully.", "type": "string", "enum": [ - "pool" + "delivered" + ] + }, + { + "description": "The webhook delivery attempt has failed permanently and will not be retried again.", + "type": "string", + "enum": [ + "failed" ] } ] }, - "AddressLotResultsPage": { - "description": "A single page of results", + "AlertDeliveryTrigger": { + "description": "The reason an alert was delivered", + "oneOf": [ + { + "description": "Delivery was triggered by the alert itself.", + "type": "string", + "enum": [ + "alert" + ] + }, + { + "description": "Delivery was triggered by a request to resend the alert.", + "type": "string", + "enum": [ + "resend" + ] + }, + { + "description": "This delivery is a liveness probe.", + "type": "string", + "enum": [ + "probe" + ] + } + ] + }, + "AlertProbeResult": { + "description": "Data describing the result of an alert receiver liveness probe attempt.", "type": "object", "properties": { - "items": { - "description": "list of items on this page of results", - "type": "array", - "items": { - "$ref": "#/components/schemas/AddressLot" - } + "probe": { + "description": "The outcome of the probe delivery.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertDelivery" 
+ } + ] }, - "next_page": { + "resends_started": { "nullable": true, - "description": "token used to fetch the next page of results (if any)", - "type": "string" + "description": "If the probe request succeeded, and resending failed deliveries on success was requested, the number of new delivery attempts started. Otherwise, if the probe did not succeed, or resending failed deliveries was not requested, this is null.\n\nNote that this may be 0, if there were no events found which had not been delivered successfully to this receiver.", + "type": "integer", + "format": "uint", + "minimum": 0 } }, "required": [ - "items" + "probe" ] }, - "AffinityGroup": { - "description": "View of an Affinity Group", + "AlertReceiver": { + "description": "The configuration for an alert receiver.", "type": "object", "properties": { "description": { "description": "human-readable free-form text about a resource", "type": "string" }, - "failure_domain": { - "$ref": "#/components/schemas/FailureDomain" - }, "id": { "description": "unique, immutable, system-controlled identifier for each resource", "type": "string", "format": "uuid" }, + "kind": { + "description": "Configuration specific to the kind of alert receiver that this is.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertReceiverKind" + } + ] + }, "name": { "description": "unique, mutable, user-controlled identifier for each resource", "allOf": [ @@ -13027,12 +13487,12 @@ } ] }, - "policy": { - "$ref": "#/components/schemas/AffinityPolicy" - }, - "project_id": { - "type": "string", - "format": "uuid" + "subscriptions": { + "description": "The list of alert classes to which this receiver is subscribed.", + "type": "array", + "items": { + "$ref": "#/components/schemas/AlertSubscription" + } }, "time_created": { "description": "timestamp when this resource was created", @@ -13047,80 +13507,32 @@ }, "required": [ "description", - "failure_domain", "id", + "kind", "name", - "policy", - "project_id", + "subscriptions", "time_created", "time_modified" ] }, - "AffinityGroupCreate": { - "description": "Create-time parameters for an `AffinityGroup`", - "type": "object", - "properties": { - "description": { - "type": "string" - }, - "failure_domain": { - "$ref": "#/components/schemas/FailureDomain" - }, - "name": { - "$ref": "#/components/schemas/Name" - }, - "policy": { - "$ref": "#/components/schemas/AffinityPolicy" - } - }, - "required": [ - "description", - "failure_domain", - "name", - "policy" - ] - }, - "AffinityGroupMember": { - "description": "A member of an Affinity Group\n\nMembership in a group is not exclusive - members may belong to multiple affinity / anti-affinity groups.\n\nAffinity Groups can contain up to 32 members.", + "AlertReceiverKind": { + "description": "The possible alert delivery mechanisms for an alert receiver.", "oneOf": [ { - "description": "An instance belonging to this group\n\nInstances can belong to up to 16 affinity groups.", "type": "object", "properties": { - "type": { - "type": "string", - "enum": [ - "instance" - ] - }, - "value": { - "type": "object", - "properties": { - "id": { - "$ref": "#/components/schemas/TypedUuidForInstanceKind" - }, - "name": { - "$ref": "#/components/schemas/Name" - }, - "run_state": { - "$ref": "#/components/schemas/InstanceState" - } - }, - "required": [ - "id", - "name", - "run_state" - ] + "webhook": { + "$ref": "#/components/schemas/WebhookReceiverConfig" } }, "required": [ - "type", - "value" - ] + "webhook" + ], + "additionalProperties": false } ] }, - "AffinityGroupMemberResultsPage": 
{ + "AlertReceiverResultsPage": { "description": "A single page of results", "type": "object", "properties": { @@ -13128,7 +13540,7 @@ "description": "list of items on this page of results", "type": "array", "items": { - "$ref": "#/components/schemas/AffinityGroupMember" + "$ref": "#/components/schemas/AlertReceiver" } }, "next_page": { @@ -13141,78 +13553,42 @@ "items" ] }, - "AffinityGroupResultsPage": { - "description": "A single page of results", + "AlertSubscription": { + "title": "A webhook event class subscription", + "description": "A webhook event class subscription matches either a single event class exactly, or a glob pattern including wildcards that may match multiple event classes", + "type": "string", + "pattern": "^([a-zA-Z0-9_]+|\\*|\\*\\*)(\\.([a-zA-Z0-9_]+|\\*|\\*\\*))*$" + }, + "AlertSubscriptionCreate": { "type": "object", "properties": { - "items": { - "description": "list of items on this page of results", - "type": "array", - "items": { - "$ref": "#/components/schemas/AffinityGroup" - } - }, - "next_page": { - "nullable": true, - "description": "token used to fetch the next page of results (if any)", - "type": "string" + "subscription": { + "description": "The event class pattern to subscribe to.", + "allOf": [ + { + "$ref": "#/components/schemas/AlertSubscription" + } + ] } }, "required": [ - "items" + "subscription" ] }, - "AffinityGroupUpdate": { - "description": "Updateable properties of an `AffinityGroup`", + "AlertSubscriptionCreated": { "type": "object", "properties": { - "description": { - "nullable": true, - "type": "string" - }, - "name": { - "nullable": true, + "subscription": { + "description": "The new subscription added to the receiver.", "allOf": [ { - "$ref": "#/components/schemas/Name" + "$ref": "#/components/schemas/AlertSubscription" } ] } - } - }, - "AffinityPolicy": { - "description": "Affinity policy used to describe \"what to do when a request cannot be satisfied\"\n\nUsed for both Affinity and Anti-Affinity Groups", - "oneOf": [ - { - "description": "If the affinity request cannot be satisfied, allow it anyway.\n\nThis enables a \"best-effort\" attempt to satisfy the affinity policy.", - "type": "string", - "enum": [ - "allow" - ] - }, - { - "description": "If the affinity request cannot be satisfied, fail explicitly.", - "type": "string", - "enum": [ - "fail" - ] - } - ] - }, - "AggregateBgpMessageHistory": { - "description": "BGP message history for rack switches.", - "type": "object", - "properties": { - "switch_histories": { - "description": "BGP history organized by switch.", - "type": "array", - "items": { - "$ref": "#/components/schemas/SwitchBgpHistory" - } - } }, "required": [ - "switch_histories" + "subscription" ] }, "AllowList": { @@ -16660,52 +17036,13 @@ "message": { "type": "string" }, - "request_id": { - "type": "string" - } - }, - "required": [ - "message", - "request_id" - ] - }, - "EventClass": { - "description": "A webhook event class.", - "type": "object", - "properties": { - "description": { - "description": "A description of what this event class represents.", - "type": "string" - }, - "name": { - "description": "The name of the event class.", - "type": "string" - } - }, - "required": [ - "description", - "name" - ] - }, - "EventClassResultsPage": { - "description": "A single page of results", - "type": "object", - "properties": { - "items": { - "description": "list of items on this page of results", - "type": "array", - "items": { - "$ref": "#/components/schemas/EventClass" - } - }, - "next_page": { - "nullable": true, 
- "description": "token used to fetch the next page of results (if any)", + "request_id": { "type": "string" } }, "required": [ - "items" + "message", + "request_id" ] }, "ExternalIp": { @@ -24185,19 +24522,19 @@ } } }, - "TypedUuidForInstanceKind": { + "TypedUuidForAlertKind": { "type": "string", "format": "uuid" }, - "TypedUuidForSupportBundleKind": { + "TypedUuidForAlertReceiverKind": { "type": "string", "format": "uuid" }, - "TypedUuidForWebhookEventKind": { + "TypedUuidForInstanceKind": { "type": "string", "format": "uuid" }, - "TypedUuidForWebhookReceiverKind": { + "TypedUuidForSupportBundleKind": { "type": "string", "format": "uuid" }, @@ -25638,7 +25975,7 @@ "default": [], "type": "array", "items": { - "$ref": "#/components/schemas/WebhookSubscription" + "$ref": "#/components/schemas/AlertSubscription" } } }, @@ -25649,75 +25986,6 @@ "secrets" ] }, - "WebhookDelivery": { - "description": "A delivery of a webhook event.", - "type": "object", - "properties": { - "attempts": { - "description": "Individual attempts to deliver this webhook event, and their outcomes.", - "type": "array", - "items": { - "$ref": "#/components/schemas/WebhookDeliveryAttempt" - } - }, - "event_class": { - "description": "The event class.", - "type": "string" - }, - "event_id": { - "description": "The UUID of the event.", - "allOf": [ - { - "$ref": "#/components/schemas/TypedUuidForWebhookEventKind" - } - ] - }, - "id": { - "description": "The UUID of this delivery attempt.", - "type": "string", - "format": "uuid" - }, - "state": { - "description": "The state of this delivery.", - "allOf": [ - { - "$ref": "#/components/schemas/WebhookDeliveryState" - } - ] - }, - "time_started": { - "description": "The time at which this delivery began (i.e. the event was dispatched to the receiver).", - "type": "string", - "format": "date-time" - }, - "trigger": { - "description": "Why this delivery was performed.", - "allOf": [ - { - "$ref": "#/components/schemas/WebhookDeliveryTrigger" - } - ] - }, - "webhook_id": { - "description": "The UUID of the webhook receiver that this event was delivered to.", - "allOf": [ - { - "$ref": "#/components/schemas/TypedUuidForWebhookReceiverKind" - } - ] - } - }, - "required": [ - "attempts", - "event_class", - "event_id", - "id", - "state", - "time_started", - "trigger", - "webhook_id" - ] - }, "WebhookDeliveryAttempt": { "description": "An individual delivery attempt for a webhook event.\n\nThis represents a single HTTP request that was sent to the receiver, and its outcome.", "type": "object", @@ -25788,18 +26056,6 @@ } ] }, - "WebhookDeliveryId": { - "type": "object", - "properties": { - "delivery_id": { - "type": "string", - "format": "uuid" - } - }, - "required": [ - "delivery_id" - ] - }, "WebhookDeliveryResponse": { "description": "The response received from a webhook receiver endpoint.", "type": "object", @@ -25822,105 +26078,8 @@ "status" ] }, - "WebhookDeliveryResultsPage": { - "description": "A single page of results", - "type": "object", - "properties": { - "items": { - "description": "list of items on this page of results", - "type": "array", - "items": { - "$ref": "#/components/schemas/WebhookDelivery" - } - }, - "next_page": { - "nullable": true, - "description": "token used to fetch the next page of results (if any)", - "type": "string" - } - }, - "required": [ - "items" - ] - }, - "WebhookDeliveryState": { - "description": "The state of a webhook delivery attempt.", - "oneOf": [ - { - "description": "The webhook event has not yet been delivered successfully.\n\nEither 
no delivery attempts have yet been performed, or the delivery has failed at least once but has retries remaining.", - "type": "string", - "enum": [ - "pending" - ] - }, - { - "description": "The webhook event has been delivered successfully.", - "type": "string", - "enum": [ - "delivered" - ] - }, - { - "description": "The webhook delivery attempt has failed permanently and will not be retried again.", - "type": "string", - "enum": [ - "failed" - ] - } - ] - }, - "WebhookDeliveryTrigger": { - "description": "The reason a webhook event was delivered", - "oneOf": [ - { - "description": "Delivery was triggered by the event occurring for the first time.", - "type": "string", - "enum": [ - "event" - ] - }, - { - "description": "Delivery was triggered by a request to resend the event.", - "type": "string", - "enum": [ - "resend" - ] - }, - { - "description": "This delivery is a liveness probe.", - "type": "string", - "enum": [ - "probe" - ] - } - ] - }, - "WebhookProbeResult": { - "description": "Data describing the result of a webhook liveness probe attempt.", - "type": "object", - "properties": { - "probe": { - "description": "The outcome of the probe request.", - "allOf": [ - { - "$ref": "#/components/schemas/WebhookDelivery" - } - ] - }, - "resends_started": { - "nullable": true, - "description": "If the probe request succeeded, and resending failed deliveries on success was requested, the number of new delivery attempts started. Otherwise, if the probe did not succeed, or resending failed deliveries was not requested, this is null.\n\nNote that this may be 0, if there were no events found which had not been delivered successfully to this receiver.", - "type": "integer", - "format": "uint", - "minimum": 0 - } - }, - "required": [ - "probe" - ] - }, "WebhookReceiver": { - "description": "The configuration for a webhook.", + "description": "The configuration for a webhook alert receiver.", "type": "object", "properties": { "description": { @@ -25952,10 +26111,10 @@ } }, "subscriptions": { - "description": "The list of event classes to which this receiver is subscribed.", + "description": "The list of alert classes to which this receiver is subscribed.", "type": "array", "items": { - "$ref": "#/components/schemas/WebhookSubscription" + "$ref": "#/components/schemas/AlertSubscription" } }, "time_created": { @@ -25980,25 +26139,25 @@ "time_modified" ] }, - "WebhookReceiverResultsPage": { - "description": "A single page of results", + "WebhookReceiverConfig": { + "description": "Webhook-specific alert receiver configuration.", "type": "object", "properties": { - "items": { - "description": "list of items on this page of results", + "endpoint": { + "description": "The URL that webhook notification requests are sent to.", + "type": "string", + "format": "uri" + }, + "secrets": { "type": "array", "items": { - "$ref": "#/components/schemas/WebhookReceiver" + "$ref": "#/components/schemas/WebhookSecret" } - }, - "next_page": { - "nullable": true, - "description": "token used to fetch the next page of results (if any)", - "type": "string" } }, "required": [ - "items" + "endpoint", + "secrets" ] }, "WebhookReceiverUpdate": { @@ -26058,7 +26217,7 @@ ] }, "WebhookSecrets": { - "description": "A list of the IDs of secrets associated with a webhook.", + "description": "A list of the IDs of secrets associated with a webhook receiver.", "type": "object", "properties": { "secrets": { @@ -26072,44 +26231,6 @@ "secrets" ] }, - "WebhookSubscription": { - "title": "A webhook event class subscription", - 
"description": "A webhook event class subscription matches either a single event class exactly, or a glob pattern including wildcards that may match multiple event classes", - "type": "string", - "pattern": "^([a-zA-Z0-9_]+|\\*|\\*\\*)(\\.([a-zA-Z0-9_]+|\\*|\\*\\*))*$" - }, - "WebhookSubscriptionCreate": { - "type": "object", - "properties": { - "subscription": { - "description": "The event class pattern to subscribe to.", - "allOf": [ - { - "$ref": "#/components/schemas/WebhookSubscription" - } - ] - } - }, - "required": [ - "subscription" - ] - }, - "WebhookSubscriptionCreated": { - "type": "object", - "properties": { - "subscription": { - "description": "The new subscription added to the receiver.", - "allOf": [ - { - "$ref": "#/components/schemas/WebhookSubscription" - } - ] - } - }, - "required": [ - "subscription" - ] - }, "NameOrIdSortMode": { "description": "Supported set of sort modes for scanning by name or id", "oneOf": [ @@ -26148,6 +26269,25 @@ } ] }, + "TimeAndIdSortMode": { + "description": "Supported set of sort modes for scanning by timestamp and ID", + "oneOf": [ + { + "description": "sort in increasing order of timestamp and ID, i.e., earliest first", + "type": "string", + "enum": [ + "ascending" + ] + }, + { + "description": "sort in increasing order of timestamp and ID, i.e., most recent first", + "type": "string", + "enum": [ + "descending" + ] + } + ] + }, "DiskMetricName": { "type": "string", "enum": [ @@ -26186,25 +26326,6 @@ ] } ] - }, - "TimeAndIdSortMode": { - "description": "Supported set of sort modes for scanning by timestamp and ID", - "oneOf": [ - { - "description": "sort in increasing order of timestamp and ID, i.e., earliest first", - "type": "string", - "enum": [ - "ascending" - ] - }, - { - "description": "sort in increasing order of timestamp and ID, i.e., most recent first", - "type": "string", - "enum": [ - "descending" - ] - } - ] } }, "responses": { @@ -26319,6 +26440,13 @@ "url": "http://docs.oxide.computer/api/snapshots" } }, + { + "name": "system/alerts", + "description": "Alerts deliver notifications for events that occur on the Oxide rack", + "externalDocs": { + "url": "http://docs.oxide.computer/api/alerts" + } + }, { "name": "system/hardware", "description": "These operations pertain to hardware inventory and management. Racks are the unit of expansion of an Oxide deployment. 
Racks are in turn composed of sleds, switches, power supplies, and a cabled backplane.", @@ -26371,13 +26499,6 @@ { "name": "system/update" }, - { - "name": "system/webhooks", - "description": "Webhooks deliver notifications for audit log events and fault management alerts.", - "externalDocs": { - "url": "http://docs.oxide.computer/api/webhooks" - } - }, { "name": "vpcs", "description": "Virtual Private Clouds (VPCs) provide isolated network environments for managing and deploying services.", diff --git a/schema/crdb/alerts-renamening/up01.sql b/schema/crdb/alerts-renamening/up01.sql new file mode 100644 index 00000000000..8487411e56a --- /dev/null +++ b/schema/crdb/alerts-renamening/up01.sql @@ -0,0 +1,2 @@ +ALTER TABLE IF EXISTS omicron.public.webhook_receiver +RENAME TO omicron.public.alert_receiver; diff --git a/schema/crdb/alerts-renamening/up02.sql b/schema/crdb/alerts-renamening/up02.sql new file mode 100644 index 00000000000..f7655ca65c6 --- /dev/null +++ b/schema/crdb/alerts-renamening/up02.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS lookup_webhook_rx_by_id +RENAME TO lookup_alert_rx_by_id; diff --git a/schema/crdb/alerts-renamening/up03.sql b/schema/crdb/alerts-renamening/up03.sql new file mode 100644 index 00000000000..f05cea3b9b6 --- /dev/null +++ b/schema/crdb/alerts-renamening/up03.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS lookup_webhook_rx_by_name +RENAME TO lookup_alert_rx_by_name; diff --git a/schema/crdb/alerts-renamening/up04.sql b/schema/crdb/alerts-renamening/up04.sql new file mode 100644 index 00000000000..4ea4c061ce4 --- /dev/null +++ b/schema/crdb/alerts-renamening/up04.sql @@ -0,0 +1,13 @@ +CREATE TYPE IF NOT EXISTS omicron.public.alert_class +AS ENUM ( + -- Liveness probes, which are technically not real alerts, but, you know... + 'probe', + -- Test classes used to test globbing. + -- + -- These are not publicly exposed. + 'test.foo', + 'test.foo.bar', + 'test.foo.baz', + 'test.quux.bar', + 'test.quux.bar.baz' +); diff --git a/schema/crdb/alerts-renamening/up05.sql b/schema/crdb/alerts-renamening/up05.sql new file mode 100644 index 00000000000..e8b7ee9a0a3 --- /dev/null +++ b/schema/crdb/alerts-renamening/up05.sql @@ -0,0 +1,2 @@ +ALTER TABLE IF EXISTS omicron.public.webhook_rx_event_glob +RENAME TO omicron.public.alert_glob; diff --git a/schema/crdb/alerts-renamening/up06.sql b/schema/crdb/alerts-renamening/up06.sql new file mode 100644 index 00000000000..61d3c39fd9f --- /dev/null +++ b/schema/crdb/alerts-renamening/up06.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS lookup_webhook_event_globs_for_rx +RENAME TO lookup_alert_globs_for_rx; diff --git a/schema/crdb/alerts-renamening/up07.sql b/schema/crdb/alerts-renamening/up07.sql new file mode 100644 index 00000000000..faacafaf164 --- /dev/null +++ b/schema/crdb/alerts-renamening/up07.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS lookup_webhook_event_globs_by_schema_version +RENAME TO lookup_alert_globs_by_schema_version diff --git a/schema/crdb/alerts-renamening/up08.sql b/schema/crdb/alerts-renamening/up08.sql new file mode 100644 index 00000000000..6919f87136f --- /dev/null +++ b/schema/crdb/alerts-renamening/up08.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS omicron.public.alert_subscription ( + -- UUID of the alert receiver (foreign key into + -- `omicron.public.alert_receiver`) + rx_id UUID NOT NULL, + -- An alert class to which the receiver is subscribed. 
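    -- Illustrative only, not part of this migration: once this table is
    -- populated, the dispatcher-style lookup it exists to answer (and which
    -- the lookup_alert_rxs_for_class index added in up11.sql serves) is
    -- roughly:
    --
    --   SELECT rx_id
    --   FROM omicron.public.alert_subscription
    --   WHERE alert_class = 'test.foo';
    --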
+ alert_class omicron.public.alert_class NOT NULL, + -- If this subscription is a concrete instantiation of a glob pattern, the + -- value of the glob that created it (and, a foreign key into + -- `webhook_rx_event_glob`). If the receiver is subscribed to this exact + -- event class, then this is NULL. + -- + -- This is used when deleting a glob subscription, as it is necessary to + -- delete any concrete subscriptions to individual event classes matching + -- that glob. + glob STRING(512), + + time_created TIMESTAMPTZ NOT NULL, + + PRIMARY KEY (rx_id, alert_class) +); diff --git a/schema/crdb/alerts-renamening/up09.sql b/schema/crdb/alerts-renamening/up09.sql new file mode 100644 index 00000000000..20dd3d96c73 --- /dev/null +++ b/schema/crdb/alerts-renamening/up09.sql @@ -0,0 +1,18 @@ +set + local disallow_full_table_scans = off; + +INSERT INTO omicron.public.alert_subscription ( + rx_id, + alert_class, + glob, + time_created +) +SELECT + rx_id, + event_class::text::omicron.public.alert_class, + glob, + time_created +FROM omicron.public.webhook_rx_subscription +-- this makes it idempotent +ON CONFLICT (rx_id, alert_class) +DO NOTHING; diff --git a/schema/crdb/alerts-renamening/up10.sql b/schema/crdb/alerts-renamening/up10.sql new file mode 100644 index 00000000000..fd25f4c14bf --- /dev/null +++ b/schema/crdb/alerts-renamening/up10.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS omicron.public.webhook_rx_subscription; diff --git a/schema/crdb/alerts-renamening/up11.sql b/schema/crdb/alerts-renamening/up11.sql new file mode 100644 index 00000000000..54eac55873f --- /dev/null +++ b/schema/crdb/alerts-renamening/up11.sql @@ -0,0 +1,4 @@ +CREATE INDEX IF NOT EXISTS lookup_alert_rxs_for_class +ON omicron.public.alert_subscription ( + alert_class +); diff --git a/schema/crdb/alerts-renamening/up12.sql b/schema/crdb/alerts-renamening/up12.sql new file mode 100644 index 00000000000..f226c302a3e --- /dev/null +++ b/schema/crdb/alerts-renamening/up12.sql @@ -0,0 +1,4 @@ +CREATE INDEX IF NOT EXISTS lookup_exact_subscriptions_for_alert_rx +on omicron.public.alert_subscription ( + rx_id +) WHERE glob IS NULL; diff --git a/schema/crdb/alerts-renamening/up13.sql b/schema/crdb/alerts-renamening/up13.sql new file mode 100644 index 00000000000..fc16ffc1b31 --- /dev/null +++ b/schema/crdb/alerts-renamening/up13.sql @@ -0,0 +1,22 @@ +CREATE TABLE IF NOT EXISTS omicron.public.alert ( + id UUID PRIMARY KEY, + time_created TIMESTAMPTZ NOT NULL, + time_modified TIMESTAMPTZ NOT NULL, + + alert_class omicron.public.alert_class NOT NULL, + -- Actual alert data. The structure of this depends on the alert class. + payload JSONB NOT NULL, + + -- Set when dispatch entries have been created for this alert. + time_dispatched TIMESTAMPTZ, + -- The number of receivers that this event was dispatched to. 
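    -- Illustrative only, not part of this migration: a freshly queued alert
    -- satisfies the CHECK constraints below with a NULL time_dispatched and a
    -- zero num_dispatched, e.g. (hypothetical values):
    --
    --   INSERT INTO omicron.public.alert
    --       (id, time_created, time_modified, alert_class, payload,
    --        time_dispatched, num_dispatched)
    --   VALUES
    --       (gen_random_uuid(), NOW(), NOW(), 'test.foo', '{}', NULL, 0);
    --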
+ num_dispatched INT8 NOT NULL, + + CONSTRAINT time_dispatched_set_if_dispatched CHECK ( + (num_dispatched = 0) OR (time_dispatched IS NOT NULL) + ), + + CONSTRAINT num_dispatched_is_positive CHECK ( + (num_dispatched >= 0) + ) +); diff --git a/schema/crdb/alerts-renamening/up14.sql b/schema/crdb/alerts-renamening/up14.sql new file mode 100644 index 00000000000..3294b05d719 --- /dev/null +++ b/schema/crdb/alerts-renamening/up14.sql @@ -0,0 +1,24 @@ +set + local disallow_full_table_scans = off; + +INSERT INTO omicron.public.alert ( + id, + time_created, + time_modified, + alert_class, + payload, + time_dispatched, + num_dispatched +) +SELECT + id, + time_created, + time_modified, + event_class::text::omicron.public.alert_class, + event as payload, + time_dispatched, + num_dispatched +FROM omicron.public.webhook_event +-- this makes it idempotent +ON CONFLICT (id) +DO NOTHING; diff --git a/schema/crdb/alerts-renamening/up15.sql b/schema/crdb/alerts-renamening/up15.sql new file mode 100644 index 00000000000..19d46741256 --- /dev/null +++ b/schema/crdb/alerts-renamening/up15.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS omicron.public.webhook_event; diff --git a/schema/crdb/alerts-renamening/up16.sql b/schema/crdb/alerts-renamening/up16.sql new file mode 100644 index 00000000000..d951f22ab90 --- /dev/null +++ b/schema/crdb/alerts-renamening/up16.sql @@ -0,0 +1,4 @@ +CREATE INDEX IF NOT EXISTS lookup_undispatched_alerts +ON omicron.public.alert ( + id, time_created +) WHERE time_dispatched IS NULL; diff --git a/schema/crdb/alerts-renamening/up17.sql b/schema/crdb/alerts-renamening/up17.sql new file mode 100644 index 00000000000..63f089c3ab5 --- /dev/null +++ b/schema/crdb/alerts-renamening/up17.sql @@ -0,0 +1,8 @@ +CREATE TYPE IF NOT EXISTS omicron.public.alert_delivery_trigger AS ENUM ( + -- This delivery was triggered by the alert being dispatched. + 'alert', + -- This delivery was triggered by an explicit call to the alert resend API. + 'resend', + --- This delivery is a liveness probe. + 'probe' +); diff --git a/schema/crdb/alerts-renamening/up18.sql b/schema/crdb/alerts-renamening/up18.sql new file mode 100644 index 00000000000..bc77c3672e3 --- /dev/null +++ b/schema/crdb/alerts-renamening/up18.sql @@ -0,0 +1,9 @@ +-- Describes the state of an alert delivery +CREATE TYPE IF NOT EXISTS omicron.public.alert_delivery_state AS ENUM ( + -- This delivery has not yet completed. + 'pending', + -- This delivery has failed. + 'failed', + --- This delivery has completed successfully. + 'delivered' +); diff --git a/schema/crdb/alerts-renamening/up19.sql b/schema/crdb/alerts-renamening/up19.sql new file mode 100644 index 00000000000..b2f50fca86a --- /dev/null +++ b/schema/crdb/alerts-renamening/up19.sql @@ -0,0 +1,4 @@ +-- We are about to create a new table and drop this one, as we cannot easily +-- change the enum column types otherwise... :( +ALTER TABLE IF EXISTS omicron.public.webhook_delivery +RENAME TO omicron.public.webhook_delivery_old; diff --git a/schema/crdb/alerts-renamening/up20.sql b/schema/crdb/alerts-renamening/up20.sql new file mode 100644 index 00000000000..1f9f6dc9d78 --- /dev/null +++ b/schema/crdb/alerts-renamening/up20.sql @@ -0,0 +1,37 @@ +-- Delivery dispatch table for webhook receivers. +CREATE TABLE IF NOT EXISTS omicron.public.webhook_delivery ( + -- UUID of this delivery. + id UUID PRIMARY KEY, + --- UUID of the alert (foreign key into `omicron.public.alert`). 
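    -- Illustrative only, not part of this migration: keying deliveries by
    -- alert_id is what lets a delivery row be joined back to the alert it
    -- carries, e.g. a sketch of an "in-flight deliveries" listing:
    --
    --   SELECT d.id, a.alert_class, d.state, d.attempts
    --   FROM omicron.public.webhook_delivery d
    --   JOIN omicron.public.alert a ON a.id = d.alert_id
    --   WHERE d.time_completed IS NULL;
    --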
+ alert_id UUID NOT NULL, + -- UUID of the webhook receiver (foreign key into + -- `omicron.public.alert_receiver`) + rx_id UUID NOT NULL, + + triggered_by omicron.public.alert_delivery_trigger NOT NULL, + + --- Delivery attempt count. Starts at 0. + attempts INT2 NOT NULL, + + time_created TIMESTAMPTZ NOT NULL, + -- If this is set, then this webhook message has either been delivered + -- successfully, or is considered permanently failed. + time_completed TIMESTAMPTZ, + + state omicron.public.alert_delivery_state NOT NULL, + + -- Deliverator coordination bits + deliverator_id UUID, + time_leased TIMESTAMPTZ, + + CONSTRAINT attempts_is_non_negative CHECK (attempts >= 0), + CONSTRAINT active_deliveries_have_started_timestamps CHECK ( + (deliverator_id IS NULL) OR ( + deliverator_id IS NOT NULL AND time_leased IS NOT NULL + ) + ), + CONSTRAINT time_completed_iff_not_pending CHECK ( + (state = 'pending' AND time_completed IS NULL) OR + (state != 'pending' AND time_completed IS NOT NULL) + ) +); diff --git a/schema/crdb/alerts-renamening/up21.sql b/schema/crdb/alerts-renamening/up21.sql new file mode 100644 index 00000000000..1f43ffdae5b --- /dev/null +++ b/schema/crdb/alerts-renamening/up21.sql @@ -0,0 +1,34 @@ +set + local disallow_full_table_scans = off; + +INSERT INTO omicron.public.webhook_delivery ( + id, + alert_id, + rx_id, + triggered_by, + attempts, + time_created, + time_completed, + state, + deliverator_id, + time_leased +) +SELECT + id, + webhook_delivery_old.event_id as alert_id, + rx_id, + CASE webhook_delivery_old.triggered_by + WHEN 'event' THEN 'alert'::omicron.public.alert_delivery_trigger + WHEN 'resend' THEN 'resend'::omicron.public.alert_delivery_trigger + WHEN 'probe' THEN 'probe'::omicron.public.alert_delivery_trigger + END, + attempts, + time_created, + time_completed, + webhook_delivery_old.state::text::omicron.public.alert_delivery_state, + deliverator_id, + time_leased +FROM omicron.public.webhook_delivery_old +-- this makes it idempotent +ON CONFLICT (id) +DO NOTHING; diff --git a/schema/crdb/alerts-renamening/up22.sql b/schema/crdb/alerts-renamening/up22.sql new file mode 100644 index 00000000000..4549dd17701 --- /dev/null +++ b/schema/crdb/alerts-renamening/up22.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS omicron.public.webhook_delivery_old; diff --git a/schema/crdb/alerts-renamening/up23.sql b/schema/crdb/alerts-renamening/up23.sql new file mode 100644 index 00000000000..3877fba1bd1 --- /dev/null +++ b/schema/crdb/alerts-renamening/up23.sql @@ -0,0 +1,10 @@ +-- Ensure that initial delivery attempts (nexus-dispatched) are unique to avoid +-- duplicate work when an alert is dispatched. For deliveries created by calls +-- to the webhook event resend API, we don't enforce this constraint, to allow +-- re-delivery to be triggered multiple times. 
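-- Illustrative only, not executed by this migration: with the partial unique
-- index below in place, a second 'alert'-triggered row for the same
-- (alert_id, rx_id) pair is rejected as a duplicate, while 'resend'- and
-- 'probe'-triggered rows are unaffected. A hypothetical duplicate that the
-- index would reject:
--
--   INSERT INTO omicron.public.webhook_delivery
--       (id, alert_id, rx_id, triggered_by, attempts, time_created, state)
--   VALUES
--       (gen_random_uuid(), '<alert id>', '<rx id>', 'alert', 0, NOW(), 'pending');
--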
+CREATE UNIQUE INDEX IF NOT EXISTS one_webhook_event_dispatch_per_rx +ON omicron.public.webhook_delivery ( + alert_id, rx_id +) +WHERE + triggered_by = 'alert'; diff --git a/schema/crdb/alerts-renamening/up24.sql b/schema/crdb/alerts-renamening/up24.sql new file mode 100644 index 00000000000..a5a6958458f --- /dev/null +++ b/schema/crdb/alerts-renamening/up24.sql @@ -0,0 +1,5 @@ +-- Index for looking up all webhook messages dispatched to a receiver ID +CREATE INDEX IF NOT EXISTS lookup_webhook_delivery_dispatched_to_rx +ON omicron.public.webhook_delivery ( + rx_id, alert_id +); diff --git a/schema/crdb/alerts-renamening/up25.sql b/schema/crdb/alerts-renamening/up25.sql new file mode 100644 index 00000000000..1b7b1e2fd0e --- /dev/null +++ b/schema/crdb/alerts-renamening/up25.sql @@ -0,0 +1,5 @@ +-- Index for looking up all delivery attempts for an alert +CREATE INDEX IF NOT EXISTS lookup_webhook_deliveries_for_alert +ON omicron.public.webhook_delivery ( + alert_id +); diff --git a/schema/crdb/alerts-renamening/up26.sql b/schema/crdb/alerts-renamening/up26.sql new file mode 100644 index 00000000000..9f8973ec163 --- /dev/null +++ b/schema/crdb/alerts-renamening/up26.sql @@ -0,0 +1,7 @@ +-- Index for looking up all currently in-flight webhook messages, and ordering +-- them by their creation times. +CREATE INDEX IF NOT EXISTS webhook_deliveries_in_flight +ON omicron.public.webhook_delivery ( + time_created, id +) WHERE + time_completed IS NULL; diff --git a/schema/crdb/alerts-renamening/up27.sql b/schema/crdb/alerts-renamening/up27.sql new file mode 100644 index 00000000000..c7dbd17278e --- /dev/null +++ b/schema/crdb/alerts-renamening/up27.sql @@ -0,0 +1 @@ +DROP TYPE IF EXISTS omicron.public.webhook_event_class; diff --git a/schema/crdb/alerts-renamening/up28.sql b/schema/crdb/alerts-renamening/up28.sql new file mode 100644 index 00000000000..a8d1662fa92 --- /dev/null +++ b/schema/crdb/alerts-renamening/up28.sql @@ -0,0 +1 @@ +DROP TYPE IF EXISTS omicron.public.webhook_delivery_state; diff --git a/schema/crdb/alerts-renamening/up29.sql b/schema/crdb/alerts-renamening/up29.sql new file mode 100644 index 00000000000..cd51440f079 --- /dev/null +++ b/schema/crdb/alerts-renamening/up29.sql @@ -0,0 +1 @@ +DROP TYPE IF EXISTS omicron.public.webhook_delivery_trigger; diff --git a/schema/crdb/alerts-renamening/up30.sql b/schema/crdb/alerts-renamening/up30.sql new file mode 100644 index 00000000000..a9d99e8d118 --- /dev/null +++ b/schema/crdb/alerts-renamening/up30.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS omicron.public.alert_receiver @ webhook_receiver_pkey + RENAME TO alert_receiver_pkey; diff --git a/schema/crdb/alerts-renamening/up31.sql b/schema/crdb/alerts-renamening/up31.sql new file mode 100644 index 00000000000..00b47240481 --- /dev/null +++ b/schema/crdb/alerts-renamening/up31.sql @@ -0,0 +1,2 @@ +ALTER INDEX IF EXISTS omicron.public.alert_glob @ webhook_rx_event_glob_pkey +RENAME TO alert_glob_pkey; diff --git a/schema/crdb/dbinit.sql b/schema/crdb/dbinit.sql index 9185123b558..dfa1f2f5f4e 100644 --- a/schema/crdb/dbinit.sql +++ b/schema/crdb/dbinit.sql @@ -5130,15 +5130,15 @@ CREATE UNIQUE INDEX IF NOT EXISTS one_record_per_volume_resource_usage on omicro ); /* - * WEBHOOKS + * Alerts */ /* - * Webhook receivers, receiver secrets, and receiver subscriptions. + * Alert webhook receivers, receiver secrets, and receiver subscriptions. 
 */

-CREATE TABLE IF NOT EXISTS omicron.public.webhook_receiver (
+CREATE TABLE IF NOT EXISTS omicron.public.alert_receiver (
     /* Identity metadata (resource) */
     id UUID PRIMARY KEY,
     name STRING(63) NOT NULL,
@@ -5157,13 +5157,13 @@ CREATE TABLE IF NOT EXISTS omicron.public.webhook_receiver (
     endpoint STRING(512) NOT NULL
 );

-CREATE UNIQUE INDEX IF NOT EXISTS lookup_webhook_rx_by_id
-ON omicron.public.webhook_receiver (id)
+CREATE UNIQUE INDEX IF NOT EXISTS lookup_alert_rx_by_id
+ON omicron.public.alert_receiver (id)
 WHERE
     time_deleted IS NULL;

-CREATE UNIQUE INDEX IF NOT EXISTS lookup_webhook_rx_by_name
-ON omicron.public.webhook_receiver (
+CREATE UNIQUE INDEX IF NOT EXISTS lookup_alert_rx_by_name
+ON omicron.public.alert_receiver (
     name
 ) WHERE
     time_deleted IS NULL;
@@ -5189,11 +5189,11 @@ ON omicron.public.webhook_secret (
 ) WHERE
     time_deleted IS NULL;

--- Webhook event classes.
+-- Alert classes.
 --
--- When creating new event classes, be sure to add them here!
-CREATE TYPE IF NOT EXISTS omicron.public.webhook_event_class AS ENUM (
-    -- Liveness probes, which are technically not real events, but, you know...
+-- When creating new alert classes, be sure to add them here!
+CREATE TYPE IF NOT EXISTS omicron.public.alert_class AS ENUM (
+    -- Liveness probes, which are technically not real alerts, but, you know...
     'probe',
     -- Test classes used to test globbing.
     --
@@ -5203,21 +5203,21 @@ CREATE TYPE IF NOT EXISTS omicron.public.webhook_event_class AS ENUM (
     'test.foo.baz',
     'test.quux.bar',
     'test.quux.bar.baz'
-    -- Add new event classes here!
+    -- Add new alert classes here!
 );

--- The set of event class filters (either event class names or event class glob
--- patterns) associated with a webhook receiver.
+-- The set of alert class filters (either alert class names or alert class glob
+-- patterns) associated with an alert receiver.
 --
--- This is used when creating entries in the webhook_rx_subscription table to
--- indicate that a webhook receiver is interested in a given event class.
-CREATE TABLE IF NOT EXISTS omicron.public.webhook_rx_event_glob (
-    -- UUID of the webhook receiver (foreign key into
-    -- `omicron.public.webhook_rx`)
+-- This is used when creating entries in the alert_subscription table to
+-- indicate that an alert receiver is interested in a given event class.
+CREATE TABLE IF NOT EXISTS omicron.public.alert_glob (
+    -- UUID of the alert receiver (foreign key into
+    -- `omicron.public.alert_receiver`)
     rx_id UUID NOT NULL,
     -- An event class glob to which this receiver is subscribed.
     glob STRING(512) NOT NULL,
-    -- Regex used when evaluating this filter against concrete event classes.
+    -- Regex used when evaluating this filter against concrete alert classes.
     regex STRING(512) NOT NULL,
     time_created TIMESTAMPTZ NOT NULL,
     -- The database schema version at which this glob was last expanded.
@@ -5232,21 +5232,21 @@ CREATE TABLE IF NOT EXISTS omicron.public.webhook_rx_event_glob (
     PRIMARY KEY (rx_id, glob)
 );

--- Look up all event class globs for a webhook receiver.
-CREATE INDEX IF NOT EXISTS lookup_webhook_event_globs_for_rx
-ON omicron.public.webhook_rx_event_glob (
+-- Look up all event class globs for an alert receiver.
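-- Illustrative only, not executed here: the per-receiver lookup this index
-- serves is roughly:
--
--   SELECT glob, regex
--   FROM omicron.public.alert_glob
--   WHERE rx_id = '<receiver id>';
--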
+CREATE INDEX IF NOT EXISTS lookup_alert_globs_for_rx
+ON omicron.public.alert_glob (
     rx_id
 );

-CREATE INDEX IF NOT EXISTS lookup_webhook_event_globs_by_schema_version
-ON omicron.public.webhook_rx_event_glob (schema_version);
+CREATE INDEX IF NOT EXISTS lookup_alert_globs_by_schema_version
+ON omicron.public.alert_glob (schema_version);

-CREATE TABLE IF NOT EXISTS omicron.public.webhook_rx_subscription (
-    -- UUID of the webhook receiver (foreign key into
-    -- `omicron.public.webhook_rx`)
+CREATE TABLE IF NOT EXISTS omicron.public.alert_subscription (
+    -- UUID of the alert receiver (foreign key into
+    -- `omicron.public.alert_receiver`)
     rx_id UUID NOT NULL,
-    -- An event class to which the receiver is subscribed.
-    event_class omicron.public.webhook_event_class NOT NULL,
+    -- An alert class to which the receiver is subscribed.
+    alert_class omicron.public.alert_class NOT NULL,
     -- If this subscription is a concrete instantiation of a glob pattern, the
     -- value of the glob that created it (and, a foreign key into
     -- `webhook_rx_event_glob`). If the receiver is subscribed to this exact
     -- event class, then this is NULL.
     --
     -- This is used when deleting a glob subscription, as it is necessary to
     -- delete any concrete subscriptions to individual event classes matching
     -- that glob.
     glob STRING(512),

     time_created TIMESTAMPTZ NOT NULL,

-    PRIMARY KEY (rx_id, event_class)
+    PRIMARY KEY (rx_id, alert_class)
 );

--- Look up all webhook receivers subscribed to an event class. This is used by
+-- Look up all receivers subscribed to an alert class. This is used by
 -- the dispatcher to determine who is interested in a particular event.
-CREATE INDEX IF NOT EXISTS lookup_webhook_rxs_for_event_class
-ON omicron.public.webhook_rx_subscription (
-    event_class
+CREATE INDEX IF NOT EXISTS lookup_alert_rxs_for_class
+ON omicron.public.alert_subscription (
+    alert_class
 );

 -- Look up all exact event class subscriptions for a receiver.
@@ -5274,28 +5274,28 @@ ON omicron.public.webhook_rx_subscription (
 -- This is used when generating a view of all user-provided original
 -- subscriptions provided for a receiver. That list is generated by looking up
 -- all exact event class subscriptions for the receiver ID in this table,
--- combined with the list of all globs in the `webhook_rx_event_glob` table.
-CREATE INDEX IF NOT EXISTS lookup_exact_subscriptions_for_webhook_rx
-on omicron.public.webhook_rx_subscription (
+-- combined with the list of all globs in the `alert_glob` table.
+CREATE INDEX IF NOT EXISTS lookup_exact_subscriptions_for_alert_rx
+on omicron.public.alert_subscription (
     rx_id
 ) WHERE
     glob IS NULL;

 /*
- * Webhook event message queue.
+ * Alert message queue.
  */

-CREATE TABLE IF NOT EXISTS omicron.public.webhook_event (
+CREATE TABLE IF NOT EXISTS omicron.public.alert (
     id UUID PRIMARY KEY,
     time_created TIMESTAMPTZ NOT NULL,
     time_modified TIMESTAMPTZ NOT NULL,

-    -- The class of event that this is.
-    event_class omicron.public.webhook_event_class NOT NULL,
-    -- Actual event data. The structure of this depends on the event class.
-    event JSONB NOT NULL,
+    -- The class of alert that this is.
+    alert_class omicron.public.alert_class NOT NULL,
+    -- Actual alert data. The structure of this depends on the alert class.
+    payload JSONB NOT NULL,

-    -- Set when dispatch entries have been created for this event.
+    -- Set when dispatch entries have been created for this alert.
     time_dispatched TIMESTAMPTZ,
-    -- The number of receivers that this event was dispatched to.
+    -- The number of receivers that this alert was dispatched to.
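    -- Illustrative only, not executed here: when the dispatcher finishes
    -- fanning an alert out, both fields are recorded together so that the
    -- CHECK constraints below hold, e.g. (hypothetical values):
    --
    --   UPDATE omicron.public.alert
    --   SET time_dispatched = NOW(), num_dispatched = 2
    --   WHERE id = '<alert id>';
    --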
num_dispatched INT8 NOT NULL, CONSTRAINT time_dispatched_set_if_dispatched CHECK ( @@ -5307,23 +5307,23 @@ CREATE TABLE IF NOT EXISTS omicron.public.webhook_event ( ) ); --- Singleton probe event -INSERT INTO omicron.public.webhook_event ( +-- Singleton probe alert +INSERT INTO omicron.public.alert ( id, time_created, time_modified, - event_class, - event, + alert_class, + payload, time_dispatched, num_dispatched ) VALUES ( - -- NOTE: this UUID is duplicated in nexus_db_model::webhook_event. + -- NOTE: this UUID is duplicated in nexus_db_model::alert. '001de000-7768-4000-8000-000000000001', NOW(), NOW(), 'probe', '{}', - -- Pretend to be dispatched so we won't show up in "list events needing + -- Pretend to be dispatched so we won't show up in "list alerts needing -- dispatch" queries NOW(), 0 @@ -5332,29 +5332,28 @@ INSERT INTO omicron.public.webhook_event ( -- Look up webhook events in need of dispatching. -- -- This is used by the message dispatcher when looking for events to dispatch. -CREATE INDEX IF NOT EXISTS lookup_undispatched_webhook_events -ON omicron.public.webhook_event ( +CREATE INDEX IF NOT EXISTS lookup_undispatched_alerts +ON omicron.public.alert ( id, time_created ) WHERE time_dispatched IS NULL; /* - * Webhook message dispatching and delivery attempts. + * Alert message dispatching and delivery attempts. */ --- Describes why a webhook delivery was triggered -CREATE TYPE IF NOT EXISTS omicron.public.webhook_delivery_trigger AS ENUM ( - -- This delivery was triggered by the event being dispatched. - 'event', - -- This delivery was triggered by an explicit call to the webhook event - -- resend API. +-- Describes why an alert delivery was triggered +CREATE TYPE IF NOT EXISTS omicron.public.alert_delivery_trigger AS ENUM ( + -- This delivery was triggered by the alert being dispatched. + 'alert', + -- This delivery was triggered by an explicit call to the alert resend API. 'resend', --- This delivery is a liveness probe. 'probe' ); --- Describes the state of a webhook delivery -CREATE TYPE IF NOT EXISTS omicron.public.webhook_delivery_state AS ENUM ( +-- Describes the state of an alert delivery +CREATE TYPE IF NOT EXISTS omicron.public.alert_delivery_state AS ENUM ( -- This delivery has not yet completed. 'pending', -- This delivery has failed. @@ -5363,16 +5362,17 @@ CREATE TYPE IF NOT EXISTS omicron.public.webhook_delivery_state AS ENUM ( 'delivered' ); +-- Delivery dispatch table for webhook receivers. CREATE TABLE IF NOT EXISTS omicron.public.webhook_delivery ( -- UUID of this delivery. id UUID PRIMARY KEY, - --- UUID of the event (foreign key into `omicron.public.webhook_event`). - event_id UUID NOT NULL, + --- UUID of the alert (foreign key into `omicron.public.alert`). + alert_id UUID NOT NULL, -- UUID of the webhook receiver (foreign key into - -- `omicron.public.webhook_rx`) + -- `omicron.public.alert_receiver`) rx_id UUID NOT NULL, - triggered_by omicron.public.webhook_delivery_trigger NOT NULL, + triggered_by omicron.public.alert_delivery_trigger NOT NULL, --- Delivery attempt count. Starts at 0. attempts INT2 NOT NULL, @@ -5382,7 +5382,7 @@ CREATE TABLE IF NOT EXISTS omicron.public.webhook_delivery ( -- successfully, or is considered permanently failed. 
time_completed TIMESTAMPTZ, - state omicron.public.webhook_delivery_state NOT NULL, + state omicron.public.alert_delivery_state NOT NULL, -- Deliverator coordination bits deliverator_id UUID, @@ -5401,26 +5401,26 @@ CREATE TABLE IF NOT EXISTS omicron.public.webhook_delivery ( ); -- Ensure that initial delivery attempts (nexus-dispatched) are unique to avoid --- duplicate work when an event is dispatched. For deliveries created by calls +-- duplicate work when an alert is dispatched. For deliveries created by calls -- to the webhook event resend API, we don't enforce this constraint, to allow -- re-delivery to be triggered multiple times. CREATE UNIQUE INDEX IF NOT EXISTS one_webhook_event_dispatch_per_rx ON omicron.public.webhook_delivery ( - event_id, rx_id + alert_id, rx_id ) WHERE - triggered_by = 'event'; + triggered_by = 'alert'; -- Index for looking up all webhook messages dispatched to a receiver ID CREATE INDEX IF NOT EXISTS lookup_webhook_delivery_dispatched_to_rx ON omicron.public.webhook_delivery ( - rx_id, event_id + rx_id, alert_id ); --- Index for looking up all delivery attempts for an event -CREATE INDEX IF NOT EXISTS lookup_webhook_deliveries_for_event +-- Index for looking up all delivery attempts for an alert +CREATE INDEX IF NOT EXISTS lookup_webhook_deliveries_for_alert ON omicron.public.webhook_delivery ( - event_id + alert_id ); -- Index for looking up all currently in-flight webhook messages, and ordering @@ -5518,7 +5518,7 @@ INSERT INTO omicron.public.db_metadata ( version, target_version ) VALUES - (TRUE, NOW(), NOW(), '142.0.0', NULL) + (TRUE, NOW(), NOW(), '143.0.0', NULL) ON CONFLICT DO NOTHING; COMMIT; diff --git a/smf/nexus/multi-sled/config-partial.toml b/smf/nexus/multi-sled/config-partial.toml index 770bee73b1b..5cf5491fe91 100644 --- a/smf/nexus/multi-sled/config-partial.toml +++ b/smf/nexus/multi-sled/config-partial.toml @@ -78,7 +78,7 @@ tuf_artifact_replication.min_sled_replication = 3 read_only_region_replacement_start.period_secs = 30 # In general, the webhook dispatcher will be activated when events are queued, # so we don't need to periodically activate it *that* frequently. -webhook_dispatcher.period_secs = 60 +alert_dispatcher.period_secs = 60 webhook_deliverator.period_secs = 60 [default_region_allocation_strategy] diff --git a/smf/nexus/single-sled/config-partial.toml b/smf/nexus/single-sled/config-partial.toml index eefe7554ed6..52a8b642283 100644 --- a/smf/nexus/single-sled/config-partial.toml +++ b/smf/nexus/single-sled/config-partial.toml @@ -78,7 +78,7 @@ tuf_artifact_replication.min_sled_replication = 1 read_only_region_replacement_start.period_secs = 30 # In general, the webhook dispatcher will be activated when events are queued, # so we don't need to periodically activate it *that* frequently. -webhook_dispatcher.period_secs = 60 +alert_dispatcher.period_secs = 60 webhook_deliverator.period_secs = 60 [default_region_allocation_strategy] diff --git a/uuid-kinds/src/lib.rs b/uuid-kinds/src/lib.rs index 1e343ecfc74..3eddf8011af 100644 --- a/uuid-kinds/src/lib.rs +++ b/uuid-kinds/src/lib.rs @@ -52,6 +52,8 @@ macro_rules! impl_typed_uuid_kind { impl_typed_uuid_kind! { AffinityGroup => "affinity_group", + Alert => "alert", + AlertReceiver => "alert_receiver", AntiAffinityGroup => "anti_affinity_group", Blueprint => "blueprint", Collection => "collection", @@ -82,8 +84,6 @@ impl_typed_uuid_kind! 
{ UpstairsSession => "upstairs_session", Vnic => "vnic", Volume => "volume", - WebhookEvent => "webhook_event", - WebhookReceiver => "webhook_receiver", WebhookDelivery => "webhook_delivery", WebhookDeliveryAttempt => "webhook_delivery_attempt", WebhookSecret => "webhook_secret",
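
A minimal query sketch, assuming the alert_subscription schema exactly as defined above, of the dispatcher-style lookup that the lookup_alert_rxs_for_class index is meant to serve; the class 'test.foo.bar' is simply one of the test members of the alert_class enum:

    -- Illustrative sketch only: find every receiver subscribed to a concrete
    -- alert class. The filter on alert_class matches the column covered by
    -- the lookup_alert_rxs_for_class index.
    SELECT rx_id
    FROM omicron.public.alert_subscription
    WHERE alert_class = 'test.foo.bar';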
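In the same spirit, a sketch of the shape of query served by the lookup_undispatched_alerts partial index, assuming the dispatcher looks for alerts whose time_dispatched is still NULL:

    -- Illustrative sketch only: alerts not yet dispatched, oldest first,
    -- matching the WHERE clause of the lookup_undispatched_alerts index.
    SELECT id, time_created, alert_class
    FROM omicron.public.alert
    WHERE time_dispatched IS NULL
    ORDER BY time_created;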
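A sketch of how a glob subscription relates to the concrete rows it expands into; the receiver UUID and the glob value 'test.foo.*' below are placeholders, not values taken from this change:

    -- Illustrative sketch only: concrete alert_subscription rows produced by
    -- expanding one glob for one receiver. Both literals are placeholders.
    SELECT alert_class
    FROM omicron.public.alert_subscription
    WHERE rx_id = '00000000-0000-0000-0000-000000000000'
      AND glob = 'test.foo.*';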
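And a sketch using the renamed alert_id column on webhook_delivery, listing the deliveries recorded for a single alert (here the singleton probe alert inserted above), which is the lookup covered by the lookup_webhook_deliveries_for_alert index:

    -- Illustrative sketch only: all webhook deliveries recorded for one alert.
    SELECT id, rx_id, triggered_by, state, attempts
    FROM omicron.public.webhook_delivery
    WHERE alert_id = '001de000-7768-4000-8000-000000000001';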