2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

17 changes: 17 additions & 0 deletions dev-tools/omdb/src/bin/omdb/nexus.rs
@@ -70,6 +70,7 @@ use nexus_types::internal_api::background::SupportBundleEreportStatus;
use nexus_types::internal_api::background::TufArtifactReplicationCounters;
use nexus_types::internal_api::background::TufArtifactReplicationRequest;
use nexus_types::internal_api::background::TufArtifactReplicationStatus;
use nexus_types::internal_api::background::TufRepoPrunerStatus;
use nexus_types::inventory::BaseboardId;
use omicron_uuid_kinds::BlueprintUuid;
use omicron_uuid_kinds::CollectionUuid;
@@ -1199,6 +1200,9 @@ fn print_task_details(bgtask: &BackgroundTask, details: &serde_json::Value) {
"tuf_artifact_replication" => {
print_task_tuf_artifact_replication(details);
}
"tuf_repo_pruner" => {
print_task_tuf_repo_pruner(details);
}
"alert_dispatcher" => {
print_task_alert_dispatcher(details);
}
@@ -2573,6 +2577,19 @@ fn print_task_tuf_artifact_replication(details: &serde_json::Value) {
    }
}

fn print_task_tuf_repo_pruner(details: &serde_json::Value) {
    match serde_json::from_value::<TufRepoPrunerStatus>(details.clone()) {
        Err(error) => eprintln!(
            "warning: failed to interpret task details: {}: {:?}",
            InlineErrorChain::new(&error),
            details
        ),
        Ok(status) => {
            print!("{}", status);
        }
    }
}

fn print_task_alert_dispatcher(details: &serde_json::Value) {
use nexus_types::internal_api::background::AlertDispatched;
use nexus_types::internal_api::background::AlertDispatcherStatus;
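Aside (not part of this PR's diff): the new print_task_tuf_repo_pruner above only calls print!("{}", status), so the formatting lives in a Display impl on TufRepoPrunerStatus in nexus_types, which this changeset does not show. A rough, self-contained sketch of that pattern, with invented struct and field names modeled on the omdb output captured in successes.out further down:

use std::fmt;

// Illustrative stand-in for nexus_types' TufRepoPrunerStatus; the real type's
// fields are not shown in this diff.
struct TufRepoPrunerStatusSketch {
    nkeep_recent_releases: u8,
    nkeep_recent_uploads: u8,
    repos_pruned: Vec<String>,
}

impl fmt::Display for TufRepoPrunerStatusSketch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "configuration:")?;
        writeln!(f, "    nkeep_recent_releases: {}", self.nkeep_recent_releases)?;
        writeln!(f, "    nkeep_recent_uploads: {}", self.nkeep_recent_uploads)?;
        if self.repos_pruned.is_empty() {
            writeln!(f, "repo pruned: none")
        } else {
            writeln!(f, "repo pruned: {}", self.repos_pruned.join(", "))
        }
    }
}

fn main() {
    // omdb's print_task_tuf_repo_pruner would then do the equivalent of:
    let status = TufRepoPrunerStatusSketch {
        nkeep_recent_releases: 3,
        nkeep_recent_uploads: 3,
        repos_pruned: Vec::new(),
    };
    print!("{status}");
}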
12 changes: 12 additions & 0 deletions dev-tools/omdb/tests/env.out
@@ -199,6 +199,10 @@ task: "tuf_artifact_replication"
replicate update repo artifacts across sleds


task: "tuf_repo_pruner"
determine which TUF repos' artifacts can be pruned


task: "v2p_manager"
manages opte v2p mappings for vpc networking

@@ -407,6 +411,10 @@ task: "tuf_artifact_replication"
replicate update repo artifacts across sleds


task: "tuf_repo_pruner"
determine which TUF repos' artifacts can be pruned


task: "v2p_manager"
manages opte v2p mappings for vpc networking

@@ -602,6 +610,10 @@ task: "tuf_artifact_replication"
replicate update repo artifacts across sleds


task: "tuf_repo_pruner"
determine which TUF repos' artifacts can be pruned


task: "v2p_manager"
manages opte v2p mappings for vpc networking

28 changes: 28 additions & 0 deletions dev-tools/omdb/tests/successes.out
@@ -434,6 +434,10 @@ task: "tuf_artifact_replication"
replicate update repo artifacts across sleds


task: "tuf_repo_pruner"
determine which TUF repos' artifacts can be pruned


task: "v2p_manager"
manages opte v2p mappings for vpc networking

@@ -819,6 +823,18 @@ task: "tuf_artifact_replication"
copy err: 0
local repos: 0

task: "tuf_repo_pruner"
configured period: every <REDACTED_DURATION>m
last completed activation: <REDACTED ITERATIONS>, triggered by <TRIGGERED_BY_REDACTED>
started at <REDACTED_TIMESTAMP> (<REDACTED DURATION>s ago) and ran for <REDACTED DURATION>ms
configuration:
nkeep_recent_releases: 3
nkeep_recent_uploads: 3
repo pruned: none
repos kept because they're recent target releases: none
repos kept because they're recent uploads: none
other repos eligible for pruning: none

task: "v2p_manager"
configured period: every <REDACTED_DURATION>s
last completed activation: <REDACTED ITERATIONS>, triggered by <TRIGGERED_BY_REDACTED>
@@ -1335,6 +1351,18 @@ task: "tuf_artifact_replication"
copy err: 0
local repos: 0

task: "tuf_repo_pruner"
configured period: every <REDACTED_DURATION>m
last completed activation: <REDACTED ITERATIONS>, triggered by <TRIGGERED_BY_REDACTED>
started at <REDACTED_TIMESTAMP> (<REDACTED DURATION>s ago) and ran for <REDACTED DURATION>ms
configuration:
nkeep_recent_releases: 3
nkeep_recent_uploads: 3
repo pruned: none
repos kept because they're recent target releases: none
repos kept because they're recent uploads: none
other repos eligible for pruning: none

task: "v2p_manager"
configured period: every <REDACTED_DURATION>s
last completed activation: <REDACTED ITERATIONS>, triggered by <TRIGGERED_BY_REDACTED>
33 changes: 33 additions & 0 deletions nexus-config/src/nexus_config.rs
@@ -430,6 +430,8 @@ pub struct BackgroundTaskConfig {
        RegionSnapshotReplacementFinishConfig,
    /// configuration for TUF artifact replication task
    pub tuf_artifact_replication: TufArtifactReplicationConfig,
    /// configuration for TUF repo pruner task
    pub tuf_repo_pruner: TufRepoPrunerConfig,
    /// configuration for read-only region replacement start task
    pub read_only_region_replacement_start:
        ReadOnlyRegionReplacementStartConfig,
@@ -765,6 +767,26 @@ pub struct TufArtifactReplicationConfig {
    pub min_sled_replication: usize,
}

#[serde_as]
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct TufRepoPrunerConfig {
    /// period (in seconds) for periodic activations of this background task
    #[serde_as(as = "DurationSeconds<u64>")]
    pub period_secs: Duration,

    /// number of extra recent target releases to keep
    ///
    /// The system always keeps two: the current release and the previous one.
    /// This number is in addition to that.
    pub nkeep_extra_target_releases: u8,

    /// number of extra recently uploaded repos to keep
    ///
    /// The system always keeps one, assuming that the operator may be about to
    /// update to it. This number is in addition to that.
    pub nkeep_extra_newly_uploaded: u8,
}

#[serde_as]
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct ReadOnlyRegionReplacementStartConfig {
@@ -1119,6 +1141,9 @@ mod test {
region_snapshot_replacement_finish.period_secs = 30
tuf_artifact_replication.period_secs = 300
tuf_artifact_replication.min_sled_replication = 3
tuf_repo_pruner.period_secs = 299
tuf_repo_pruner.nkeep_extra_target_releases = 51
tuf_repo_pruner.nkeep_extra_newly_uploaded = 52
read_only_region_replacement_start.period_secs = 30
alert_dispatcher.period_secs = 42
webhook_deliverator.period_secs = 43
@@ -1342,6 +1367,11 @@
period_secs: Duration::from_secs(300),
min_sled_replication: 3,
},
tuf_repo_pruner: TufRepoPrunerConfig {
period_secs: Duration::from_secs(299),
nkeep_extra_target_releases: 51,
nkeep_extra_newly_uploaded: 52,
},
read_only_region_replacement_start:
ReadOnlyRegionReplacementStartConfig {
period_secs: Duration::from_secs(30),
@@ -1449,6 +1479,9 @@ mod test {
region_snapshot_replacement_finish.period_secs = 30
tuf_artifact_replication.period_secs = 300
tuf_artifact_replication.min_sled_replication = 3
tuf_repo_pruner.period_secs = 299
tuf_repo_pruner.nkeep_extra_target_releases = 51
tuf_repo_pruner.nkeep_extra_newly_uploaded = 52
read_only_region_replacement_start.period_secs = 30
alert_dispatcher.period_secs = 42
webhook_deliverator.period_secs = 43
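Aside (not from this PR): to make the two nkeep_extra_* knobs above concrete, here is a minimal, self-contained sketch of how the effective retention counts could be derived, following the doc comments on TufRepoPrunerConfig (two target releases and one newly uploaded repo are always kept; the config adds extras on top). The function and struct names are hypothetical; only the nkeep_recent_releases / nkeep_recent_uploads labels come from the omdb output above, and the assumption that they equal baseline + extras is an inference, not something this diff states.

/// Hypothetical helper: effective retention derived from the config knobs.
struct Retention {
    /// total target releases to keep: 2 baseline + configured extras
    nkeep_recent_releases: u8,
    /// total recently uploaded repos to keep: 1 baseline + configured extras
    nkeep_recent_uploads: u8,
}

fn effective_retention(
    nkeep_extra_target_releases: u8,
    nkeep_extra_newly_uploaded: u8,
) -> Retention {
    Retention {
        // always keep the current and the previous target release
        nkeep_recent_releases: 2u8.saturating_add(nkeep_extra_target_releases),
        // always keep one newly uploaded repo the operator may be about to use
        nkeep_recent_uploads: 1u8.saturating_add(nkeep_extra_newly_uploaded),
    }
}

fn main() {
    let r = effective_retention(1, 2);
    // Prints "releases to keep: 3, uploads to keep: 3", matching the counts in
    // the captured omdb output (if 1 and 2 were the shipped defaults).
    println!(
        "releases to keep: {}, uploads to keep: {}",
        r.nkeep_recent_releases, r.nkeep_recent_uploads
    );
}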
1 change: 1 addition & 0 deletions nexus/background-task-interface/src/init.rs
@@ -44,6 +44,7 @@ pub struct BackgroundTasks {
    pub task_region_snapshot_replacement_step: Activator,
    pub task_region_snapshot_replacement_finish: Activator,
    pub task_tuf_artifact_replication: Activator,
    pub task_tuf_repo_pruner: Activator,
    pub task_read_only_region_replacement_start: Activator,
    pub task_alert_dispatcher: Activator,
    pub task_webhook_deliverator: Activator,
3 changes: 2 additions & 1 deletion nexus/db-model/src/schema_versions.rs
@@ -16,7 +16,7 @@ use std::{collections::BTreeMap, sync::LazyLock};
///
/// This must be updated when you change the database schema. Refer to
/// schema/crdb/README.adoc in the root of this repository for details.
pub const SCHEMA_VERSION: Version = Version::new(194, 0, 0);
pub const SCHEMA_VERSION: Version = Version::new(195, 0, 0);

/// List of all past database schema versions, in *reverse* order
///
@@ -28,6 +28,7 @@ static KNOWN_VERSIONS: LazyLock<Vec<KnownVersion>> = LazyLock::new(|| {
// | leaving the first copy as an example for the next person.
// v
// KnownVersion::new(next_int, "unique-dirname-with-the-sql-files"),
KnownVersion::new(195, "tuf-pruned-index"),
KnownVersion::new(194, "tuf-pruned"),
KnownVersion::new(193, "nexus-lockstep-port"),
KnownVersion::new(192, "blueprint-source"),