From 597a569e622a85ea3509e5930c05a633441b24e5 Mon Sep 17 00:00:00 2001 From: Rain Date: Mon, 30 Jun 2025 23:08:56 +0000 Subject: [PATCH 1/3] [spr] changes to main this commit is based on Created using spr 1.3.6-beta.1 [skip ci] --- Cargo.lock | 2 + dev-tools/reconfigurator-cli/Cargo.toml | 4 +- dev-tools/reconfigurator-cli/src/lib.rs | 81 ++++++++++ .../reconfigurator-cli/tests/common/mod.rs | 1 + ...et-release.txt => cmds-target-release.txt} | 9 +- ...ease-stderr => cmds-target-release-stderr} | 0 ...ease-stdout => cmds-target-release-stdout} | 13 +- .../reconfigurator-cli/tests/test-custom.rs | 47 ------ nexus/db-model/src/target_release.rs | 2 + .../db-queries/src/db/datastore/deployment.rs | 39 +++++ .../src/db/datastore/target_release.rs | 149 +++++++++++++++++- nexus/types/src/external_api/views.rs | 11 ++ openapi/nexus.json | 5 + 13 files changed, 307 insertions(+), 56 deletions(-) rename dev-tools/reconfigurator-cli/tests/input/{target-release.txt => cmds-target-release.txt} (90%) rename dev-tools/reconfigurator-cli/tests/output/{target-release-stderr => cmds-target-release-stderr} (100%) rename dev-tools/reconfigurator-cli/tests/output/{target-release-stdout => cmds-target-release-stdout} (99%) delete mode 100644 dev-tools/reconfigurator-cli/tests/test-custom.rs diff --git a/Cargo.lock b/Cargo.lock index 1f746b5c779..1bea60531d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10599,8 +10599,10 @@ dependencies = [ "swrite", "tabled 0.15.0", "tokio", + "toml 0.8.23", "tufaceous", "tufaceous-artifact", + "tufaceous-lib", "update-common", "uuid", ] diff --git a/dev-tools/reconfigurator-cli/Cargo.toml b/dev-tools/reconfigurator-cli/Cargo.toml index da04874dd4a..a560b2224be 100644 --- a/dev-tools/reconfigurator-cli/Cargo.toml +++ b/dev-tools/reconfigurator-cli/Cargo.toml @@ -42,7 +42,10 @@ slog.workspace = true swrite.workspace = true tabled.workspace = true tokio.workspace = true +toml.workspace = true +tufaceous.workspace = true tufaceous-artifact.workspace = true +tufaceous-lib.workspace = true update-common.workspace = true uuid.workspace = true omicron-workspace-hack.workspace = true @@ -59,7 +62,6 @@ omicron-test-utils.workspace = true serde.workspace = true subprocess.workspace = true tokio.workspace = true -tufaceous.workspace = true # Disable doc builds by default for our binaries to work around issue # rust-lang/cargo#8373. These docs would not be very useful anyway. diff --git a/dev-tools/reconfigurator-cli/src/lib.rs b/dev-tools/reconfigurator-cli/src/lib.rs index 46e88f51029..cbec115cbdb 100644 --- a/dev-tools/reconfigurator-cli/src/lib.rs +++ b/dev-tools/reconfigurator-cli/src/lib.rs @@ -60,10 +60,19 @@ use tabled::Tabled; use tufaceous_artifact::ArtifactHash; use tufaceous_artifact::ArtifactVersion; use tufaceous_artifact::ArtifactVersionError; +use tufaceous_lib::assemble::ArtifactManifest; use update_common::artifacts::{ArtifactsWithPlan, ControlPlaneZonesMode}; mod log_capture; +/// The default key for TUF repository generation. +/// +/// This was randomly generated through a tufaceous invocation. 
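+///
+/// Nothing should depend on this particular value: the simulator just needs
+/// some fixed key to sign generated repos with (see `cmd_tuf_assemble`
+/// below).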
+pub static DEFAULT_TUFACEOUS_KEY: &str = "ed25519:\
+MFECAQEwBQYDK2VwBCIEIJ9CnAhwk8PPt1x8icu\
+z9c12PdfCRHJpoUkuqJmIZ8GbgSEAbNGMpsHK5_w32\
+qwYdZH_BeVssmKzQlFsnPuaiHx2hy0=";
+
 /// REPL state
 #[derive(Debug)]
 struct ReconfiguratorSim {
@@ -228,6 +237,7 @@ fn process_command(
         Commands::BlueprintSave(args) => cmd_blueprint_save(sim, args),
         Commands::Show => cmd_show(sim),
         Commands::Set(args) => cmd_set(sim, args),
+        Commands::TufAssemble(args) => cmd_tuf_assemble(sim, args),
         Commands::Load(args) => cmd_load(sim, args),
         Commands::LoadExample(args) => cmd_load_example(sim, args),
         Commands::FileContents(args) => cmd_file_contents(args),
@@ -303,6 +313,9 @@ enum Commands {
     #[command(subcommand)]
     Set(SetArgs),
 
+    /// use tufaceous to generate a repo from a manifest
+    TufAssemble(TufAssembleArgs),
+
     /// save state to a file
     Save(SaveArgs),
     /// load state from a file
@@ -735,6 +748,20 @@ enum SetArgs {
     },
 }
 
+#[derive(Debug, Args)]
+struct TufAssembleArgs {
+    /// The tufaceous manifest path (relative to this crate's root)
+    manifest_path: Utf8PathBuf,
+
+    #[clap(
+        long,
+        // Use help here rather than a doc comment because rustdoc doesn't like
+        // `<` and `>` in help messages.
+        help = "The path to the output [default: repo-<system-version>.zip]"
+    )]
+    output: Option<Utf8PathBuf>,
+}
+
 #[derive(Debug, Args)]
 struct LoadArgs {
     /// input file
@@ -1770,6 +1797,60 @@ fn cmd_set(
     Ok(Some(rv))
 }
 
+fn cmd_tuf_assemble(
+    sim: &ReconfiguratorSim,
+    args: TufAssembleArgs,
+) -> anyhow::Result<Option<String>> {
+    let manifest_path = if args.manifest_path.is_absolute() {
+        args.manifest_path.clone()
+    } else {
+        // Use CARGO_MANIFEST_DIR to resolve relative paths.
+        let dir = std::env::var("CARGO_MANIFEST_DIR").context(
+            "CARGO_MANIFEST_DIR not set in environment \
+             (are you running with `cargo run`?)",
+        )?;
+        let mut dir = Utf8PathBuf::from(dir);
+        dir.push(&args.manifest_path);
+        dir
+    };
+
+    // Obtain the system version from the manifest.
+    let manifest =
+        ArtifactManifest::from_path(&manifest_path).with_context(|| {
+            format!("error parsing manifest from `{manifest_path}`")
+        })?;
+
+    let output_path = if let Some(output_path) = &args.output {
+        output_path.clone()
+    } else {
+        // This is relative to the current directory.
+        Utf8PathBuf::from(format!("repo-{}.zip", manifest.system_version))
+    };
+
+    // Just use a fixed key for now.
+    //
+    // In the future we may want to test changing the TUF key.
+    let args = tufaceous::Args::try_parse_from([
+        "tufaceous",
+        "--key",
+        DEFAULT_TUFACEOUS_KEY,
+        "assemble",
+        manifest_path.as_str(),
+        output_path.as_str(),
+    ])
+    .expect("args are valid so this shouldn't fail");
+    let rt =
+        tokio::runtime::Runtime::new().context("creating tokio runtime")?;
+    rt.block_on(async move { args.exec(&sim.log).await })
+        .context("error executing tufaceous assemble")?;
+
+    let rv = format!(
+        "created {} for system version {}",
+        output_path, manifest.system_version,
+    );
+    Ok(Some(rv))
+}
+
 fn read_file(
     input_path: &camino::Utf8Path,
 ) -> anyhow::Result<UnstableReconfiguratorState> {
diff --git a/dev-tools/reconfigurator-cli/tests/common/mod.rs b/dev-tools/reconfigurator-cli/tests/common/mod.rs
index ee1bc9c4f62..a2c901a28fb 100644
--- a/dev-tools/reconfigurator-cli/tests/common/mod.rs
+++ b/dev-tools/reconfigurator-cli/tests/common/mod.rs
@@ -57,6 +57,7 @@ pub fn script_with_cwd(
     // However, it's necessary to redact paths from generated log entries.
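     // Each `.field(label, pattern)` call redacts the text matching `pattern`
     // that follows `label` in the output.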
let stdout_text = Redactor::default() .uuids(false) + .field("assembling repository in", ".*") .field("extracting uploaded archive to", ".*") .field("created directory to store extracted artifacts, path:", ".*") .do_redact(&stdout_text); diff --git a/dev-tools/reconfigurator-cli/tests/input/target-release.txt b/dev-tools/reconfigurator-cli/tests/input/cmds-target-release.txt similarity index 90% rename from dev-tools/reconfigurator-cli/tests/input/target-release.txt rename to dev-tools/reconfigurator-cli/tests/input/cmds-target-release.txt index ac7e2c633a8..6d390365a9b 100644 --- a/dev-tools/reconfigurator-cli/tests/input/target-release.txt +++ b/dev-tools/reconfigurator-cli/tests/input/cmds-target-release.txt @@ -4,9 +4,12 @@ load-example --nsleds 3 --ndisks-per-sled 3 # Print the default target release show -# Load a target release from a fake file. -# This file was generated by the test runner in a temporary directory that this -# invocation of `reconfigurator-cli` is running out of as its working directory. +# Create a TUF repository from a fake manifest. (The output TUF repo is +# written to a temporary directory that this invocation of `reconfigurator-cli` +# is running out of as its working directory.) +tuf-assemble ../../update-common/manifests/fake.toml + +# Load the target release from the assembled TUF repository. set target-release repo-1.0.0.zip # Print the default target release again. diff --git a/dev-tools/reconfigurator-cli/tests/output/target-release-stderr b/dev-tools/reconfigurator-cli/tests/output/cmds-target-release-stderr similarity index 100% rename from dev-tools/reconfigurator-cli/tests/output/target-release-stderr rename to dev-tools/reconfigurator-cli/tests/output/cmds-target-release-stderr diff --git a/dev-tools/reconfigurator-cli/tests/output/target-release-stdout b/dev-tools/reconfigurator-cli/tests/output/cmds-target-release-stdout similarity index 99% rename from dev-tools/reconfigurator-cli/tests/output/target-release-stdout rename to dev-tools/reconfigurator-cli/tests/output/cmds-target-release-stdout index d9c3e2ca881..8218fd338b9 100644 --- a/dev-tools/reconfigurator-cli/tests/output/target-release-stdout +++ b/dev-tools/reconfigurator-cli/tests/output/cmds-target-release-stdout @@ -17,9 +17,16 @@ target release (generation 1): unset -> # Load a target release from a fake file. -> # This file was generated by the test runner in a temporary directory that this -> # invocation of `reconfigurator-cli` is running out of as its working directory. +> # Create a TUF repository from a fake manifest. (The output TUF repo is +> # written to a temporary directory that this invocation of `reconfigurator-cli` +> # is running out of as its working directory.) +> tuf-assemble ../../update-common/manifests/fake.toml +INFO assembling repository in +INFO artifacts assembled and archived to `repo-1.0.0.zip`, component: OmicronRepoAssembler +created repo-1.0.0.zip for system version 1.0.0 + + +> # Load the target release from the assembled TUF repository. > set target-release repo-1.0.0.zip INFO extracting uploaded archive to INFO created directory to store extracted artifacts, path: diff --git a/dev-tools/reconfigurator-cli/tests/test-custom.rs b/dev-tools/reconfigurator-cli/tests/test-custom.rs deleted file mode 100644 index 9ec2cff9d46..00000000000 --- a/dev-tools/reconfigurator-cli/tests/test-custom.rs +++ /dev/null @@ -1,47 +0,0 @@ -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v. 2.0. 
If a copy of the MPL was not distributed with this
-// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-//! `reconfigurator-cli` tests that require custom setup
-//!
-//! If your test does not require custom setup, use `test-scripts.rs` instead.
-
-use camino::Utf8PathBuf;
-use clap::Parser as _;
-use dropshot::ConfigLogging;
-use dropshot::ConfigLoggingLevel;
-use dropshot::test_util::LogContext;
-
-mod common;
-use common::script_with_cwd;
-
-// This test is special-cased because it requires custom setup (creating a TUF
-// repo).
-#[tokio::test]
-async fn test_target_release() {
-    let logctx = LogContext::new(
-        "reconfigurator-cli-target-release",
-        &ConfigLogging::StderrTerminal { level: ConfigLoggingLevel::Debug },
-    );
-    let tmpdir = camino_tempfile::tempdir().expect("failed to create tmpdir");
-    let path = tmpdir.path().join("repo-1.0.0.zip");
-    tufaceous::Args::try_parse_from([
-        "tufaceous",
-        "assemble",
-        "../../update-common/manifests/fake.toml",
-        path.as_str(),
-    ])
-    .expect("can't parse tufaceous args")
-    .exec(&logctx.log)
-    .await
-    .expect("can't assemble TUF repo");
-
-    script_with_cwd(
-        &Utf8PathBuf::from("tests/input/target-release.txt"),
-        Some(tmpdir.path()),
-    )
-    .expect("test failed");
-
-    // Dropping the tmpdir will clean it up.
-    logctx.cleanup_successful();
-}
diff --git a/nexus/db-model/src/target_release.rs b/nexus/db-model/src/target_release.rs
index cbc681912f6..3e3f8266592 100644
--- a/nexus/db-model/src/target_release.rs
+++ b/nexus/db-model/src/target_release.rs
@@ -64,11 +64,13 @@ impl TargetRelease {
     pub fn into_external(
         &self,
         release_source: views::TargetReleaseSource,
+        mupdate_override: bool,
     ) -> views::TargetRelease {
         views::TargetRelease {
             generation: (&self.generation.0).into(),
             time_requested: self.time_requested,
             release_source,
+            mupdate_override,
         }
     }
 }
diff --git a/nexus/db-queries/src/db/datastore/deployment.rs b/nexus/db-queries/src/db/datastore/deployment.rs
index 64144c610e8..ed21fa2452d 100644
--- a/nexus/db-queries/src/db/datastore/deployment.rs
+++ b/nexus/db-queries/src/db/datastore/deployment.rs
@@ -1560,6 +1560,23 @@ impl DataStore {
         Self::blueprint_current_target_only(&conn).await.map_err(|e| e.into())
     }
 
+    /// Get the minimum generation for the current target blueprint, if one
+    /// exists.
+    pub async fn blueprint_target_get_current_min_gen(
+        &self,
+        opctx: &OpContext,
+    ) -> Result<Generation, Error> {
+        opctx.authorize(authz::Action::Read, &authz::BLUEPRINT_CONFIG).await?;
+        let conn = self.pool_connection_authorized(opctx).await?;
+        let target = Self::blueprint_current_target_only(&conn).await?;
+
+        let authz_blueprint = authz_blueprint_from_id(target.target_id);
+        Self::blueprint_get_minimum_generation_connection(
+            &authz_blueprint,
+            &conn,
+        )
+        .await
+    }
+
     // Helper to fetch the current blueprint target (without fetching the entire
     // blueprint for that target).
     //
@@ -1587,6 +1604,28 @@
 
         Ok(current_target.into())
     }
+
+    // Helper to fetch the minimum generation for a blueprint ID (without
+    // fetching the entire blueprint for that ID).
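+    // Called by `blueprint_target_get_current_min_gen` above; the
+    // target-release view uses the result to decide whether a MUPdate
+    // override is in effect.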
+    async fn blueprint_get_minimum_generation_connection(
+        authz: &authz::Blueprint,
+        conn: &async_bb8_diesel::Connection<DbConnection>,
+    ) -> Result<Generation, Error> {
+        use nexus_db_schema::schema::blueprint::dsl;
+
+        let id = authz.id();
+        let db_blueprint = dsl::blueprint
+            .filter(dsl::id.eq(id))
+            .select(DbBlueprint::as_select())
+            .first_async::<DbBlueprint>(conn)
+            .await
+            .optional()
+            .map_err(|e| public_error_from_diesel(e, ErrorHandler::Server))?;
+        let db_blueprint = db_blueprint.ok_or_else(|| {
+            Error::not_found_by_id(ResourceType::Blueprint, &id)
+        })?;
+        Ok(db_blueprint.target_release_minimum_generation.0)
+    }
 }
 
 // Helper to create an `authz::Blueprint` for a specific blueprint ID
diff --git a/nexus/db-queries/src/db/datastore/target_release.rs b/nexus/db-queries/src/db/datastore/target_release.rs
index a6f82edff99..023e074322e 100644
--- a/nexus/db-queries/src/db/datastore/target_release.rs
+++ b/nexus/db-queries/src/db/datastore/target_release.rs
@@ -125,7 +125,15 @@ impl DataStore {
                 }
             }
         };
-        Ok(target_release.into_external(release_source))
+        // We choose to fetch the blueprint directly from the database rather
+        // than relying on the cached blueprint in Nexus because our APIs try
+        // to be strongly consistent. This shows up (or will show up) as a
+        // warning in the UI, and we don't want the warning to flicker in and
+        // out of existence based on which Nexus is getting hit.
+        let min_gen = self.blueprint_target_get_current_min_gen(opctx).await?;
+        // The semantics of min_gen mean we use a > sign here, not >= (equal generations satisfy the minimum).
+        let mupdate_override = min_gen > target_release.generation.0;
+        Ok(target_release.into_external(release_source, mupdate_override))
     }
 }
 
@@ -135,6 +143,12 @@ mod test {
     use crate::db::model::{Generation, TargetReleaseSource};
     use crate::db::pub_test_utils::TestDatabase;
    use chrono::{TimeDelta, Utc};
+    use nexus_inventory::now_db_precision;
+    use nexus_reconfigurator_planning::blueprint_builder::BlueprintBuilder;
+    use nexus_reconfigurator_planning::example::{
+        ExampleSystemBuilder, SimRngState,
+    };
+    use nexus_types::deployment::BlueprintTarget;
     use omicron_common::api::external::{
         TufArtifactMeta, TufRepoDescription, TufRepoMeta,
     };
@@ -145,7 +159,8 @@ mod test {
 
     #[tokio::test]
     async fn target_release_datastore() {
-        let logctx = dev::test_setup_log("target_release_datastore");
+        const TEST_NAME: &str = "target_release_datastore";
+        let logctx = dev::test_setup_log(TEST_NAME);
         let db = TestDatabase::new_with_datastore(&logctx.log).await;
         let (opctx, datastore) = (db.opctx(), db.datastore());
 
@@ -163,6 +178,56 @@ mod test {
         );
         assert!(initial_target_release.tuf_repo_id.is_none());
 
+        // Set up an initial blueprint and make it the target. This models real
+        // systems which always have a target blueprint.
+        let mut rng = SimRngState::from_seed(TEST_NAME);
+        let (system, mut blueprint) = ExampleSystemBuilder::new_with_rng(
+            &logctx.log,
+            rng.next_system_rng(),
+        )
+        .build();
+        assert_eq!(
+            blueprint.target_release_minimum_generation,
+            1.into(),
+            "initial blueprint should have minimum generation of 1",
+        );
+        // Treat this blueprint as the initial one for the system.
+        blueprint.parent_blueprint_id = None;
+
+        datastore
+            .blueprint_insert(&opctx, &blueprint)
+            .await
+            .expect("inserted blueprint");
+        datastore
+            .blueprint_target_set_current(
+                opctx,
+                BlueprintTarget {
+                    target_id: blueprint.id,
+                    // enabled = true or false shouldn't matter for this.
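+                    // (The mupdate-override computation only reads the
+                    // blueprint's target_release_minimum_generation.)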
+                    enabled: true,
+                    time_made_target: now_db_precision(),
+                },
+            )
+            .await
+            .expect("set blueprint target");
+
+        // We should always be able to get a view of the target release.
+        let initial_target_release_view = datastore
+            .target_release_view(opctx, &initial_target_release)
+            .await
+            .expect("got target release");
+        eprintln!(
+            "initial target release view: {:#?}",
+            initial_target_release_view
+        );
+
+        // This target release should not have the mupdate override set, because
+        // the generation is >= the minimum generation in the target blueprint.
+        assert!(
+            !initial_target_release_view.mupdate_override,
+            "mupdate_override should be false for initial target release"
+        );
+
         // We should be able to set a new generation just like the first.
         // We allow some slack in the timestamp comparison because the
         // database only stores timestamps with μsec precision.
@@ -256,6 +321,86 @@ mod test {
         );
         assert_eq!(target_release.tuf_repo_id, Some(tuf_repo_id));
 
+        // Generate a new blueprint with a greater target release generation.
+        let mut builder = BlueprintBuilder::new_based_on(
+            &logctx.log,
+            &blueprint,
+            &system.input,
+            &system.collection,
+            TEST_NAME,
+        )
+        .expect("created blueprint builder");
+        builder.set_rng(rng.next_planner_rng());
+        builder
+            .set_target_release_minimum_generation(
+                blueprint.target_release_minimum_generation,
+                5.into(),
+            )
+            .expect("set target release minimum generation");
+        let bp2 = builder.build();
+
+        datastore
+            .blueprint_insert(&opctx, &bp2)
+            .await
+            .expect("inserted blueprint");
+        datastore
+            .blueprint_target_set_current(
+                opctx,
+                BlueprintTarget {
+                    target_id: bp2.id,
+                    // enabled = true or false shouldn't matter for this.
+                    enabled: true,
+                    time_made_target: now_db_precision(),
+                },
+            )
+            .await
+            .expect("set blueprint target");
+
+        // Fetch the target release again.
+        let target_release = datastore
+            .target_release_get_current(opctx)
+            .await
+            .expect("got target release");
+        let target_release_view_2 = datastore
+            .target_release_view(opctx, &target_release)
+            .await
+            .expect("got target release");
+
+        eprintln!("target release view 2: {target_release_view_2:#?}");
+
+        assert!(
+            target_release_view_2.mupdate_override,
+            "mupdate override is set",
+        );
+
+        // Now set the target release again -- this should cause the mupdate
+        // override to disappear.
+        let before = Utc::now();
+        let target_release = datastore
+            .target_release_insert(
+                opctx,
+                TargetRelease::new_system_version(&target_release, tuf_repo_id),
+            )
+            .await
+            .unwrap();
+        let after = Utc::now();
+
+        assert_eq!(target_release.generation, Generation(5.into()));
+        assert!(target_release.time_requested >= before);
+        assert!(target_release.time_requested <= after);
+
+        let target_release_view_3 = datastore
+            .target_release_view(opctx, &target_release)
+            .await
+            .expect("got target release");
+
+        eprintln!("target release view 3: {target_release_view_3:#?}");
+
+        assert!(
+            !target_release_view_3.mupdate_override,
+            "mupdate override is not set",
+        );
+
         // Clean up.
         db.terminate().await;
         logctx.cleanup_successful();
diff --git a/nexus/types/src/external_api/views.rs b/nexus/types/src/external_api/views.rs
index 88c1d177ddb..98f1124d933 100644
--- a/nexus/types/src/external_api/views.rs
+++ b/nexus/types/src/external_api/views.rs
@@ -1488,6 +1488,17 @@ pub struct TargetRelease {
     /// The source of the target release.
pub release_source: TargetReleaseSource, + + /// If true, indicates that at least one sled in the system has been updated + /// through the recovery (MUPdate) path since the last time the target + /// release was set. + /// + /// In this case, the system will ignore the currently-set target release, + /// on the assumption that continuing an update may reintroduce or + /// exacerbate whatever problem caused the recovery path to be used. An + /// operator must set the target release again in order to resume automated + /// updates. + pub mupdate_override: bool, } fn expected_one_of() -> String { diff --git a/openapi/nexus.json b/openapi/nexus.json index 5306f4417e5..f4002704e3b 100644 --- a/openapi/nexus.json +++ b/openapi/nexus.json @@ -24802,6 +24802,10 @@ "type": "integer", "format": "int64" }, + "mupdate_override": { + "description": "If true, indicates that at least one sled in the system has been updated through the recovery (MUPdate) path since the last time the target release was set.\n\nIn this case, the system will ignore the currently-set target release, on the assumption that continuing an update may reintroduce or exacerbate whatever problem caused the recovery path to be used. An operator must set the target release again in order to resume automated updates.", + "type": "boolean" + }, "release_source": { "description": "The source of the target release.", "allOf": [ @@ -24818,6 +24822,7 @@ }, "required": [ "generation", + "mupdate_override", "release_source", "time_requested" ] From b4d7d5112f36d26cd9a3a4345287d8482a0b6710 Mon Sep 17 00:00:00 2001 From: Rain Date: Mon, 30 Jun 2025 23:41:24 +0000 Subject: [PATCH 2/3] add non-matching target release Created using spr 1.3.6-beta.1 --- dev-tools/reconfigurator-cli/src/lib.rs | 14 +- .../tests/input/cmds-noop-image-source.txt | 12 +- .../output/cmds-noop-image-source-stdout | 152 ++++++++++++++---- update-common/manifests/fake-non-semver.toml | 13 +- 4 files changed, 148 insertions(+), 43 deletions(-) diff --git a/dev-tools/reconfigurator-cli/src/lib.rs b/dev-tools/reconfigurator-cli/src/lib.rs index 5b8600b9998..77df8c46028 100644 --- a/dev-tools/reconfigurator-cli/src/lib.rs +++ b/dev-tools/reconfigurator-cli/src/lib.rs @@ -799,6 +799,10 @@ struct TufAssembleArgs { /// The tufaceous manifest path (relative to this crate's root) manifest_path: Utf8PathBuf, + /// Allow non-semver artifact versions. + #[clap(long)] + allow_non_semver: bool, + #[clap( long, // Use help here rather than a doc comment because rustdoc doesn't like @@ -1962,15 +1966,19 @@ fn cmd_tuf_assemble( // Just use a fixed key for now. // // In the future we may want to test changing the TUF key. 
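+    // (The fixed key is passed to tufaceous via `--key`, as the argv below
+    // shows.)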
- let args = tufaceous::Args::try_parse_from([ + let mut tufaceous_args = vec![ "tufaceous", "--key", DEFAULT_TUFACEOUS_KEY, "assemble", manifest_path.as_str(), output_path.as_str(), - ]) - .expect("args are valid so this shouldn't fail"); + ]; + if args.allow_non_semver { + tufaceous_args.push("--allow-non-semver"); + } + let args = tufaceous::Args::try_parse_from(tufaceous_args) + .expect("args are valid so this shouldn't fail"); let rt = tokio::runtime::Runtime::new().context("creating tokio runtime")?; rt.block_on(async move { args.exec(&sim.log).await }) diff --git a/dev-tools/reconfigurator-cli/tests/input/cmds-noop-image-source.txt b/dev-tools/reconfigurator-cli/tests/input/cmds-noop-image-source.txt index 4d946e8b884..06c49d2dacf 100644 --- a/dev-tools/reconfigurator-cli/tests/input/cmds-noop-image-source.txt +++ b/dev-tools/reconfigurator-cli/tests/input/cmds-noop-image-source.txt @@ -1,5 +1,5 @@ # Load example system -load-example --nsleds 4 --ndisks-per-sled 1 +load-example --nsleds 5 --ndisks-per-sled 1 sled-list @@ -7,8 +7,10 @@ sled-list # written to a temporary directory that this invocation of `reconfigurator-cli` # is running out of as its working directory.) tuf-assemble ../../update-common/manifests/fake.toml +# Create a second TUF repository from a different fake manifest. +tuf-assemble ../../update-common/manifests/fake-non-semver.toml --allow-non-semver -# Load the target release from the assembled TUF repository. +# Load the target release from the first TUF repository. set target-release repo-1.0.0.zip # On one sled, update the install dataset. @@ -26,7 +28,11 @@ blueprint-edit latest set-remove-mupdate-override aff6c093-197d-42c5-ad80-9f10ba # On a fourth sled, simulate an error validating the install dataset image on one zone. # We pick ntp because every sled has it. -sled-update-install-dataset d81c6a84-79b8-4958-ae41-ea46c9b19763 --to-target-release --with-zone-error ntp +sled-update-install-dataset b82ede02-399c-48c6-a1de-411df4fa49a7 --to-target-release --with-zone-error ntp + +# On a fifth sled, set the install dataset to the repo-2.0.0.zip generated by the +# second TUF repository. +sled-update-install-dataset d81c6a84-79b8-4958-ae41-ea46c9b19763 --from-repo repo-2.0.0.zip # Generate an inventory and run a blueprint planning step. 
inventory-generate diff --git a/dev-tools/reconfigurator-cli/tests/output/cmds-noop-image-source-stdout b/dev-tools/reconfigurator-cli/tests/output/cmds-noop-image-source-stdout index 828b8698d5d..5928752a14e 100644 --- a/dev-tools/reconfigurator-cli/tests/output/cmds-noop-image-source-stdout +++ b/dev-tools/reconfigurator-cli/tests/output/cmds-noop-image-source-stdout @@ -1,6 +1,6 @@ using provided RNG seed: reconfigurator-cli-test > # Load example system -> load-example --nsleds 4 --ndisks-per-sled 1 +> load-example --nsleds 5 --ndisks-per-sled 1 loaded example system with: - collection: f45ba181-4b56-42cc-a762-874d90184a43 - blueprint: dbcbd3d6-41ff-48ae-ac0b-1becc9b2fd21 @@ -11,6 +11,7 @@ ID SERIAL NZPOOLS SUBNET 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c serial1 1 fd00:1122:3344:102::/64 98e6b7c2-2efa-41ca-b20a-0a4d61102fe6 serial0 1 fd00:1122:3344:101::/64 aff6c093-197d-42c5-ad80-9f10ba051a34 serial3 1 fd00:1122:3344:104::/64 +b82ede02-399c-48c6-a1de-411df4fa49a7 serial4 1 fd00:1122:3344:105::/64 d81c6a84-79b8-4958-ae41-ea46c9b19763 serial2 1 fd00:1122:3344:103::/64 @@ -22,8 +23,14 @@ INFO assembling repository in INFO artifacts assembled and archived to `repo-1.0.0.zip`, component: OmicronRepoAssembler created repo-1.0.0.zip for system version 1.0.0 +> # Create a second TUF repository from a different fake manifest. +> tuf-assemble ../../update-common/manifests/fake-non-semver.toml --allow-non-semver +INFO assembling repository in +INFO artifacts assembled and archived to `repo-2.0.0.zip`, component: OmicronRepoAssembler +created repo-2.0.0.zip for system version 2.0.0 + -> # Load the target release from the assembled TUF repository. +> # Load the target release from the first TUF repository. > set target-release repo-1.0.0.zip INFO extracting uploaded archive to INFO created directory to store extracted artifacts, path: @@ -80,8 +87,43 @@ blueprint 8da82a8e-bf97-4fbd-8ddd-9f6462732cf1 created from latest blueprint (db > # On a fourth sled, simulate an error validating the install dataset image on one zone. > # We pick ntp because every sled has it. -> sled-update-install-dataset d81c6a84-79b8-4958-ae41-ea46c9b19763 --to-target-release --with-zone-error ntp -sled d81c6a84-79b8-4958-ae41-ea46c9b19763: install dataset updated: to target release (system version 1.0.0, 1 zone errors) +> sled-update-install-dataset b82ede02-399c-48c6-a1de-411df4fa49a7 --to-target-release --with-zone-error ntp +sled b82ede02-399c-48c6-a1de-411df4fa49a7: install dataset updated: to target release (system version 1.0.0, 1 zone errors) + + +> # On a fifth sled, set the install dataset to the repo-2.0.0.zip generated by the +> # second TUF repository. 
+> sled-update-install-dataset d81c6a84-79b8-4958-ae41-ea46c9b19763 --from-repo repo-2.0.0.zip +INFO extracting uploaded archive to +INFO created directory to store extracted artifacts, path: +INFO added artifact, name: fake-gimlet-sp, kind: gimlet_sp, version: 2.0.0, hash: ce1e98a8a9ae541654508f101d59a3ddeba3d28177f1d42d5614248eef0b820b, length: 751 +INFO added artifact, name: fake-gimlet-rot, kind: gimlet_rot_image_a, version: 2.0.0, hash: e7047f500a5391e22cd8e6a8d3ae66c9d9de7a8d021e6e9a10e05bb6d554da77, length: 743 +INFO added artifact, name: fake-gimlet-rot, kind: gimlet_rot_image_b, version: 2.0.0, hash: e7047f500a5391e22cd8e6a8d3ae66c9d9de7a8d021e6e9a10e05bb6d554da77, length: 743 +INFO added artifact, name: fake-gimlet-rot-bootloader, kind: gimlet_rot_bootloader, version: 2.0.0, hash: 238a9bfc87f02141c7555ff5ebb7a22ec37bc24d6f724ce3af05ed7c412cd115, length: 750 +INFO added artifact, name: fake-host, kind: host_phase_1, version: 2.0.0, hash: 44714733af7600b30a50bfd2cbaf707ff7ee9724073ff70a6732e55a88864cf6, length: 524288 +INFO added artifact, name: fake-host, kind: host_phase_2, version: 2.0.0, hash: 0c0362b640cece5b9a5e86d8fa683bd2eb84c3e7f90731f597197d604ffa76e3, length: 1048576 +INFO added artifact, name: fake-trampoline, kind: trampoline_phase_1, version: non-semver, hash: 24f8ca0d52da5238644b11964c6feda854c7530820713efefa7ac91683b3fc76, length: 524288 +INFO added artifact, name: fake-trampoline, kind: trampoline_phase_2, version: non-semver, hash: 5fceee33d358aacb8a34ca93a30e28354bd8f341f6e3e895a2cafe83904f3d80, length: 1048576 +INFO added artifact, name: clickhouse, kind: zone, version: 2.0.0, hash: bb2d1ff02d11f72bc9049ae57f27536207519a1859d29f8d7a90ab3b44d56b08, length: 1687 +INFO added artifact, name: clickhouse_keeper, kind: zone, version: 2.0.0, hash: 1eb9f24be68f13c274aa0ac9b863cec520dbfe762620c328431728d75bfd2198, length: 1691 +INFO added artifact, name: clickhouse_server, kind: zone, version: 2.0.0, hash: 50fe271948672a9af1ba5f96c9d87ff2736fa72d78dfef598a79fa0cc8a00474, length: 1691 +INFO added artifact, name: cockroachdb, kind: zone, version: 2.0.0, hash: ebc82bf181db864b78cb7e3ddedf7ab1dd8fe7b377b02846f3c27cf0387bb387, length: 1690 +INFO added artifact, name: crucible-zone, kind: zone, version: 2.0.0, hash: 866f6a7c2e51c056fb722b5113e80181cc9cd8b712a0d3dbf1edc4ce29e5229e, length: 1691 +INFO added artifact, name: crucible-pantry-zone, kind: zone, version: 2.0.0, hash: 3ff26dad96faa8f67251f5de40458b4f809d536bfe8572134da0e42c2fa12674, length: 1696 +INFO added artifact, name: external-dns, kind: zone, version: 2.0.0, hash: f282c45771429f7bebf71f0cc668521066db57c6bb07fcfccdfb44825d3d930f, length: 1690 +INFO added artifact, name: internal-dns, kind: zone, version: 2.0.0, hash: de30657a72b066b8ef1f56351a0a5d4d7000da0a62c4be9b2e949a107ca8a389, length: 1690 +INFO added artifact, name: ntp, kind: zone, version: 2.0.0, hash: d76e26198daed69cdae04490d7477f8c842e0dbe37d463eac0d0a8d3fb803095, length: 1682 +INFO added artifact, name: nexus, kind: zone, version: 2.0.0, hash: e9b7035f41848a987a798c15ac424cc91dd662b1af0920d58d8aa1ebad7467b6, length: 1683 +INFO added artifact, name: oximeter, kind: zone, version: 2.0.0, hash: 9f4bc56a15d5fd943fdac94309994b8fd73aa2be1ec61faf44bfcf2356c9dc23, length: 1683 +INFO added artifact, name: fake-psc-sp, kind: psc_sp, version: 2.0.0, hash: 7adf04de523865003dbf120cebddd5fcf5bad650640281b294197e6ca7016e47, length: 748 +INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_a, version: 2.0.0, hash: 
6d1c432647e9b9e4cf846ff5d17932d75cba49c0d3f23d24243238bc40bcfef5, length: 746 +INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_b, version: 2.0.0, hash: 6d1c432647e9b9e4cf846ff5d17932d75cba49c0d3f23d24243238bc40bcfef5, length: 746 +INFO added artifact, name: fake-psc-rot-bootloader, kind: psc_rot_bootloader, version: 2.0.0, hash: 238a9bfc87f02141c7555ff5ebb7a22ec37bc24d6f724ce3af05ed7c412cd115, length: 750 +INFO added artifact, name: fake-switch-sp, kind: switch_sp, version: 2.0.0, hash: 5a9019c484c051edfab4903a7a5e1817c89bd555eea3e48f6b92c6e67442e13e, length: 746 +INFO added artifact, name: fake-switch-rot, kind: switch_rot_image_a, version: 2.0.0, hash: e7047f500a5391e22cd8e6a8d3ae66c9d9de7a8d021e6e9a10e05bb6d554da77, length: 743 +INFO added artifact, name: fake-switch-rot, kind: switch_rot_image_b, version: 2.0.0, hash: e7047f500a5391e22cd8e6a8d3ae66c9d9de7a8d021e6e9a10e05bb6d554da77, length: 743 +INFO added artifact, name: fake-switch-rot-bootloader, kind: switch_rot_bootloader, version: non-semver-2, hash: a0d6df68e6112edcf62c035947563d2a58d06e11443b95b90bf087da710550a5, length: 758 +sled d81c6a84-79b8-4958-ae41-ea46c9b19763: install dataset updated: from repo at repo-2.0.0.zip (system version 2.0.0) > # Generate an inventory and run a blueprint planning step. @@ -99,9 +141,10 @@ INFO install dataset artifact hash matches TUF repo, switching out the zone imag INFO noop converting 7/7 install-dataset zones to artifact store, sled_id: 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c WARN skipping noop image source check since sled-agent encountered error retrieving zone manifest (this is abnormal), sled_id: 98e6b7c2-2efa-41ca-b20a-0a4d61102fe6, error: reconfigurator-sim simulated error: simulated error obtaining zone manifest INFO skipping noop image source check on sled (blueprint has get_remove_mupdate_override set for sled), sled_id: aff6c093-197d-42c5-ad80-9f10ba051a34, bp_remove_mupdate_override_id: ffffffff-ffff-ffff-ffff-ffffffffffff -WARN zone manifest inventory indicated install dataset artifact is invalid, not using artifact (this is abnormal), sled_id: d81c6a84-79b8-4958-ae41-ea46c9b19763, zone_id: 1ac82e10-6e82-4d4d-a1a7-c2aaf0284408, kind: internal_ntp, file_name: ntp.tar.gz, error: reconfigurator-sim: simulated error validating zone image -INFO install dataset artifact hash matches TUF repo, switching out the zone image source to Artifact, sled_id: d81c6a84-79b8-4958-ae41-ea46c9b19763, tuf_artifact_id: crucible-zone v1.0.0 (zone) -INFO noop converting 1/2 install-dataset zones to artifact store, sled_id: d81c6a84-79b8-4958-ae41-ea46c9b19763 +WARN zone manifest inventory indicated install dataset artifact is invalid, not using artifact (this is abnormal), sled_id: b82ede02-399c-48c6-a1de-411df4fa49a7, zone_id: 805d9b32-915a-4e16-aff8-dbf31cc21373, kind: internal_ntp, file_name: ntp.tar.gz, error: reconfigurator-sim: simulated error validating zone image +INFO install dataset artifact hash matches TUF repo, switching out the zone image source to Artifact, sled_id: b82ede02-399c-48c6-a1de-411df4fa49a7, tuf_artifact_id: crucible-zone v1.0.0 (zone) +INFO noop converting 1/2 install-dataset zones to artifact store, sled_id: b82ede02-399c-48c6-a1de-411df4fa49a7 +INFO noop converting 0/2 install-dataset zones to artifact store, sled_id: d81c6a84-79b8-4958-ae41-ea46c9b19763 INFO sufficient BoundaryNtp zones exist in plan, desired_count: 0, current_count: 0 INFO sufficient Clickhouse zones exist in plan, desired_count: 1, current_count: 1 INFO sufficient ClickhouseKeeper zones exist 
in plan, desired_count: 0, current_count: 0 @@ -242,32 +285,61 @@ parent: 8da82a8e-bf97-4fbd-8ddd-9f6462732cf1 - sled: d81c6a84-79b8-4958-ae41-ea46c9b19763 (active, config generation 3) + sled: b82ede02-399c-48c6-a1de-411df4fa49a7 (active, config generation 3) physical disks: ------------------------------------------------------------------------------------ vendor model serial disposition ------------------------------------------------------------------------------------ - fake-vendor fake-model serial-4930954e-9ac7-4453-b63f-5ab97c389a99 in service + fake-vendor fake-model serial-4c4d1e39-411c-4f56-9a05-ed325928c343 in service datasets: ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ dataset name dataset id disposition quota reservation compression ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crucible 254037b1-5372-453d-a07c-c8223b2ea0fc in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone 1fca599f-eed2-4473-8ff6-c94a1f10a755 in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_crucible_8f80b0a7-672b-40f9-81d2-18d92e4e1bae bbe582a5-b481-42db-9b2d-52ee15ce8c6e in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_ntp_1ac82e10-6e82-4d4d-a1a7-c2aaf0284408 483d01f7-b1b3-4078-9a29-fe7314d91e7f in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/debug 097a6d9f-b800-43ce-baf5-53376fc0ad2f in service 100 GiB none gzip-9 + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crucible 36fc643b-750a-4911-9c53-c37164da4c30 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone a8f7759a-7d45-4d84-93e0-389bfe3049a5 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone/oxz_crucible_c247aef2-4e7c-4690-8a5d-c040b2644ebf d081f65d-4a27-44ab-83ee-00930924ac87 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone/oxz_ntp_805d9b32-915a-4e16-aff8-dbf31cc21373 77a72699-6928-4839-9141-371c778598b3 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/debug ac20ae29-41d5-4b7e-bf48-64867c019b27 in service 100 GiB none gzip-9 omicron zones: -------------------------------------------------------------------------------------------------------------------- zone type zone id image source disposition underlay IP -------------------------------------------------------------------------------------------------------------------- - crucible 8f80b0a7-672b-40f9-81d2-18d92e4e1bae artifact: version 1.0.0 in service fd00:1122:3344:103::22 - internal_ntp 1ac82e10-6e82-4d4d-a1a7-c2aaf0284408 install dataset in service fd00:1122:3344:103::21 + crucible c247aef2-4e7c-4690-8a5d-c040b2644ebf artifact: version 1.0.0 in service fd00:1122:3344:105::22 + internal_ntp 805d9b32-915a-4e16-aff8-dbf31cc21373 install dataset in service fd00:1122:3344:105::21 + + + + sled: d81c6a84-79b8-4958-ae41-ea46c9b19763 (active, config generation 2) + + physical disks: + ------------------------------------------------------------------------------------ + vendor model serial disposition + ------------------------------------------------------------------------------------ + fake-vendor fake-model 
serial-4930954e-9ac7-4453-b63f-5ab97c389a99 in service + + + datasets: + ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + dataset name dataset id disposition quota reservation compression + ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crucible 5007977c-9fe5-4f81-9d10-44d16405ee88 in service none none off + oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone 4b00211c-b3bf-46fe-9daa-6a56310dd96d in service none none off + oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_crucible_2618b6ef-25e4-4f88-9c72-d516e291bd66 0aff2076-6844-434f-85e5-a3b662df45aa in service none none off + oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_ntp_a9847fba-3865-4af1-981d-bade5b56fb5d edb38f48-39c8-4c4a-85b8-32bb5c9fa12a in service none none off + oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/debug 9b6e1e4a-fd94-4cab-ac98-1db56ffc50e1 in service 100 GiB none gzip-9 + + + omicron zones: + ------------------------------------------------------------------------------------------------------------ + zone type zone id image source disposition underlay IP + ------------------------------------------------------------------------------------------------------------ + crucible 2618b6ef-25e4-4f88-9c72-d516e291bd66 install dataset in service fd00:1122:3344:103::22 + internal_ntp a9847fba-3865-4af1-981d-bade5b56fb5d install dataset in service fd00:1122:3344:103::21 COCKROACHDB SETTINGS: @@ -281,7 +353,7 @@ parent: 8da82a8e-bf97-4fbd-8ddd-9f6462732cf1 METADATA: created by::::::::::::: reconfigurator-sim created at::::::::::::: - comment:::::::::::::::: sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled d81c6a84-79b8-4958-ae41-ea46c9b19763: performed 1 noop zone image source updates + comment:::::::::::::::: sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c: performed 7 noop zone image source updates, sled b82ede02-399c-48c6-a1de-411df4fa49a7: performed 1 noop zone image source updates internal DNS version::: 1 external DNS version::: 1 target release min gen: 1 @@ -349,32 +421,32 @@ to: blueprint 58d5e830-0884-47d8-a7cd-b2b3751adeb4 └─ + artifact: version 1.0.0 - sled d81c6a84-79b8-4958-ae41-ea46c9b19763 (active, config 
generation 2 -> 3): + sled b82ede02-399c-48c6-a1de-411df4fa49a7 (active, config generation 2 -> 3): physical disks: ------------------------------------------------------------------------------------ vendor model serial disposition ------------------------------------------------------------------------------------ - fake-vendor fake-model serial-4930954e-9ac7-4453-b63f-5ab97c389a99 in service + fake-vendor fake-model serial-4c4d1e39-411c-4f56-9a05-ed325928c343 in service datasets: ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ dataset name dataset id disposition quota reservation compression ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crucible 254037b1-5372-453d-a07c-c8223b2ea0fc in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone 1fca599f-eed2-4473-8ff6-c94a1f10a755 in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_crucible_8f80b0a7-672b-40f9-81d2-18d92e4e1bae bbe582a5-b481-42db-9b2d-52ee15ce8c6e in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/zone/oxz_ntp_1ac82e10-6e82-4d4d-a1a7-c2aaf0284408 483d01f7-b1b3-4078-9a29-fe7314d91e7f in service none none off - oxp_4930954e-9ac7-4453-b63f-5ab97c389a99/crypt/debug 097a6d9f-b800-43ce-baf5-53376fc0ad2f in service 100 GiB none gzip-9 + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crucible 36fc643b-750a-4911-9c53-c37164da4c30 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone a8f7759a-7d45-4d84-93e0-389bfe3049a5 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone/oxz_crucible_c247aef2-4e7c-4690-8a5d-c040b2644ebf d081f65d-4a27-44ab-83ee-00930924ac87 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/zone/oxz_ntp_805d9b32-915a-4e16-aff8-dbf31cc21373 77a72699-6928-4839-9141-371c778598b3 in service none none off + oxp_4c4d1e39-411c-4f56-9a05-ed325928c343/crypt/debug ac20ae29-41d5-4b7e-bf48-64867c019b27 in service 100 GiB none gzip-9 omicron zones: ---------------------------------------------------------------------------------------------------------------------- zone type zone id image source disposition underlay IP ---------------------------------------------------------------------------------------------------------------------- - internal_ntp 1ac82e10-6e82-4d4d-a1a7-c2aaf0284408 install dataset in service fd00:1122:3344:103::21 -* crucible 8f80b0a7-672b-40f9-81d2-18d92e4e1bae - install dataset in service fd00:1122:3344:103::22 + internal_ntp 805d9b32-915a-4e16-aff8-dbf31cc21373 install dataset in service fd00:1122:3344:105::21 +* crucible c247aef2-4e7c-4690-8a5d-c040b2644ebf - install dataset in service fd00:1122:3344:105::22 └─ + artifact: version 1.0.0 @@ -404,8 +476,8 @@ internal DNS: DNS zone: "control-plane.oxide.internal" (unchanged) name: 0c71b3b2-6ceb-4e8f-b020-b08675e83038.host (records: 1) AAAA fd00:1122:3344:101::22 - name: 1ac82e10-6e82-4d4d-a1a7-c2aaf0284408.host (records: 1) - AAAA fd00:1122:3344:103::21 + name: 2618b6ef-25e4-4f88-9c72-d516e291bd66.host (records: 1) + AAAA fd00:1122:3344:103::22 name: 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c.sled (records: 1) AAAA fd00:1122:3344:102::1 name: 353b3b65-20f7-48c3-88f7-495bd5d31545.host (records: 1) 
@@ -428,8 +500,8 @@ internal DNS: AAAA fd00:1122:3344:102::24 name: 803bfb63-c246-41db-b0da-d3b87ddfc63d.host (records: 1) AAAA fd00:1122:3344:101::23 - name: 8f80b0a7-672b-40f9-81d2-18d92e4e1bae.host (records: 1) - AAAA fd00:1122:3344:103::22 + name: 805d9b32-915a-4e16-aff8-dbf31cc21373.host (records: 1) + AAAA fd00:1122:3344:105::21 name: 95c17269-db13-4002-b61c-cbb273c0748e.host (records: 1) AAAA fd00:1122:3344:104::23 name: 98e6b7c2-2efa-41ca-b20a-0a4d61102fe6.sled (records: 1) @@ -450,22 +522,25 @@ internal DNS: SRV port 17000 4c3f0eba-66fa-4a76-bab7-2e5b1b4212bf.host.control-plane.oxide.internal SRV port 17000 ad6a3a03-8d0f-4504-99a4-cbf73d69b973.host.control-plane.oxide.internal SRV port 17000 ba4994a8-23f9-4b1a-a84f-a08d74591389.host.control-plane.oxide.internal + name: _crucible._tcp.2618b6ef-25e4-4f88-9c72-d516e291bd66 (records: 1) + SRV port 32345 2618b6ef-25e4-4f88-9c72-d516e291bd66.host.control-plane.oxide.internal name: _crucible._tcp.5199c033-4cf9-4ab6-8ae7-566bd7606363 (records: 1) SRV port 32345 5199c033-4cf9-4ab6-8ae7-566bd7606363.host.control-plane.oxide.internal - name: _crucible._tcp.8f80b0a7-672b-40f9-81d2-18d92e4e1bae (records: 1) - SRV port 32345 8f80b0a7-672b-40f9-81d2-18d92e4e1bae.host.control-plane.oxide.internal name: _crucible._tcp.bd354eef-d8a6-4165-9124-283fb5e46d77 (records: 1) SRV port 32345 bd354eef-d8a6-4165-9124-283fb5e46d77.host.control-plane.oxide.internal + name: _crucible._tcp.c247aef2-4e7c-4690-8a5d-c040b2644ebf (records: 1) + SRV port 32345 c247aef2-4e7c-4690-8a5d-c040b2644ebf.host.control-plane.oxide.internal name: _crucible._tcp.ef8992cd-92a2-400c-9e59-3821c70af149 (records: 1) SRV port 32345 ef8992cd-92a2-400c-9e59-3821c70af149.host.control-plane.oxide.internal name: _external-dns._tcp (records: 3) SRV port 5353 6c3ae381-04f7-41ea-b0ac-74db387dbc3a.host.control-plane.oxide.internal SRV port 5353 803bfb63-c246-41db-b0da-d3b87ddfc63d.host.control-plane.oxide.internal SRV port 5353 95c17269-db13-4002-b61c-cbb273c0748e.host.control-plane.oxide.internal - name: _internal-ntp._tcp (records: 4) - SRV port 123 1ac82e10-6e82-4d4d-a1a7-c2aaf0284408.host.control-plane.oxide.internal + name: _internal-ntp._tcp (records: 5) SRV port 123 62620961-fc4a-481e-968b-f5acbac0dc63.host.control-plane.oxide.internal SRV port 123 6444f8a5-6465-4f0b-a549-1993c113569c.host.control-plane.oxide.internal + SRV port 123 805d9b32-915a-4e16-aff8-dbf31cc21373.host.control-plane.oxide.internal + SRV port 123 a9847fba-3865-4af1-981d-bade5b56fb5d.host.control-plane.oxide.internal SRV port 123 ec43962b-e9f7-4a86-9503-19d0b83e8731.host.control-plane.oxide.internal name: _nameservice._tcp (records: 3) SRV port 5353 427ec88f-f467-42fa-9bbb-66a91a36103c.host.control-plane.oxide.internal @@ -477,21 +552,28 @@ internal DNS: SRV port 12221 5dbfcb13-d190-4b6b-8d26-c246d7b2335e.host.control-plane.oxide.internal name: _oximeter-reader._tcp (records: 1) SRV port 9000 353b3b65-20f7-48c3-88f7-495bd5d31545.host.control-plane.oxide.internal - name: _repo-depot._tcp (records: 4) + name: _repo-depot._tcp (records: 5) SRV port 12348 2b8f0cb3-0295-4b3c-bc58-4fe88b57112c.sled.control-plane.oxide.internal SRV port 12348 98e6b7c2-2efa-41ca-b20a-0a4d61102fe6.sled.control-plane.oxide.internal SRV port 12348 aff6c093-197d-42c5-ad80-9f10ba051a34.sled.control-plane.oxide.internal + SRV port 12348 b82ede02-399c-48c6-a1de-411df4fa49a7.sled.control-plane.oxide.internal SRV port 12348 d81c6a84-79b8-4958-ae41-ea46c9b19763.sled.control-plane.oxide.internal + name: a9847fba-3865-4af1-981d-bade5b56fb5d.host 
(records: 1)
+    AAAA fd00:1122:3344:103::21
    name: ad6a3a03-8d0f-4504-99a4-cbf73d69b973.host (records: 1)
      AAAA fd00:1122:3344:102::25
    name: aff6c093-197d-42c5-ad80-9f10ba051a34.sled (records: 1)
      AAAA fd00:1122:3344:104::1
    name: b3638aec-f470-4176-bb15-e58c28bf67fd.host (records: 1)
      AAAA fd00:1122:3344:3::1
+  name: b82ede02-399c-48c6-a1de-411df4fa49a7.sled (records: 1)
+    AAAA fd00:1122:3344:105::1
    name: ba4994a8-23f9-4b1a-a84f-a08d74591389.host (records: 1)
      AAAA fd00:1122:3344:101::24
    name: bd354eef-d8a6-4165-9124-283fb5e46d77.host (records: 1)
      AAAA fd00:1122:3344:102::26
+  name: c247aef2-4e7c-4690-8a5d-c040b2644ebf.host (records: 1)
+    AAAA fd00:1122:3344:105::22
    name: d81c6a84-79b8-4958-ae41-ea46c9b19763.sled (records: 1)
      AAAA fd00:1122:3344:103::1
    name: ec43962b-e9f7-4a86-9503-19d0b83e8731.host (records: 1)
diff --git a/update-common/manifests/fake-non-semver.toml b/update-common/manifests/fake-non-semver.toml
index ba805e1ba21..055d5261d2a 100644
--- a/update-common/manifests/fake-non-semver.toml
+++ b/update-common/manifests/fake-non-semver.toml
@@ -41,8 +41,17 @@ version = "2.0.0"
 [artifact.control_plane.source]
 kind = "composite-control-plane"
 zones = [
-    { kind = "fake", name = "zone1", size = "1MiB" },
-    { kind = "fake", name = "zone2", size = "1MiB" },
+    { kind = "fake", name = "clickhouse", size = "128KiB" },
+    { kind = "fake", name = "clickhouse_keeper", size = "128KiB" },
+    { kind = "fake", name = "clickhouse_server", size = "128KiB" },
+    { kind = "fake", name = "cockroachdb", size = "128KiB" },
+    { kind = "fake", name = "crucible-zone", size = "128KiB" },
+    { kind = "fake", name = "crucible-pantry-zone", size = "128KiB" },
+    { kind = "fake", name = "external-dns", size = "128KiB" },
+    { kind = "fake", name = "internal-dns", size = "128KiB" },
+    { kind = "fake", name = "ntp", size = "128KiB" },
+    { kind = "fake", name = "nexus", size = "128KiB" },
+    { kind = "fake", name = "oximeter", size = "128KiB" },
 ]
 
 [[artifact.psc_sp]]

From d44824e70f51792bdfb34f3dae598b29bcc9b07c Mon Sep 17 00:00:00 2001
From: Rain
Date: Mon, 30 Jun 2025 23:58:31 +0000
Subject: [PATCH 3/3] update wicketd integration test

Created using spr 1.3.6-beta.1
---
 wicketd/tests/integration_tests/updates.rs | 47 ++++++++++++++--------
 1 file changed, 29 insertions(+), 18 deletions(-)

diff --git a/wicketd/tests/integration_tests/updates.rs b/wicketd/tests/integration_tests/updates.rs
index 00ed5825e74..93bcb5ff734 100644
--- a/wicketd/tests/integration_tests/updates.rs
+++ b/wicketd/tests/integration_tests/updates.rs
@@ -41,6 +41,21 @@ use wicketd_client::types::{
     GetInventoryParams, GetInventoryResponse, StartUpdateParams,
 };
 
+/// The list of zone file names defined in fake-non-semver.toml.
+static FAKE_NON_SEMVER_ZONE_FILE_NAMES: &[&str] = &[
+    "clickhouse.tar.gz",
+    "clickhouse_keeper.tar.gz",
+    "clickhouse_server.tar.gz",
+    "cockroachdb.tar.gz",
+    "crucible-zone.tar.gz",
+    "crucible-pantry-zone.tar.gz",
+    "external-dns.tar.gz",
+    "internal-dns.tar.gz",
+    "ntp.tar.gz",
+    "nexus.tar.gz",
+    "oximeter.tar.gz",
+];
+
 // See documentation for extract_nested_artifact_pair in update_plan.rs for why
 // multi_thread is required.
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -413,15 +429,15 @@ async fn test_installinator_fetch() {
     // Check that the host and control plane artifacts were downloaded
     // correctly.
     //
-    // The control plane zone names here are defined in `fake.toml` which we
-    // load above.
- for file_name in - [HOST_PHASE_2_FILE_NAME, "install/zone1.tar.gz", "install/zone2.tar.gz"] - { - let a_path = a_path.join(file_name); + // The control plane zone names here are defined in `fake-non-semver.toml` + // which we load above. + for file_name in [HOST_PHASE_2_FILE_NAME.to_owned()].into_iter().chain( + FAKE_NON_SEMVER_ZONE_FILE_NAMES.iter().map(|z| format!("install/{z}")), + ) { + let a_path = a_path.join(&file_name); assert!(a_path.is_file(), "{a_path} was written out"); - let b_path = b_path.join(file_name); + let b_path = b_path.join(&file_name); assert!(b_path.is_file(), "{b_path} was written out"); } @@ -481,17 +497,13 @@ async fn test_installinator_fetch() { "mupdate ID matches", ); - // Check that the zone1 and zone2 images are present in the zone set. (The - // names come from fake-non-semver.toml, under - // [artifact.control-plane.source]). - assert!( - a_manifest.zones.contains_key("zone1.tar.gz"), - "zone1 is present in the zone set" - ); - assert!( - a_manifest.zones.contains_key("zone2.tar.gz"), - "zone2 is present in the zone set" - ); + // Check that the images are present in the zone set. + for file_name in FAKE_NON_SEMVER_ZONE_FILE_NAMES { + assert!( + a_manifest.zones.contains_key(file_name), + "{file_name} is present in the zone set" + ); + } // Ensure that the B path also had the same file written out. let b_manifest_path =