diff --git a/Release.toml b/Release.toml index 73b1b64fa9e..72c21bad27a 100644 --- a/Release.toml +++ b/Release.toml @@ -1,4 +1,4 @@ -version = "1.32.0" +version = "1.33.0" [migrations] "(0.3.1, 0.3.2)" = ["migrate_v0.3.2_admin-container-v0-5-0.lz4"] @@ -395,3 +395,10 @@ version = "1.32.0" "migrate_v1.31.0_public-control-container-v0-7-20.lz4", ] "(1.31.0, 1.32.0)" = [] +"(1.32.0, 1.33.0)" = [ + "migrate_v1.33.0_aws-remove-schnauzer-admin.lz4", + "migrate_v1.33.0_aws-remove-schnauzer-control.lz4", + "migrate_v1.33.0_public-remove-source-admin.lz4", + "migrate_v1.33.0_public-remove-source-control.lz4", + "migrate_v1.33.0_remove-metadata-and-weak-settings-migration.lz4", +] diff --git a/Twoliter.toml b/Twoliter.toml index afa4c3cf408..2e0686d8913 100644 --- a/Twoliter.toml +++ b/Twoliter.toml @@ -1,5 +1,5 @@ schema-version = 1 -release-version = "1.32.0" +release-version = "1.33.0" [vendor.bottlerocket] registry = "public.ecr.aws/bottlerocket" diff --git a/sources/Cargo.lock b/sources/Cargo.lock index 619b96bf5da..5a7b32fd11f 100644 --- a/sources/Cargo.lock +++ b/sources/Cargo.lock @@ -436,6 +436,20 @@ dependencies = [ "paste", ] +[[package]] +name = "aws-remove-schnauzer-admin" +version = "0.1.0" +dependencies = [ + "migration-helpers", +] + +[[package]] +name = "aws-remove-schnauzer-control" +version = "0.1.0" +dependencies = [ + "migration-helpers", +] + [[package]] name = "backtrace" version = "0.3.69" @@ -1032,6 +1046,7 @@ dependencies = [ "percent-encoding", "serde", "serde_json", + "serde_plain", "snafu", "toml", "walkdir", @@ -1743,6 +1758,7 @@ dependencies = [ "datastore", "handlebars", "maplit", + "models", "schnauzer", "serde", "serde_json", @@ -1800,6 +1816,7 @@ dependencies = [ "libc", "serde", "serde_json", + "serde_plain", "toml", ] @@ -2193,6 +2210,20 @@ dependencies = [ "migration-helpers", ] +[[package]] +name = "public-remove-source-admin" +version = "0.1.0" +dependencies = [ + "migration-helpers", +] + +[[package]] +name = 
"public-remove-source-control" +version = "0.1.0" +dependencies = [ + "migration-helpers", +] + [[package]] name = "quinn" version = "0.11.5" @@ -2319,6 +2350,13 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +[[package]] +name = "remove-metadata-and-weak-settings-migration" +version = "0.1.0" +dependencies = [ + "migration-helpers", +] + [[package]] name = "repr_offset" version = "0.2.2" diff --git a/sources/Cargo.toml b/sources/Cargo.toml index f42413782a4..56e2e4eb389 100644 --- a/sources/Cargo.toml +++ b/sources/Cargo.toml @@ -83,6 +83,11 @@ members = [ "settings-migrations/v1.31.0/public-admin-container-v0-11-16", "settings-migrations/v1.31.0/aws-control-container-v0-7-20", "settings-migrations/v1.31.0/public-control-container-v0-7-20", + "settings-migrations/v1.33.0/aws-remove-schnauzer-admin", + "settings-migrations/v1.33.0/aws-remove-schnauzer-control", + "settings-migrations/v1.33.0/public-remove-source-admin", + "settings-migrations/v1.33.0/public-remove-source-control", + "settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration", "settings-plugins/aws-dev", "settings-plugins/aws-ecs-1", diff --git a/sources/api/apiclient/README.md b/sources/api/apiclient/README.md index dea3dfc89e6..78694f7bbdf 100644 --- a/sources/api/apiclient/README.md +++ b/sources/api/apiclient/README.md @@ -174,6 +174,10 @@ You can see all your pending settings like this: ```shell apiclient raw -u /tx ``` +You can also see pending metadata along with pending setting using version 2 of `/tx` like this: +```shell +apiclient raw -u /v2/tx +``` To *commit* the settings, and let the system apply them to any relevant configuration files or services, do this: ```shell diff --git a/sources/api/apiclient/README.tpl b/sources/api/apiclient/README.tpl index 9604d7f6869..e886246a40c 100644 --- a/sources/api/apiclient/README.tpl +++ 
b/sources/api/apiclient/README.tpl @@ -174,6 +174,10 @@ You can see all your pending settings like this: ```shell apiclient raw -u /tx ``` +You can also see pending metadata along with pending setting using version 2 of `/tx` like this: +```shell +apiclient raw -u /v2/tx +``` To *commit* the settings, and let the system apply them to any relevant configuration files or services, do this: ```shell diff --git a/sources/api/datastore/Cargo.toml b/sources/api/datastore/Cargo.toml index c5e88fe9bda..c1e7b41a8d8 100644 --- a/sources/api/datastore/Cargo.toml +++ b/sources/api/datastore/Cargo.toml @@ -16,6 +16,7 @@ serde = { workspace = true, features = ["derive"] } serde_json.workspace = true snafu.workspace = true walkdir.workspace = true +serde_plain.workspace = true [build-dependencies] generate-readme.workspace = true diff --git a/sources/api/datastore/src/constraints_check.rs b/sources/api/datastore/src/constraints_check.rs new file mode 100644 index 00000000000..1a162f3508f --- /dev/null +++ b/sources/api/datastore/src/constraints_check.rs @@ -0,0 +1,52 @@ +//! The outcome of the constraint check determines whether the transaction can proceed to commit. +//! A ‘rejected’ result means that one or more constraints have not been satisfied, +//! preventing the transaction from being committed. On the other hand, an ‘approved’ +//! result confirms that all constraints are satisfied and provides the required +//! settings and metadata for the commit. +//! Constraint checks can alter the write. + +use std::collections::HashMap; + +use crate::{error, Key}; + +type RejectReason = String; + +/// Represents a successful write operation after constraints have been approved. +/// Contains the following fields: +/// - `settings`: A collection of key-value pairs representing the settings to be committed. +/// - `metadata`: A collection of metadata entries. 
+#[derive(PartialEq)] +pub struct ApprovedWrite { + pub settings: HashMap, + pub metadata: Vec<(Key, Key, String)>, +} + +/// Represents the result of a constraint check. +/// The result can either reject the operation or approve it with the required data. +#[derive(PartialEq)] +pub enum ConstraintCheckResult { + Reject(RejectReason), + Approve(ApprovedWrite), +} + +impl TryFrom for ApprovedWrite { + type Error = error::Error; + + fn try_from(constraint_check_result: ConstraintCheckResult) -> Result { + match constraint_check_result { + ConstraintCheckResult::Reject(err) => error::ConstraintCheckRejectSnafu { err }.fail(), + ConstraintCheckResult::Approve(approved_write) => Ok(approved_write), + } + } +} + +impl From> for ConstraintCheckResult { + fn from(approved_write: Option) -> Self { + match approved_write { + None => ConstraintCheckResult::Reject( + "The write for the given transaction is rejected".to_string(), + ), + Some(approved_write) => ConstraintCheckResult::Approve(approved_write), + } + } +} diff --git a/sources/api/datastore/src/error.rs b/sources/api/datastore/src/error.rs index d1c7207ae1a..9a326381472 100644 --- a/sources/api/datastore/src/error.rs +++ b/sources/api/datastore/src/error.rs @@ -59,6 +59,20 @@ pub enum Error { #[snafu(display("Key name beyond maximum length {}: {}", name, max))] KeyTooLong { name: String, max: usize }, + + #[snafu(display("Unable to serialize data: {}", source))] + Serialize { source: serde_json::Error }, + + #[snafu(display("Unable to run the check constraint function: {}", source))] + CheckConstraintExecution { + source: Box, + }, + + #[snafu(display( + "Check constraint function rejected the transaction. 
Aborting commit : {}", + err + ))] + ConstraintCheckReject { err: String }, } -pub type Result = std::result::Result; +pub type Result = std::result::Result; diff --git a/sources/api/datastore/src/filesystem.rs b/sources/api/datastore/src/filesystem.rs index bf89eed783f..2aa1c4eb7d7 100644 --- a/sources/api/datastore/src/filesystem.rs +++ b/sources/api/datastore/src/filesystem.rs @@ -13,6 +13,8 @@ use std::io; use std::path::{self, Path, PathBuf}; use walkdir::{DirEntry, WalkDir}; +use crate::constraints_check::{ApprovedWrite, ConstraintCheckResult}; + use super::key::{Key, KeyType}; use super::{error, Committed, DataStore, Result}; @@ -413,6 +415,7 @@ impl DataStore for FilesystemDataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where @@ -420,7 +423,7 @@ impl DataStore for FilesystemDataStore { S2: AsRef, { // Find metadata key paths on disk - let key_paths = find_populated_key_paths(self, KeyType::Meta, prefix, &Committed::Live)?; + let key_paths = find_populated_key_paths(self, KeyType::Meta, prefix, committed)?; // For each file on disk, check the user's conditions, and add it to our output let mut result = HashMap::new(); @@ -460,8 +463,13 @@ impl DataStore for FilesystemDataStore { self.delete_key_path(path, committed) } - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result> { - let path = self.metadata_path(metadata_key, data_key, &Committed::Live)?; + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { + let path = self.metadata_path(metadata_key, data_key, committed)?; read_file_for_key(metadata_key, &path) } @@ -470,8 +478,9 @@ impl DataStore for FilesystemDataStore { metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()> { - let path = self.metadata_path(metadata_key, data_key, &Committed::Live)?; + let path = self.metadata_path(metadata_key, data_key, committed)?; 
write_file_mkdir(path, value) } @@ -482,27 +491,57 @@ impl DataStore for FilesystemDataStore { /// We commit by copying pending keys to live, then removing pending. Something smarter (lock, /// atomic flip, etc.) will be required to make the server concurrent. - fn commit_transaction(&mut self, transaction: S) -> Result> + fn commit_transaction( + &mut self, + transaction: S, + constraint_check: &C, + ) -> Result> where S: Into + AsRef, + C: Fn( + &mut Self, + &Committed, + ) -> std::result::Result< + ConstraintCheckResult, + Box, + >, { + let mut pending_keys: HashSet = Default::default(); + + let transactions = self.list_transactions()?; + if !transactions.contains(transaction.as_ref()) { + return Ok(pending_keys); + } + let pending = Committed::Pending { tx: transaction.into(), }; - // Get data for changed keys - let pending_data = self.get_prefix("settings.", &pending)?; - // Nothing to do if no keys are present in pending - if pending_data.is_empty() { - return Ok(Default::default()); + let constraints_check_result = + constraint_check(self, &pending).context(error::CheckConstraintExecutionSnafu)?; + + let approved_write = ApprovedWrite::try_from(constraints_check_result)?; + + trace!( + "commit_transaction: transaction_metadata: {:?}", + approved_write.metadata + ); + + // write the metadata. 
+ for (metadata_key, data_key, value) in approved_write.metadata { + self.set_metadata(&metadata_key, &data_key, value, &Committed::Live)?; } - // Save Keys for return value - let pending_keys: HashSet = pending_data.keys().cloned().collect(); + let pending_data = approved_write.settings; - // Apply changes to live - debug!("Writing pending keys to live"); - self.set_keys(&pending_data, &Committed::Live)?; + if !pending_data.is_empty() { + // Save Keys for return value + pending_keys = pending_data.keys().cloned().collect(); + + // Apply changes to live + debug!("Writing pending keys to live"); + self.set_keys(&pending_data, &Committed::Live)?; + } // Remove pending debug!("Removing old pending keys"); diff --git a/sources/api/datastore/src/lib.rs b/sources/api/datastore/src/lib.rs index 9156a525a19..221a608e003 100644 --- a/sources/api/datastore/src/lib.rs +++ b/sources/api/datastore/src/lib.rs @@ -27,6 +27,7 @@ The `deserialization` module provides code to deserialize datastore-acceptable k * The `serialization` module can't handle complex types under lists; it assumes lists can be serialized as scalars. */ +pub mod constraints_check; pub mod deserialization; pub mod error; pub mod filesystem; @@ -34,6 +35,7 @@ pub mod key; pub mod memory; pub mod serialization; +use constraints_check::ConstraintCheckResult; pub use error::{Error, Result}; pub use filesystem::FilesystemDataStore; pub use key::{Key, KeyType, KEY_SEPARATOR, KEY_SEPARATOR_STR}; @@ -72,6 +74,7 @@ pub trait DataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where @@ -89,7 +92,12 @@ pub trait DataStore { /// Retrieve the value for a single metadata key from the datastore. Values will inherit from /// earlier in the tree, if more specific values are not found later. 
- fn get_metadata(&self, metadata_key: &Key, data_key: &Key) -> Result> { + fn get_metadata( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { let mut result = Ok(None); let mut current_path = Vec::new(); @@ -101,7 +109,7 @@ pub trait DataStore { unreachable!("Prefix of Key failed to make Key: {:?}", current_path) }); - if let Some(md) = self.get_metadata_raw(metadata_key, &data_key)? { + if let Some(md) = self.get_metadata_raw(metadata_key, &data_key, committed)? { result = Ok(Some(md)); } } @@ -110,13 +118,19 @@ pub trait DataStore { /// Retrieve the value for a single metadata key from the datastore, without taking into /// account inheritance of metadata from earlier in the tree. - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result>; + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result>; /// Set the value of a single metadata key in the datastore. fn set_metadata>( &mut self, metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()>; /// Removes the given metadata key from the given data key in the datastore. If we /// succeeded, we return Ok(()); if the data or metadata key didn't exist, we also return @@ -125,9 +139,20 @@ pub trait DataStore { /// Applies pending changes from the given transaction to the live datastore. Returns the /// list of changed keys. - fn commit_transaction(&mut self, transaction: S) -> Result> + fn commit_transaction( + &mut self, + transaction: S, + constraint_check: &C, + ) -> Result> where - S: Into + AsRef; + S: Into + AsRef, + C: Fn( + &mut Self, + &Committed, + ) -> std::result::Result< + ConstraintCheckResult, + Box, + >; /// Remove the given pending transaction from the datastore. Returns the list of removed /// keys. If the transaction doesn't exist, will return Ok with an empty list. 
@@ -205,13 +230,14 @@ pub trait DataStore { fn get_metadata_prefix( &self, find_prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where S1: AsRef, S2: AsRef, { - let meta_map = self.list_populated_metadata(&find_prefix, metadata_key_name)?; + let meta_map = self.list_populated_metadata(&find_prefix, committed, metadata_key_name)?; trace!("Found populated metadata: {:?}", meta_map); if meta_map.is_empty() { return Ok(HashMap::new()); @@ -234,12 +260,12 @@ pub trait DataStore { meta_key, &data_key ); - let value = self.get_metadata(&meta_key, &data_key)?.context( - error::ListedMetaNotPresentSnafu { + let value = self + .get_metadata(&meta_key, &data_key, committed)? + .context(error::ListedMetaNotPresentSnafu { meta_key: meta_key.name(), data_key: data_key.name(), - }, - )?; + })?; // Insert a top-level map entry for the data key if we've found metadata. let data_entry = result.entry(data_key.clone()).or_insert_with(HashMap::new); @@ -336,14 +362,20 @@ mod test { let grandchild = Key::new(KeyType::Data, "a.b.c").unwrap(); // Set metadata on parent - m.set_metadata(&meta, &parent, "value").unwrap(); + m.set_metadata(&meta, &parent, "value", &Committed::Live) + .unwrap(); // Metadata shows up on grandchild... assert_eq!( - m.get_metadata(&meta, &grandchild).unwrap(), + m.get_metadata(&meta, &grandchild, &Committed::Live) + .unwrap(), Some("value".to_string()) ); // ...but only through inheritance, not directly. 
- assert_eq!(m.get_metadata_raw(&meta, &grandchild).unwrap(), None); + assert_eq!( + m.get_metadata_raw(&meta, &grandchild, &Committed::Live) + .unwrap(), + None + ); } #[test] @@ -379,20 +411,92 @@ mod test { let mk1 = Key::new(KeyType::Meta, "metatest1").unwrap(); let mk2 = Key::new(KeyType::Meta, "metatest2").unwrap(); let mk3 = Key::new(KeyType::Meta, "metatest3").unwrap(); - m.set_metadata(&mk1, &k1, "41").unwrap(); - m.set_metadata(&mk2, &k2, "42").unwrap(); - m.set_metadata(&mk3, &k3, "43").unwrap(); + m.set_metadata(&mk1, &k1, "41", &Committed::Live).unwrap(); + m.set_metadata(&mk2, &k2, "42", &Committed::Live).unwrap(); + m.set_metadata(&mk3, &k3, "43", &Committed::Live).unwrap(); + + // Check all metadata + assert_eq!( + m.get_metadata_prefix("x.", &Committed::Live, &None as &Option<&str>) + .unwrap(), + hashmap!(k1 => hashmap!(mk1 => "41".to_string()), + k2.clone() => hashmap!(mk2.clone() => "42".to_string())) + ); + + // Check metadata matching a given name + assert_eq!( + m.get_metadata_prefix("x.", &Committed::Live, &Some("metatest2")) + .unwrap(), + hashmap!(k2 => hashmap!(mk2 => "42".to_string())) + ); + } + + #[test] + fn get_metadata_prefix_from_pending() { + let mut m = MemoryDataStore::new(); + + // Build some data keys to which we can attach metadata; they don't actually have to be + // set in the data store. 
+ let k1 = Key::new(KeyType::Data, "x.1").unwrap(); + let k2 = Key::new(KeyType::Data, "x.2").unwrap(); + let k3 = Key::new(KeyType::Data, "y.3").unwrap(); + + // Set some metadata to check + let mk1 = Key::new(KeyType::Meta, "metatest1").unwrap(); + let mk2 = Key::new(KeyType::Meta, "metatest2").unwrap(); + let mk3 = Key::new(KeyType::Meta, "metatest3").unwrap(); + m.set_metadata( + &mk1, + &k1, + "41", + &Committed::Pending { + tx: "test".to_owned(), + }, + ) + .unwrap(); + m.set_metadata( + &mk2, + &k2, + "42", + &Committed::Pending { + tx: "test".to_owned(), + }, + ) + .unwrap(); + m.set_metadata( + &mk3, + &k3, + "43", + &Committed::Pending { + tx: "test".to_owned(), + }, + ) + .unwrap(); // Check all metadata assert_eq!( - m.get_metadata_prefix("x.", &None as &Option<&str>).unwrap(), + m.get_metadata_prefix( + "x.", + &Committed::Pending { + tx: "test".to_owned() + }, + &None as &Option<&str> + ) + .unwrap(), hashmap!(k1 => hashmap!(mk1 => "41".to_string()), k2.clone() => hashmap!(mk2.clone() => "42".to_string())) ); // Check metadata matching a given name assert_eq!( - m.get_metadata_prefix("x.", &Some("metatest2")).unwrap(), + m.get_metadata_prefix( + "x.", + &Committed::Pending { + tx: "test".to_owned() + }, + &Some("metatest2") + ) + .unwrap(), hashmap!(k2 => hashmap!(mk2 => "42".to_string())) ); } diff --git a/sources/api/datastore/src/memory.rs b/sources/api/datastore/src/memory.rs index 4ffc397912f..a4b91fc7592 100644 --- a/sources/api/datastore/src/memory.rs +++ b/sources/api/datastore/src/memory.rs @@ -5,6 +5,8 @@ use std::collections::{HashMap, HashSet}; +use crate::constraints_check::{ApprovedWrite, ConstraintCheckResult}; + use super::{Committed, DataStore, Key, Result}; #[derive(Debug, Default)] @@ -16,6 +18,9 @@ pub struct MemoryDataStore { // Map of data keys to their metadata, which in turn is a mapping of metadata keys to // arbitrary (string/serialized) values. 
metadata: HashMap>, + // Map of data keys to their metadata, which in turn is a mapping of metadata keys to + // arbitrary (string/serialized) values in pending transaction + pending_metadata: HashMap>, } impl MemoryDataStore { @@ -57,14 +62,21 @@ impl DataStore for MemoryDataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where S1: AsRef, S2: AsRef, { + let metadata_to_use = match committed { + Committed::Live => &self.metadata, + Committed::Pending { .. } => &self.pending_metadata, + }; + let mut result = HashMap::new(); - for (data_key, meta_map) in self.metadata.iter() { + + for (data_key, meta_map) in metadata_to_use.iter() { // Confirm data key matches requested prefix. if !data_key.name().starts_with(prefix.as_ref()) { continue; @@ -112,8 +124,19 @@ impl DataStore for MemoryDataStore { Ok(dataset.contains_key(key)) } - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result> { - let metadata_for_data = self.metadata.get(data_key); + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { + let metadata_to_use = match committed { + Committed::Live => &self.metadata, + Committed::Pending { .. } => &self.pending_metadata, + }; + + let metadata_for_data = metadata_to_use.get(data_key); + // If we have a metadata entry for this data key, then we can try fetching the requested // metadata key, otherwise we'll return early with Ok(None). let result = metadata_for_data.and_then(|m| m.get(metadata_key)); @@ -125,17 +148,14 @@ impl DataStore for MemoryDataStore { metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()> { - // If we don't already have a metadata entry for this data key, insert one. 
- let metadata_for_data = self - .metadata - // Clone data key because we want the HashMap key type to be Key, not &Key, and we - // can't pass ownership because we only have a reference from our parameters. - .entry(data_key.clone()) - .or_default(); - - metadata_for_data.insert(metadata_key.clone(), value.as_ref().to_owned()); - Ok(()) + match committed { + Committed::Live => set_metadata_raw(&mut self.metadata, metadata_key, data_key, value), + Committed::Pending { .. } => { + set_metadata_raw(&mut self.pending_metadata, metadata_key, data_key, value) + } + } } fn unset_metadata(&mut self, metadata_key: &Key, data_key: &Key) -> Result<()> { @@ -146,19 +166,45 @@ impl DataStore for MemoryDataStore { Ok(()) } - fn commit_transaction(&mut self, transaction: S) -> Result> + fn commit_transaction( + &mut self, + transaction: S, + constraint_check: &C, + ) -> Result> where S: Into + AsRef, + C: Fn( + &mut Self, + &Committed, + ) -> std::result::Result< + ConstraintCheckResult, + Box, + >, { + let tx = transaction.as_ref(); + let pending = Committed::Pending { tx: tx.into() }; + + let constraint_check_result = + constraint_check(self, &pending).unwrap_or(ConstraintCheckResult::Reject( + "Check constraint function rejected the transaction. 
Aborting commit".to_string(), + )); + let approved_write = ApprovedWrite::try_from(constraint_check_result)?; + + let mut pending_keys: HashSet = Default::default(); // Remove anything pending for this transaction - if let Some(pending) = self.pending.remove(transaction.as_ref()) { + + if !approved_write.settings.is_empty() { + // Save Keys for return value + pending_keys = approved_write.settings.keys().cloned().collect(); + // Apply pending changes to live - self.set_keys(&pending, &Committed::Live)?; - // Return keys that were committed - Ok(pending.keys().cloned().collect()) - } else { - Ok(HashSet::new()) + self.set_keys(&approved_write.settings, &Committed::Live)?; } + + self.pending.remove(tx); + + // Return keys that were committed + Ok(pending_keys) } fn delete_transaction(&mut self, transaction: S) -> Result> @@ -179,12 +225,74 @@ impl DataStore for MemoryDataStore { } } +fn set_metadata_raw>( + metadata_to_use: &mut HashMap>, + metadata_key: &Key, + data_key: &Key, + value: S, +) -> Result<()> { + // If we don't already have a metadata entry for this data key, insert one. + let metadata_for_data = metadata_to_use + // Clone data key because we want the HashMap key type to be Key, not &Key, and we + // can't pass ownership because we only have a reference from our parameters. 
+ .entry(data_key.clone()) + .or_default(); + + metadata_for_data.insert(metadata_key.clone(), value.as_ref().to_owned()); + Ok(()) +} + #[cfg(test)] mod test { + use std::collections::HashMap; + use super::super::{Committed, DataStore, Key, KeyType}; use super::MemoryDataStore; + use crate::constraints_check::{ApprovedWrite, ConstraintCheckResult}; + use crate::{deserialize_scalar, serialize_scalar, ScalarError}; use maplit::hashset; + fn constraint_check( + datastore: &mut MemoryDataStore, + committed: &Committed, + ) -> super::Result> + { + let mut transaction_metadata = datastore + .get_metadata_prefix("settings.", committed, &None as &Option<&str>) + .unwrap(); + + let settings_to_commit: HashMap = match committed { + Committed::Pending { tx: transaction } => datastore + .pending + .get(transaction) + .unwrap_or(&HashMap::new()) + .clone(), + Committed::Live => HashMap::new(), + }; + + let mut metadata_to_commit: Vec<(Key, Key, String)> = Vec::new(); + + for (key, value) in transaction_metadata.iter_mut() { + for (metadata_key, metadata_value) in value { + if metadata_key.name() != "strength" { + continue; + } + + // strength in pending transaction + let pending_strength: String = + deserialize_scalar::<_, ScalarError>(&metadata_value.clone()).unwrap(); + let met_value = serialize_scalar::<_, ScalarError>(&pending_strength).unwrap(); + metadata_to_commit.push((metadata_key.clone(), key.clone(), met_value)); + } + } + let approved_write = ApprovedWrite { + settings: settings_to_commit, + metadata: metadata_to_commit, + }; + + Ok(ConstraintCheckResult::from(Some(approved_write))) + } + #[test] fn get_set_unset() { let mut m = MemoryDataStore::new(); @@ -198,14 +306,38 @@ mod test { let mdkey = Key::new(KeyType::Meta, "testmd").unwrap(); let md = "mdval"; - m.set_metadata(&mdkey, &k, md).unwrap(); + m.set_metadata(&mdkey, &k, md, &Committed::Live).unwrap(); assert_eq!( - m.get_metadata_raw(&mdkey, &k).unwrap(), + m.get_metadata_raw(&mdkey, &k, 
&Committed::Live).unwrap(), + Some(md.to_string()) + ); + + m.set_metadata( + &mdkey, + &k, + md, + &Committed::Pending { + tx: "test".to_owned(), + }, + ) + .unwrap(); + assert_eq!( + m.get_metadata_raw( + &mdkey, + &k, + &Committed::Pending { + tx: "test".to_owned() + } + ) + .unwrap(), Some(md.to_string()) ); m.unset_metadata(&mdkey, &k).unwrap(); - assert_eq!(m.get_metadata_raw(&mdkey, &k).unwrap(), None); + assert_eq!( + m.get_metadata_raw(&mdkey, &k, &Committed::Live).unwrap(), + None + ); m.unset_key(&k, &Committed::Live).unwrap(); assert_eq!(m.get_key(&k, &Committed::Live).unwrap(), None); @@ -242,7 +374,7 @@ mod test { assert!(m.key_populated(&k, &pending).unwrap()); assert!(!m.key_populated(&k, &Committed::Live).unwrap()); - m.commit_transaction(tx).unwrap(); + m.commit_transaction(tx, &constraint_check).unwrap(); assert!(!m.key_populated(&k, &pending).unwrap()); assert!(m.key_populated(&k, &Committed::Live).unwrap()); } diff --git a/sources/api/migration/migration-helpers/Cargo.toml b/sources/api/migration/migration-helpers/Cargo.toml index 0464988bd28..3a333e43d45 100644 --- a/sources/api/migration/migration-helpers/Cargo.toml +++ b/sources/api/migration/migration-helpers/Cargo.toml @@ -12,6 +12,7 @@ exclude = ["README.md"] bottlerocket-release.workspace = true datastore.workspace = true handlebars.workspace = true +models.workspace = true schnauzer.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/sources/api/migration/migration-helpers/src/common_migrations.rs b/sources/api/migration/migration-helpers/src/common_migrations.rs index 561b6299d76..898c4f689a2 100644 --- a/sources/api/migration/migration-helpers/src/common_migrations.rs +++ b/sources/api/migration/migration-helpers/src/common_migrations.rs @@ -1,9 +1,10 @@ use crate::{error, Migration, MigrationData, Result}; use schnauzer::import::{json_settings::JsonSettingsResolver, StaticHelperResolver}; use serde::Serialize; +use serde_json::Value; use shlex::Shlex; use 
snafu::{OptionExt, ResultExt}; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; /// We use this migration when we add settings and want to make sure they're removed before we go /// back to old versions that don't understand them. @@ -1912,3 +1913,231 @@ impl Migration for NoOpMigration { Ok(input) } } + +// =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= +/// We use this migration to remove the metadata(always) and data if it matches with existing value. +/// This is done so that, the values can be repopulated from defaults using Storewolf and Sundog. +/// We will need this migration once to adapt the concept of Strength on settings. +pub struct RemoveSchnauzerMigration { + pub setting: &'static str, + pub old_cmdline: &'static str, +} + +impl RemoveSchnauzerMigration { + fn update_data_and_metadata( + &self, + outgoing_setting_data: &str, + outgoing_schnauzer_cmdline: &str, + input: &mut MigrationData, + ) -> Result<()> { + // We just need to delete the data if it matches the old setting data in datastore. + // Though we are aware that all the metadata will be removed by the migrator/another migration + // We will delete the metadata here to keep this migration complete. + + let metadata = input.metadata.entry(self.setting.to_string()).or_default(); + metadata.remove("setting-generator"); + + let input_data = structure_migration_data_for_templates(&input.data)?; + let input_data = + serde_json::to_value(input_data).context(error::SerializeTemplateDataSnafu)?; + + // Generate settings data using the setting's outgoing template so we can confirm + // it matches our expected value; if not, the user has changed it and we should stop. 
+ let template_importer = SchnauzerMigrationTemplateImporter::new(input_data); + let outgoing_command_args = Shlex::new(outgoing_schnauzer_cmdline); + + let tokio_runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .context(error::CreateTokioRuntimeSnafu)?; + + let generated_old_data = tokio_runtime + .block_on(async { + schnauzer::v2::cli::run_with_args(outgoing_command_args, &template_importer).await + }) + .with_context(|_| error::RenderSchnauzerV2TemplateSnafu { + cmdline: outgoing_schnauzer_cmdline.to_string(), + })?; + + if generated_old_data == *outgoing_setting_data { + // Remove the setting from the datastore; a new value will be generated using + // Storewolf and Sundog + println!( + "Existing setting value '{}' for setting '{}' is same as outgoing value. '{}'", + generated_old_data, outgoing_setting_data, self.setting + ); + + println!("Removing setting '{}' from datastore", self.setting); + + input.data.remove(self.setting); + } else { + println!( + "'{}' is not set to '{}', leaving alone", + self.setting, generated_old_data + ); + } + + Ok(()) + } +} + +impl Migration for RemoveSchnauzerMigration { + fn forward(&mut self, mut input: MigrationData) -> Result { + if let Some(input_value) = input.data.get(self.setting) { + let data = input_value + .as_str() + .context(error::NonStringSettingDataTypeSnafu { + setting: self.setting, + })?; + + self.update_data_and_metadata( + // Clone the input string; we need to give the function mutable access to + // the structure that contains the string, so we can't pass a reference into the + // structure. 
+ #[allow(clippy::unnecessary_to_owned)] + &data.to_owned(), + self.old_cmdline, + &mut input, + )?; + } else { + println!("Found no '{}' to change on upgrade", self.setting); + } + + Ok(input) + } + + fn backward(&mut self, input: MigrationData) -> Result { + println!("RemoveSchnauzerMigration has no work to do on downgrade.",); + Ok(input) + } +} + +#[cfg(test)] +mod remove_schnauzer_migration { + use std::collections::HashMap; + + use super::RemoveSchnauzerMigration; + use crate::{Migration, MigrationData}; + use maplit::hashmap; + use serde_json::json; + + #[test] + fn test_replaces_data_and_generator() { + // Given a schnauzer migration where the settings generator and generated data are both set + // to the input values, + // When the RemoveSchnauzerMigration is performed, + // Both the generator and data are deleted. + let mut migration = RemoveSchnauzerMigration { + setting: "settings.output", + old_cmdline: + "schnauzer-v2 render --requires 'input@v1' --template '{{ settings.input }}'", + }; + + let input = MigrationData { + data: hashmap! { + "settings.input".into() => json!("hello"), + "settings.output".into() => json!("hello"), + "os".into() => json!({}), + }, + metadata: hashmap! { + "settings.output".into() => hashmap!{"setting-generator".into() => migration.old_cmdline.into()} + }, + }; + + let forward_result = migration.forward(input.clone()); + println!("{:?}", forward_result); + let forward_result = forward_result.unwrap(); + + assert_eq!(forward_result.data.get("settings.output"), Option::None); + + assert_eq!( + forward_result + .metadata + .get("settings.output") + .unwrap_or(&HashMap::new()) + .get("setting-generator"), + Option::None + ); + } +} + +// =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= + +/// We use this migration to remove a setting string if it matches the old value. +/// We will need this migration once to adapt the concept of Strength on settings. 
+pub struct RemoveMatchingString { + pub setting: &'static str, + pub old_val: &'static str, +} + +impl Migration for RemoveMatchingString { + fn forward(&mut self, mut input: MigrationData) -> Result { + if let Some(data) = input.data.get_mut(self.setting) { + match data { + serde_json::Value::String(data) => { + if data == self.old_val { + input.data.remove(self.setting); + } else { + println!( + "'{}' is not set to '{}', leaving alone", + self.setting, self.old_val + ); + } + } + _ => { + println!( + "'{}' is set to non-string value '{}'; RemoveOldData expects a string setting value", + self.setting, data + ); + } + } + } else { + println!("Found no '{}' to change on upgrade", self.setting); + } + Ok(input) + } + + fn backward(&mut self, input: MigrationData) -> Result { + println!("RemoveOldData has no work to do on downgrade.",); + Ok(input) + } +} + +// =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= + +// When we downgrade multiple version to a version where migrator is not aware of deleting the +// setting-generator as struct or the strength file. +// This migration will remove the setting-generator as struct and strength metadata. +// Also We will delete the metadata on downgrade and +// depend on storewolf to populate the metadata from defaults. 
+#[derive(Debug)] +pub struct RemoveMetadataAndWeakSettingsMigration; + +impl Migration for RemoveMetadataAndWeakSettingsMigration { + /// No work to do on forward migrations, copy the same datastore + fn forward(&mut self, input: MigrationData) -> Result { + println!("RemoveMetadataAndWeakSettingsMigration has no work to do on upgrade.",); + Ok(input) + } + + /// Delete all the weak settings on backward migrations + fn backward(&mut self, mut input: MigrationData) -> Result { + let mut keys_to_remove = HashSet::new(); + // Collect keys where the inner HashMap contains the key "strength" + for (key, inner_map) in &input.metadata { + if let Some(strength) = inner_map.get("strength") { + if strength == &Value::String("weak".to_string()) { + keys_to_remove.insert(key.clone()); + } + } + } + // Remove weak settings + for key in keys_to_remove { + input.data.remove(&key); + } + + // Remove all the metadata + input.metadata = HashMap::new(); + Ok(input) + } +} diff --git a/sources/api/migration/migration-helpers/src/datastore_helper.rs b/sources/api/migration/migration-helpers/src/datastore_helper.rs index cfb31685ebb..f2986d8a385 100644 --- a/sources/api/migration/migration-helpers/src/datastore_helper.rs +++ b/sources/api/migration/migration-helpers/src/datastore_helper.rs @@ -53,7 +53,7 @@ pub(crate) fn get_input_data( let mut metadata = HashMap::new(); if let Committed::Live = committed { let raw_metadata = datastore - .get_metadata_prefix("", &None as &Option<&str>) + .get_metadata_prefix("", committed, &None as &Option<&str>) .context(error::GetMetadataSnafu)?; for (data_key, meta_map) in raw_metadata.into_iter() { // See notes above about storing key Strings and Values. 
@@ -114,7 +114,7 @@ pub(crate) fn set_output_data( })?; let value = serialize_scalar(&raw_value).context(error::SerializeSnafu)?; datastore - .set_metadata(&metadata_key, &data_key, value) + .set_metadata(&metadata_key, &data_key, value, committed) .context(error::DataStoreWriteSnafu)?; } } diff --git a/sources/api/migration/migration-helpers/src/error.rs b/sources/api/migration/migration-helpers/src/error.rs index 2abd5bc62a3..04e5042ec5b 100644 --- a/sources/api/migration/migration-helpers/src/error.rs +++ b/sources/api/migration/migration-helpers/src/error.rs @@ -131,6 +131,12 @@ pub enum Error { #[snafu(display("Failed to create async runtime: {}", source))] CreateTokioRuntime { source: std::io::Error }, + + #[snafu(display( + "Error in deserializing response value to SettingsGenerator: {}", + source + ))] + DeserializeSettingsGenerator { source: serde_json::Error }, } /// Result alias containing our Error type. diff --git a/sources/models/Cargo.toml b/sources/models/Cargo.toml index ce679bbbc6d..3d8cfa2ab16 100644 --- a/sources/models/Cargo.toml +++ b/sources/models/Cargo.toml @@ -14,6 +14,7 @@ bottlerocket-release.workspace = true libc.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true +serde_plain.workspace = true toml.workspace = true # settings plugins diff --git a/sources/models/src/generator.rs b/sources/models/src/generator.rs new file mode 100644 index 00000000000..df53431c3e4 --- /dev/null +++ b/sources/models/src/generator.rs @@ -0,0 +1,258 @@ +//! The 'generator' module holds types that handles the settings generator metadata +//! definition (containing command, strength and depth) among various systems +//! like apiserver, sundog, datastore. +//! +//! The command field defines the command that needs to be executed to populate the +//! setting. +//! The strength field defines whether a setting needs to be deleted on reboot. +//! 
The depth field defines how metadata is inherited across hierarchical levels,
+//! allowing a parent to provide metadata that can be applied to children at a given depth.
+//!
+//! The SettingsGenerator type is used to hold a generator that is applied strictly
+//! to the given setting and has depth 0.
+//! The RawSettingsGenerator holds the generators that can be dynamically applied
+//! to the successors at the given depth, where a depth of '0' means the generator
+//! should be applied to the given key.
+//!
+//! We use a custom deserializer because the metadata may not always be
+//! structured as an object; it can also appear as a string. This deserializer
+//! handles both formats, keeping the deserialization logic close to the struct
+//! for maintainability and clarity.
+
+use serde::{
+    de::{self, MapAccess, Visitor},
+    Deserialize, Deserializer, Serialize,
+};
+use serde_plain::derive_fromstr_from_deserialize;
+use std::fmt::{self, Display};
+
+/// Weak settings are ephemeral and deleted on upgrade/downgrade, regardless of whether or not they
+/// are written by a setting generator.
+#[derive(Default, Deserialize, Serialize, Debug, Clone, Copy, PartialEq)] +#[serde(rename_all = "kebab-case")] +pub enum Strength { + #[default] + Strong, + Weak, +} + +impl Display for Strength { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Strength::Strong => write!(f, "strong"), + Strength::Weak => write!(f, "weak"), + } + } +} + +derive_fromstr_from_deserialize!(Strength); + +/// Struct to hold the setting generator definition containing +/// command, strength, depth +#[derive(Clone, Default, Serialize, Debug, PartialEq)] +#[serde(rename_all = "kebab-case", deny_unknown_fields)] +pub struct RawSettingsGenerator { + pub command: String, + pub strength: Strength, + pub depth: u32, +} + +impl RawSettingsGenerator { + pub fn is_weak(&self) -> bool { + self.strength == Strength::Weak + } +} + +impl<'de> Deserialize<'de> for RawSettingsGenerator { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SettingsGeneratorVisitor; + impl<'de> Visitor<'de> for SettingsGeneratorVisitor { + type Value = RawSettingsGenerator; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string or a map") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + // If the value is a string, use it as the `command` with defaults for other fields. + Ok(RawSettingsGenerator { + command: value.to_string(), + ..RawSettingsGenerator::default() + }) + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'de>, + { + // Extract values from the map + let mut command = None; + let mut strength = None; + let mut depth = None; + while let Some(key) = map.next_key::()? 
{ + match key.as_str() { + "command" => command = Some(map.next_value()?), + "strength" => strength = Some(map.next_value()?), + "depth" => depth = Some(map.next_value()?), + _ => { + return Err(de::Error::unknown_field( + &key, + &["command", "strength", "depth"], + )) + } + } + } + Ok(RawSettingsGenerator { + command: command.ok_or_else(|| de::Error::missing_field("command"))?, + strength: strength.unwrap_or_default(), + depth: depth.unwrap_or_default(), + }) + } + } + deserializer.deserialize_any(SettingsGeneratorVisitor) + } +} + +#[cfg(test)] +mod test { + use std::collections::HashMap; + + use super::*; + + #[test] + fn test_setting_generator_deserialization() { + let api_response = r#" + { + "host-containers.admin.source": "generator1", + "host-containers.control.source": { + "command": "generator2", + "strength": "weak", + "depth": 0 + }, + "host-containers.no_depth.source": { + "command": "generator3", + "strength": "weak" + }, + "host-containers.depth_given.source": { + "command": "generator4", + "strength": "weak", + "depth": 1 + } + }"#; + + let expected_admin = RawSettingsGenerator { + command: "generator1".to_string(), + strength: Strength::Strong, + depth: 0, + }; + + let expected_control = RawSettingsGenerator { + command: "generator2".to_string(), + strength: Strength::Weak, + depth: 0, + }; + + let expected_no_depth = RawSettingsGenerator { + command: "generator3".to_string(), + strength: Strength::Weak, + depth: 0, + }; + + let expected_depth_given = RawSettingsGenerator { + command: "generator4".to_string(), + strength: Strength::Weak, + depth: 1, + }; + + let result: HashMap = + serde_json::from_str(api_response).unwrap(); + + assert_eq!( + result.get("host-containers.admin.source").unwrap(), + &expected_admin + ); + assert_eq!( + result.get("host-containers.control.source").unwrap(), + &expected_control + ); + assert_eq!( + result.get("host-containers.no_depth.source").unwrap(), + &expected_no_depth + ); + assert_eq!( + 
result.get("host-containers.depth_given.source").unwrap(), + &expected_depth_given + ); + } +} + +/// Struct to hold the setting generator definition containing +/// command, strength +#[derive(Default, Serialize, std::fmt::Debug, PartialEq)] +pub struct SettingsGenerator { + pub command: String, + pub strength: Strength, +} + +impl From for SettingsGenerator { + fn from(value: RawSettingsGenerator) -> Self { + SettingsGenerator { + command: value.command, + strength: value.strength, + } + } +} + +impl<'de> Deserialize<'de> for SettingsGenerator { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SettingsGeneratorVisitor; + impl<'de> Visitor<'de> for SettingsGeneratorVisitor { + type Value = SettingsGenerator; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string or a map") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + // If the value is a string, use it as the `command` with defaults for other fields. + Ok(SettingsGenerator { + command: value.to_string(), + ..SettingsGenerator::default() + }) + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'de>, + { + // Extract values from the map + let mut command = None; + let mut strength = None; + while let Some(key) = map.next_key::()? 
{ + match key.as_str() { + "command" => command = Some(map.next_value()?), + "strength" => strength = Some(map.next_value()?), + _ => return Err(de::Error::unknown_field(&key, &["command", "strength"])), + } + } + Ok(SettingsGenerator { + command: command.ok_or_else(|| de::Error::missing_field("command"))?, + strength: strength.unwrap_or_default(), + }) + } + } + deserializer.deserialize_any(SettingsGeneratorVisitor) + } +} diff --git a/sources/models/src/lib.rs b/sources/models/src/lib.rs index 8dd88bc4e78..6361ebd4008 100644 --- a/sources/models/src/lib.rs +++ b/sources/models/src/lib.rs @@ -5,7 +5,7 @@ Bottlerocket has different variants supporting different features and use cases. Each variant has its own set of software, and therefore needs its own configuration. We support having an API model for each variant to support these different configurations. -The model here defines a top-level `Settings` structure, and delegates the actual implementation to a ["settings plugin"](https://github.com/bottlerocket-os/bottlerocket-settings-sdk/tree/develop/bottlerocket-settings-plugin). +The model here defines a top-level `Settings` structure, and delegates the actual implementation to a ["settings plugin"](https://github.com/bottlerocket/bottlerocket-settings-sdk/tree/settings-plugins). Settings plugin are written in Rust as a "cdylib" crate, and loaded at runtime. Each settings plugin must define its own private `Settings` structure. @@ -13,7 +13,7 @@ It can use pre-defined structures inside, or custom ones as needed. `apiserver::datastore` offers serialization and deserialization modules that make it easy to map between Rust types and the data store, and thus, all inputs and outputs are type-checked. -At the field level, standard Rust types can be used, or ["modeled types"](https://github.com/bottlerocket-os/bottlerocket-settings-sdk/tree/develop/bottlerocket-settings-models/modeled-types) that add input validation. 
+At the field level, standard Rust types can be used, or ["modeled types"](src/modeled_types) that add input validation. The `#[model]` attribute on Settings and its sub-structs reduces duplication and adds some required metadata; see [its docs](model-derive/) for details. */ @@ -21,6 +21,9 @@ The `#[model]` attribute on Settings and its sub-structs reduces duplication and // Types used to communicate between client and server for 'apiclient exec'. pub mod exec; +// Types used to handle the settings generator metadata among various systems +pub mod generator; + use bottlerocket_release::BottlerocketRelease; use bottlerocket_settings_models::model_derive::model; use bottlerocket_settings_plugin::BottlerocketSettings; diff --git a/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/Cargo.toml b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/Cargo.toml new file mode 100644 index 00000000000..df0be552f2b --- /dev/null +++ b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "aws-remove-schnauzer-admin" +version = "0.1.0" +authors = ["Shikha Vyaghra "] +license = "Apache-2.0 OR MIT" +edition = "2021" +publish = false +# Don't rebuild crate just because of changes to README. 
+exclude = ["README.md"] + + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +migration-helpers.workspace = true diff --git a/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/src/main.rs b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/src/main.rs new file mode 100644 index 00000000000..d2f949b9e1d --- /dev/null +++ b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-admin/src/main.rs @@ -0,0 +1,25 @@ +use migration_helpers::common_migrations::RemoveSchnauzerMigration; +use migration_helpers::{migrate, Result}; +use std::process; + +const OLD_ADMIN_CTR_CMDLINE: &str = + "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-admin:v0.11.16'"; + +/// We are removing settings.host-containers.admin.source setting +/// to populate it from defaults. +fn run() -> Result<()> { + migrate(RemoveSchnauzerMigration { + setting: "settings.host-containers.admin.source", + old_cmdline: OLD_ADMIN_CTR_CMDLINE, + }) +} + +// Returning a Result from main makes it print a Debug representation of the error, but with Snafu +// we have nice Display representations of the error, so we wrap "main" (run) and print any error. +// https://github.com/shepmaster/snafu/issues/110 +fn main() { + if let Err(e) = run() { + eprintln!("{}", e); + process::exit(1); + } +} diff --git a/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/Cargo.toml b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/Cargo.toml new file mode 100644 index 00000000000..4824049f24c --- /dev/null +++ b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "aws-remove-schnauzer-control" +version = "0.1.0" +authors = ["Shikha Vyaghra "] +license = "Apache-2.0 OR MIT" +edition = "2021" +publish = false +# Don't rebuild crate just because of changes to README. 
+exclude = ["README.md"] + + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +migration-helpers.workspace = true diff --git a/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/src/main.rs b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/src/main.rs new file mode 100644 index 00000000000..f9b8056f56b --- /dev/null +++ b/sources/settings-migrations/v1.33.0/aws-remove-schnauzer-control/src/main.rs @@ -0,0 +1,25 @@ +use migration_helpers::common_migrations::RemoveSchnauzerMigration; +use migration_helpers::{migrate, Result}; +use std::process; + +const OLD_CONTROL_CTR_CMDLINE: &str = + "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-control:v0.7.20'"; + +/// We are removing settings.host-containers.control.source setting +/// to populate it from defaults. +fn run() -> Result<()> { + migrate(RemoveSchnauzerMigration { + setting: "settings.host-containers.control.source", + old_cmdline: OLD_CONTROL_CTR_CMDLINE, + }) +} + +// Returning a Result from main makes it print a Debug representation of the error, but with Snafu +// we have nice Display representations of the error, so we wrap "main" (run) and print any error. +// https://github.com/shepmaster/snafu/issues/110 +fn main() { + if let Err(e) = run() { + eprintln!("{}", e); + process::exit(1); + } +} diff --git a/sources/settings-migrations/v1.33.0/public-remove-source-admin/Cargo.toml b/sources/settings-migrations/v1.33.0/public-remove-source-admin/Cargo.toml new file mode 100644 index 00000000000..5cbc5eaf457 --- /dev/null +++ b/sources/settings-migrations/v1.33.0/public-remove-source-admin/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "public-remove-source-admin" +version = "0.1.0" +authors = ["Shikha Vyaghra "] +license = "Apache-2.0 OR MIT" +edition = "2021" +publish = false +# Don't rebuild crate just because of changes to README. 
+exclude = ["README.md"] + + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +migration-helpers.workspace = true diff --git a/sources/settings-migrations/v1.33.0/public-remove-source-admin/src/main.rs b/sources/settings-migrations/v1.33.0/public-remove-source-admin/src/main.rs new file mode 100644 index 00000000000..91a432fc384 --- /dev/null +++ b/sources/settings-migrations/v1.33.0/public-remove-source-admin/src/main.rs @@ -0,0 +1,24 @@ +use migration_helpers::common_migrations::RemoveMatchingString; +use migration_helpers::{migrate, Result}; +use std::process; + +const OLD_ADMIN_CTR: &str = "public.ecr.aws/bottlerocket/bottlerocket-admin:v0.11.16"; + +/// We are removing settings.host-containers.admin.source setting +/// to populate it from defaults. +fn run() -> Result<()> { + migrate(RemoveMatchingString { + setting: "settings.host-containers.admin.source", + old_val: OLD_ADMIN_CTR, + }) +} + +// Returning a Result from main makes it print a Debug representation of the error, but with Snafu +// we have nice Display representations of the error, so we wrap "main" (run) and print any error. +// https://github.com/shepmaster/snafu/issues/110 +fn main() { + if let Err(e) = run() { + eprintln!("{}", e); + process::exit(1); + } +} diff --git a/sources/settings-migrations/v1.33.0/public-remove-source-control/Cargo.toml b/sources/settings-migrations/v1.33.0/public-remove-source-control/Cargo.toml new file mode 100644 index 00000000000..e4b172646c7 --- /dev/null +++ b/sources/settings-migrations/v1.33.0/public-remove-source-control/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "public-remove-source-control" +version = "0.1.0" +authors = ["Shikha Vyaghra "] +license = "Apache-2.0 OR MIT" +edition = "2021" +publish = false +# Don't rebuild crate just because of changes to README. 
+exclude = ["README.md"] + + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +migration-helpers.workspace = true diff --git a/sources/settings-migrations/v1.33.0/public-remove-source-control/src/main.rs b/sources/settings-migrations/v1.33.0/public-remove-source-control/src/main.rs new file mode 100644 index 00000000000..591989cb123 --- /dev/null +++ b/sources/settings-migrations/v1.33.0/public-remove-source-control/src/main.rs @@ -0,0 +1,24 @@ +use migration_helpers::common_migrations::RemoveMatchingString; +use migration_helpers::{migrate, Result}; +use std::process; + +const OLD_CONTROL_CTR: &str = "public.ecr.aws/bottlerocket/bottlerocket-control:v0.7.20"; + +/// We are removing settings.host-containers.admin.source setting +/// to populate it from defaults. +fn run() -> Result<()> { + migrate(RemoveMatchingString { + setting: "settings.host-containers.control.source", + old_val: OLD_CONTROL_CTR, + }) +} + +// Returning a Result from main makes it print a Debug representation of the error, but with Snafu +// we have nice Display representations of the error, so we wrap "main" (run) and print any error. +// https://github.com/shepmaster/snafu/issues/110 +fn main() { + if let Err(e) = run() { + eprintln!("{}", e); + process::exit(1); + } +} diff --git a/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/Cargo.toml b/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/Cargo.toml new file mode 100644 index 00000000000..44c04a2b33a --- /dev/null +++ b/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "remove-metadata-and-weak-settings-migration" +version = "0.1.0" +authors = ["Shikha Vyaghra "] +license = "Apache-2.0 OR MIT" +edition = "2021" +publish = false +# Don't rebuild crate just because of changes to README. 
+exclude = ["README.md"] + + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +migration-helpers.workspace = true diff --git a/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/src/main.rs b/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/src/main.rs new file mode 100644 index 00000000000..b5c6caf08f0 --- /dev/null +++ b/sources/settings-migrations/v1.33.0/remove-metadata-and-weak-settings-migration/src/main.rs @@ -0,0 +1,18 @@ +use migration_helpers::common_migrations::RemoveMetadataAndWeakSettingsMigration; +use migration_helpers::{migrate, Result}; +use std::process; + +// Remove the weak settings and metadata on downgrade +fn run() -> Result<()> { + migrate(RemoveMetadataAndWeakSettingsMigration) +} + +// Returning a Result from main makes it print a Debug representation of the error, but with Snafu +// we have nice Display representations of the error, so we wrap "main" (run) and print any error. 
+// https://github.com/shepmaster/snafu/issues/110 +fn main() { + if let Err(e) = run() { + eprintln!("{}", e); + process::exit(1); + } +} diff --git a/sources/shared-defaults/aws-host-containers.toml b/sources/shared-defaults/aws-host-containers.toml index c114bb4c6eb..d2ecf72dc34 100644 --- a/sources/shared-defaults/aws-host-containers.toml +++ b/sources/shared-defaults/aws-host-containers.toml @@ -2,8 +2,9 @@ enabled = false superpowered = true -[metadata.settings.host-containers.admin.source] -setting-generator = "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-admin:v0.11.16'" +[metadata.settings.host-containers.admin.source.setting-generator] +command = "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-admin:v0.11.16'" +strength = "weak" [metadata.settings.host-containers.admin.user-data] setting-generator = "shibaken generate-admin-userdata" @@ -12,5 +13,6 @@ setting-generator = "shibaken generate-admin-userdata" enabled = true superpowered = false -[metadata.settings.host-containers.control.source] -setting-generator = "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-control:v0.7.20'" +[metadata.settings.host-containers.control.source.setting-generator] +command = "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-control:v0.7.20'" +strength = "weak" diff --git a/sources/shared-defaults/public-host-containers.toml b/sources/shared-defaults/public-host-containers.toml index 98d304ee758..a752a9de4dc 100644 --- a/sources/shared-defaults/public-host-containers.toml +++ b/sources/shared-defaults/public-host-containers.toml @@ -6,9 +6,15 @@ [settings.host-containers.admin] enabled = false superpowered = true -source = 
"public.ecr.aws/bottlerocket/bottlerocket-admin:v0.11.16" + +[metadata.settings.host-containers.admin.source.setting-generator] +command = "schnauzer-v2 render --template 'public.ecr.aws/bottlerocket/bottlerocket-admin:v0.11.16'" +strength = "weak" [settings.host-containers.control] enabled = false superpowered = false -source = "public.ecr.aws/bottlerocket/bottlerocket-control:v0.7.20" + +[metadata.settings.host-containers.control.source.setting-generator] +command = "schnauzer-v2 render --template 'public.ecr.aws/bottlerocket/bottlerocket-control:v0.7.20'" +strength = "weak"