diff --git a/Cargo.lock b/Cargo.lock index 7a7c0756a..7f93d9436 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3815,6 +3815,7 @@ version = "0.4.5" dependencies = [ "anyhow", "async-recursion", + "async-trait", "async-walkdir", "base64 0.22.1", "buildsys", diff --git a/twoliter/Cargo.toml b/twoliter/Cargo.toml index e2208acb3..20bf9c335 100644 --- a/twoliter/Cargo.toml +++ b/twoliter/Cargo.toml @@ -13,6 +13,7 @@ exclude = ["/design", "/target", "/dockerfiles", "/scripts"] anyhow.workspace = true async-recursion.workspace = true async-walkdir.workspace = true +async-trait.workspace = true base64.workspace = true buildsys-config.workspace = true clap = { workspace = true, features = ["derive", "env", "std"] } diff --git a/twoliter/src/lock.rs b/twoliter/src/lock.rs deleted file mode 100644 index 95cf8b13f..000000000 --- a/twoliter/src/lock.rs +++ /dev/null @@ -1,657 +0,0 @@ -use crate::common::fs::{create_dir_all, read, remove_dir_all, write}; -use crate::project::{Image, Project, ValidIdentifier, Vendor}; -use crate::schema_version::SchemaVersion; -use anyhow::{bail, ensure, Context, Result}; -use base64::Engine; -use futures::pin_mut; -use futures::stream::{self, StreamExt, TryStreamExt}; -use oci_cli_wrapper::{DockerArchitecture, ImageTool}; -use olpc_cjson::CanonicalFormatter as CanonicalJsonFormatter; -use semver::Version; -use serde::de::Error; -use serde::{Deserialize, Deserializer, Serialize}; -use sha2::Digest; -use std::cmp::PartialEq; -use std::collections::{HashMap, HashSet}; -use std::fmt::{Debug, Display, Formatter}; -use std::fs::File; -use std::hash::{Hash, Hasher}; -use std::mem::take; -use std::path::{Path, PathBuf}; -use tar::Archive as TarArchive; -use tokio::fs::read_to_string; -use tracing::{debug, error, info, instrument, trace}; - -const TWOLITER_LOCK: &str = "Twoliter.lock"; - -/// Represents a locked dependency on an image -#[derive(Debug, Clone, Eq, Ord, PartialOrd, Serialize, Deserialize)] -pub(crate) struct LockedImage { - /// The name of the dependency - pub name: String, - /// The version of the dependency - pub version: Version, - /// The vendor this dependency came from - pub vendor: String, - /// The resolved image uri of the dependency - pub source: String, - /// The digest of the image - pub digest: String, - #[serde(skip)] - pub(crate) manifest: Vec, -} - -impl PartialEq for LockedImage { - fn eq(&self, other: &Self) -> bool { - self.source == other.source && self.digest == other.digest - } -} - -impl LockedImage { - pub async fn new(image_tool: &ImageTool, vendor: &Vendor, image: &Image) -> Result { - let source = format!("{}/{}:v{}", vendor.registry, image.name, image.version); - debug!("Pulling image manifest for locked image '{}'", source); - let manifest_bytes = image_tool.get_manifest(source.as_str()).await?; - - // We calculate a 'digest' of the manifest to use as our unique id - let digest = sha2::Sha256::digest(manifest_bytes.as_slice()); - let digest = base64::engine::general_purpose::STANDARD.encode(digest.as_slice()); - trace!( - "Calculated digest for locked image '{}': '{}'", - source, - digest - ); - - Ok(Self { - name: image.name.to_string(), - version: image.version.clone(), - vendor: image.vendor.to_string(), - source, - digest, - manifest: manifest_bytes, - }) - } - - pub fn digest_uri(&self, digest: &str) -> String { - self.source.replace( - format!(":v{}", self.version).as_str(), - format!("@{}", digest).as_str(), - ) - } -} - -impl Display for LockedImage { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - 
f.write_fmt(format_args!( - "{}-{}@{} ({})", - self.name, self.version, self.vendor, self.source, - )) - } -} - -/// The hash should not contain the source to allow for collision detection -impl Hash for LockedImage { - fn hash(&self, state: &mut H) { - self.name.hash(state); - self.version.hash(state); - self.vendor.hash(state); - } -} - -#[derive(Deserialize, Debug, Clone)] -struct ImageMetadata { - /// The name of the kit - #[allow(dead_code)] - pub name: String, - /// The version of the kit - #[allow(dead_code)] - pub version: Version, - /// The required sdk of the kit, - pub sdk: Image, - /// Any dependent kits - #[serde(rename = "kit")] - pub kits: Vec, -} - -impl TryFrom for ImageMetadata { - type Error = anyhow::Error; - - fn try_from(value: EncodedKitMetadata) -> Result { - let bytes = base64::engine::general_purpose::STANDARD - .decode(value.0) - .context("failed to decode kit metadata as base64")?; - serde_json::from_slice(bytes.as_slice()).context("failed to parse kit metadata json") - } -} - -/// Encoded kit metadata, which is embedded in a label of the OCI image config. -#[derive(Clone, Eq, PartialEq)] -struct EncodedKitMetadata(String); - -impl EncodedKitMetadata { - #[instrument(level = "trace")] - async fn try_from_image(image_uri: &str, image_tool: &ImageTool) -> Result { - trace!(image_uri, "Extracting kit metadata from OCI image config"); - let config = image_tool.get_config(image_uri).await?; - let kit_metadata = EncodedKitMetadata( - config - .labels - .get("dev.bottlerocket.kit.v1") - .context("no metadata stored on image, this image appears to not be a kit")? - .to_owned(), - ); - - trace!( - image_uri, - image_config = ?config, - ?kit_metadata, - "Kit metadata retrieved from image config" - ); - - Ok(kit_metadata) - } - - /// Infallible method to provide debugging insights into encoded `ImageMetadata` - /// - /// Shows a `Debug` view of the encoded `ImageMetadata` if possible, otherwise shows - /// the encoded form. 
- fn try_debug_image_metadata(&self) -> String { - self.debug_image_metadata().unwrap_or_else(|| { - format!("", self.0.replace("\n", "\\n")) - }) - } - - fn debug_image_metadata(&self) -> Option { - base64::engine::general_purpose::STANDARD - .decode(&self.0) - .ok() - .and_then(|bytes| serde_json::from_slice(bytes.as_slice()).ok()) - .map(|metadata: ImageMetadata| format!("", metadata)) - } -} - -impl Debug for EncodedKitMetadata { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.try_debug_image_metadata()) - } -} - -#[derive(Deserialize, Debug)] -struct ManifestListView { - manifests: Vec, -} - -#[derive(Deserialize, Debug, Clone)] -struct ManifestView { - digest: String, - platform: Option, -} - -#[derive(Deserialize, Debug, Clone)] -struct Platform { - architecture: DockerArchitecture, -} - -#[derive(Deserialize, Debug)] -struct IndexView { - manifests: Vec, -} - -#[derive(Deserialize, Debug)] -struct ManifestLayoutView { - layers: Vec, -} - -#[derive(Deserialize, Debug)] -struct Layer { - digest: ContainerDigest, -} - -#[derive(Debug)] -struct ContainerDigest(String); - -impl<'de> Deserialize<'de> for ContainerDigest { - fn deserialize(deserializer: D) -> std::result::Result - where - D: Deserializer<'de>, - { - let digest = String::deserialize(deserializer)?; - if !digest.starts_with("sha256:") { - return Err(D::Error::custom(format!( - "invalid digest detected in layer: {}", - digest - ))); - }; - Ok(Self(digest)) - } -} - -impl Display for ContainerDigest { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.write_str(self.0.as_str()) - } -} - -#[derive(Serialize, Debug)] -struct ExternalKitMetadata { - sdk: LockedImage, - #[serde(rename = "kit")] - kits: Vec, -} - -#[derive(Debug)] -struct OCIArchive { - image: LockedImage, - digest: String, - cache_dir: PathBuf, -} - -impl OCIArchive { - fn new
<P>
(image: &LockedImage, digest: &str, cache_dir: P) -> Result - where - P: AsRef, - { - Ok(Self { - image: image.clone(), - digest: digest.into(), - cache_dir: cache_dir.as_ref().to_path_buf(), - }) - } - - fn archive_path(&self) -> PathBuf { - self.cache_dir.join(self.digest.replace(':', "-")) - } - - #[instrument(level = "trace", skip_all, fields(image = %self.image))] - async fn pull_image(&self, image_tool: &ImageTool) -> Result<()> { - debug!("Pulling image '{}'", self.image); - let digest_uri = self.image.digest_uri(self.digest.as_str()); - let oci_archive_path = self.archive_path(); - if !oci_archive_path.exists() { - create_dir_all(&oci_archive_path).await?; - image_tool - .pull_oci_image(oci_archive_path.as_path(), digest_uri.as_str()) - .await?; - } else { - debug!("Image '{}' already present -- no need to pull.", self.image); - } - Ok(()) - } - - #[instrument( - level = "trace", - skip_all, - fields(image = %self.image, out_dir = %out_dir.as_ref().display()), - )] - async fn unpack_layers
<P>
(&self, out_dir: P) -> Result<()> - where - P: AsRef, - { - let path = out_dir.as_ref(); - let digest_file = path.join("digest"); - if digest_file.exists() { - let digest = read_to_string(&digest_file).await.context(format!( - "failed to read digest file at {}", - digest_file.display() - ))?; - if digest == self.digest { - trace!( - "Found existing digest file for image '{}' at '{}'", - self.image, - digest_file.display() - ); - return Ok(()); - } - } - - debug!("Unpacking layers for image '{}'", self.image); - remove_dir_all(path).await?; - create_dir_all(path).await?; - let index_bytes = read(self.archive_path().join("index.json")).await?; - let index: IndexView = serde_json::from_slice(index_bytes.as_slice()) - .context("failed to deserialize oci image index")?; - - // Read the manifest so we can get the layer digests - trace!(image = %self.image, "Extracting layer digests from image manifest"); - let digest = index - .manifests - .first() - .context("empty oci image")? - .digest - .replace(':', "/"); - let manifest_bytes = read(self.archive_path().join(format!("blobs/{digest}"))) - .await - .context("failed to read manifest blob")?; - let manifest_layout: ManifestLayoutView = serde_json::from_slice(manifest_bytes.as_slice()) - .context("failed to deserialize oci manifest")?; - - // Extract each layer into the target directory - trace!(image = %self.image, "Extracting image layers"); - for layer in manifest_layout.layers { - let digest = layer.digest.to_string().replace(':', "/"); - let layer_blob = File::open(self.archive_path().join(format!("blobs/{digest}"))) - .context("failed to read layer of oci image")?; - let mut layer_archive = TarArchive::new(layer_blob); - layer_archive - .unpack(path) - .context("failed to unpack layer to disk")?; - } - write(&digest_file, self.digest.as_str()) - .await - .context(format!( - "failed to record digest to {}", - digest_file.display() - ))?; - - Ok(()) - } -} - -/// Represents the structure of a `Twoliter.lock` lock file. 
-#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub(crate) struct Lock { - /// The version of the Twoliter.toml this was generated from - pub schema_version: SchemaVersion<1>, - /// The resolved bottlerocket sdk - pub sdk: LockedImage, - /// Resolved kit dependencies - pub kit: Vec, -} - -#[allow(dead_code)] -impl Lock { - #[instrument(level = "trace", skip(project))] - pub(crate) async fn create(project: &Project) -> Result { - let lock_file_path = project.project_dir().join(TWOLITER_LOCK); - - info!("Resolving project references to create lock file"); - let lock_state = Self::resolve(project).await?; - let lock_str = toml::to_string(&lock_state).context("failed to serialize lock file")?; - - debug!("Writing new lock file to '{}'", lock_file_path.display()); - write(&lock_file_path, lock_str) - .await - .context("failed to write lock file")?; - Ok(lock_state) - } - - #[instrument(level = "trace", skip(project))] - pub(crate) async fn load(project: &Project) -> Result { - let lock_file_path = project.project_dir().join(TWOLITER_LOCK); - ensure!( - lock_file_path.exists(), - "Twoliter.lock does not exist, please run `twoliter update` first" - ); - debug!("Loading existing lockfile '{}'", lock_file_path.display()); - let lock_str = read_to_string(&lock_file_path) - .await - .context("failed to read lockfile")?; - let lock: Self = - toml::from_str(lock_str.as_str()).context("failed to deserialize lockfile")?; - - info!("Resolving project references to check against lock file"); - let lock_state = Self::resolve(project).await?; - - ensure!(lock_state == lock, "changes have occured to Twoliter.toml or the remote kit images that require an update to Twoliter.lock"); - Ok(lock) - } - - fn external_kit_metadata(&self) -> ExternalKitMetadata { - ExternalKitMetadata { - sdk: self.sdk.clone(), - kits: self.kit.clone(), - } - } - - /// Fetches all external kits defined in a Twoliter.lock to the build directory - #[instrument(level = "trace", skip_all)] - pub(crate) async fn fetch(&self, project: &Project, arch: &str) -> Result<()> { - let image_tool = ImageTool::from_environment()?; - let target_dir = project.external_kits_dir(); - create_dir_all(&target_dir).await.context(format!( - "failed to create external-kits directory at {}", - target_dir.display() - ))?; - - info!( - dependencies = ?self.kit.iter().map(ToString::to_string).collect::>(), - "Extracting kit dependencies." 
- ); - for image in self.kit.iter() { - self.extract_kit(&image_tool, &project.external_kits_dir(), image, arch) - .await?; - } - let mut kit_list = Vec::new(); - let mut ser = - serde_json::Serializer::with_formatter(&mut kit_list, CanonicalJsonFormatter::new()); - self.external_kit_metadata() - .serialize(&mut ser) - .context("failed to serialize external kit metadata")?; - // Compare the output of the serialize if the file exists - let external_metadata_file = project.external_kits_metadata(); - if external_metadata_file.exists() { - let existing = read(&external_metadata_file).await.context(format!( - "failed to read external kit metadata: {}", - external_metadata_file.display() - ))?; - // If this is the same as what we generated skip the write - if existing == kit_list { - return Ok(()); - } - } - write(project.external_kits_metadata(), kit_list.as_slice()) - .await - .context(format!( - "failed to write external kit metadata: {}", - project.external_kits_metadata().display() - ))?; - - Ok(()) - } - - #[instrument(level = "trace", skip(image), fields(image = %image))] - async fn get_manifest( - &self, - image_tool: &ImageTool, - image: &LockedImage, - arch: &str, - ) -> Result { - let manifest_bytes = image_tool.get_manifest(image.source.as_str()).await?; - let manifest_list: ManifestListView = serde_json::from_slice(manifest_bytes.as_slice()) - .context("failed to deserialize manifest list")?; - let docker_arch = DockerArchitecture::try_from(arch)?; - manifest_list - .manifests - .iter() - .find(|x| x.platform.as_ref().unwrap().architecture == docker_arch) - .cloned() - .context(format!( - "could not find kit image for architecture '{}' at {}", - docker_arch, image.source - )) - } - - #[instrument( - level = "trace", - skip(image), - fields(image = %image, path = %path.as_ref().display()) - )] - async fn extract_kit
<P>
( - &self, - image_tool: &ImageTool, - path: P, - image: &LockedImage, - arch: &str, - ) -> Result<()> - where - P: AsRef, - { - info!( - "Extracting kit '{}' to '{}'", - image, - path.as_ref().display() - ); - let vendor = image.vendor.clone(); - let name = image.name.clone(); - let target_path = path.as_ref().join(format!("{vendor}/{name}/{arch}")); - let cache_path = path.as_ref().join("cache"); - create_dir_all(&target_path).await?; - create_dir_all(&cache_path).await?; - - // First get the manifest for the specific requested architecture - let manifest = self.get_manifest(image_tool, image, arch).await?; - let oci_archive = OCIArchive::new(image, manifest.digest.as_str(), &cache_path)?; - - // Checks for the saved image locally, or else pulls and saves it - oci_archive.pull_image(image_tool).await?; - - // Checks if this archive has already been extracted by checking a digest file - // otherwise cleans up the path and unpacks the archive - oci_archive.unpack_layers(&target_path).await?; - - Ok(()) - } - - #[instrument(level = "trace", skip(project))] - async fn resolve(project: &Project) -> Result { - let vendor_table = project.vendor(); - let mut known: HashMap<(ValidIdentifier, ValidIdentifier), Version> = HashMap::new(); - let mut locked: Vec = Vec::new(); - let image_tool = ImageTool::from_environment()?; - - let mut remaining: Vec = project.kits(); - let mut sdk_set: HashSet = HashSet::new(); - if let Some(sdk) = project.sdk_image() { - // We don't scan over the sdk images as they are not kit images and there is no kit metadata to fetch - sdk_set.insert(sdk.clone()); - } - while !remaining.is_empty() { - let working_set: Vec<_> = take(&mut remaining); - for image in working_set.iter() { - debug!(%image, "Resolving kit '{}'", image.name); - if let Some(version) = known.get(&(image.name.clone(), image.vendor.clone())) { - let name = image.name.clone(); - let left_version = image.version.clone(); - let vendor = image.vendor.clone(); - ensure!( - image.version == *version, - "cannot have multiple versions of the same kit ({name}-{left_version}@{vendor} != {name}-{version}@{vendor}", - ); - debug!( - ?image, - "Skipping kit '{}' as it has already been resolved", image.name - ); - continue; - } - let vendor = vendor_table.get(&image.vendor).context(format!( - "vendor '{}' is not specified in Twoliter.toml", - image.vendor - ))?; - known.insert( - (image.name.clone(), image.vendor.clone()), - image.version.clone(), - ); - let locked_image = LockedImage::new(&image_tool, vendor, image).await?; - let kit = Self::find_kit(&image_tool, vendor, &locked_image).await?; - locked.push(locked_image); - sdk_set.insert(kit.sdk); - for dep in kit.kits { - remaining.push(dep); - } - } - } - - debug!(?sdk_set, "Resolving workspace SDK"); - ensure!( - sdk_set.len() <= 1, - "cannot use multiple sdks (found sdk: {})", - sdk_set - .iter() - .map(ToString::to_string) - .collect::>() - .join(", ") - ); - let sdk = sdk_set - .iter() - .next() - .context("no sdk was found for use, please specify a sdk in Twoliter.toml")?; - let vendor = vendor_table.get(&sdk.vendor).context(format!( - "vendor '{}' is not specified in Twoliter.toml", - sdk.vendor - ))?; - Ok(Self { - schema_version: project.schema_version(), - sdk: LockedImage::new(&image_tool, vendor, sdk).await?, - kit: locked, - }) - } - - #[instrument(level = "trace", skip(image), fields(image = %image))] - async fn find_kit( - image_tool: &ImageTool, - vendor: &Vendor, - image: &LockedImage, - ) -> Result { - debug!(kit_image = %image, "Searching for 
kit"); - let manifest_list: ManifestListView = serde_json::from_slice(image.manifest.as_slice()) - .context("failed to deserialize manifest list")?; - trace!(manifest_list = ?manifest_list, "Deserialized manifest list"); - debug!("Extracting kit metadata from OCI image"); - let embedded_kit_metadata = - stream::iter(manifest_list.manifests).then(|manifest| async move { - let image_uri = format!("{}/{}@{}", vendor.registry, image.name, manifest.digest); - EncodedKitMetadata::try_from_image(&image_uri, image_tool).await - }); - pin_mut!(embedded_kit_metadata); - - let canonical_metadata = embedded_kit_metadata - .try_next() - .await? - .context(format!("could not find metadata for kit {}", image))?; - - trace!("Checking that all manifests refer to the same kit."); - while let Some(kit_metadata) = embedded_kit_metadata.try_next().await? { - if kit_metadata != canonical_metadata { - error!( - ?canonical_metadata, - ?kit_metadata, - "Mismatched kit metadata in manifest list" - ); - bail!("Metadata does not match between images in manifest list"); - } - } - - canonical_metadata - .try_into() - .context("Failed to decode and parse kit metadata") - } -} - -#[cfg(test)] -mod test { - use super::*; - #[test] - fn test_try_debug_image_metadata_succeeds() { - // Given a valid encoded metadata string, - // When we attempt to decode it for debugging, - // Then the debug string is marked as having been decoded. - let encoded = EncodedKitMetadata( - "eyJraXQiOltdLCJuYW1lIjoiYm90dGxlcm9ja2V0LWNvcmUta2l0Iiwic2RrIjp7ImRpZ2VzdCI6ImlyY09EUl\ - d3ZmxjTTdzaisrMmszSk5RWkovb3ZDUVRpUlkrRFpvaGdrNlk9IiwibmFtZSI6InRoYXItYmUtYmV0YS1zZGsiL\ - CJzb3VyY2UiOiJwdWJsaWMuZWNyLmF3cy91MWczYzh6NC90aGFyLWJlLWJldGEtc2RrOnYwLjQzLjAiLCJ2ZW5k\ - b3IiOiJib3R0bGVyb2NrZXQtbmV3IiwidmVyc2lvbiI6IjAuNDMuMCJ9LCJ2ZXJzaW9uIjoiMi4wLjAifQo=" - .to_string() - ); - assert!(encoded.debug_image_metadata().is_some()); - } - - #[test] - fn test_try_debug_image_metadata_fails() { - // Given an invalid encoded metadata string, - // When we attempt to decode it for debugging, - // Then the debug string is marked as remaining encoded. - let junk_data = EncodedKitMetadata("abcdefghijklmnophello".to_string()); - assert!(junk_data.debug_image_metadata().is_none()); - } -} diff --git a/twoliter/src/lock/archive.rs b/twoliter/src/lock/archive.rs new file mode 100644 index 000000000..b9f74f36e --- /dev/null +++ b/twoliter/src/lock/archive.rs @@ -0,0 +1,126 @@ +use super::views::{IndexView, ManifestLayoutView}; +use crate::common::fs::{create_dir_all, read, read_to_string, remove_dir_all, write}; +use anyhow::{Context, Result}; +use oci_cli_wrapper::ImageTool; +use std::fs::File; +use std::path::{Path, PathBuf}; +use tar::Archive as TarArchive; +use tracing::{debug, instrument, trace}; + +#[derive(Debug)] +pub(crate) struct OCIArchive { + registry: String, + repository: String, + digest: String, + cache_dir: PathBuf, +} + +impl OCIArchive { + pub fn new
<P>
(registry: &str, repository: &str, digest: &str, cache_dir: P) -> Result + where + P: AsRef, + { + Ok(Self { + registry: registry.into(), + repository: repository.into(), + digest: digest.into(), + cache_dir: cache_dir.as_ref().to_path_buf(), + }) + } + + pub fn archive_path(&self) -> PathBuf { + self.cache_dir.join(self.digest.replace(':', "-")) + } + + pub fn uri(&self) -> String { + format!("{}/{}@{}", self.registry, self.repository, self.digest) + } + + #[instrument(level = "trace", skip_all, fields(registry = %self.registry, repository = %self.repository, digest = %self.digest))] + pub async fn pull_image(&self, image_tool: &ImageTool) -> Result<()> { + let digest_uri = self.uri(); + debug!("Pulling image '{}'", digest_uri); + let oci_archive_path = self.archive_path(); + if !oci_archive_path.exists() { + create_dir_all(&oci_archive_path).await?; + image_tool + .pull_oci_image(oci_archive_path.as_path(), digest_uri.as_str()) + .await?; + } else { + debug!( + "Image from '{}' already present -- no need to pull.", + digest_uri + ); + } + Ok(()) + } + + #[instrument( + level = "trace", + skip_all, + fields(registry = %self.registry, repository = %self.repository, digest = %self.digest, out_dir = %out_dir.as_ref().display()), + )] + pub async fn unpack_layers
<P>
(&self, out_dir: P) -> Result<()> + where + P: AsRef, + { + let path = out_dir.as_ref(); + let digest_file = path.join("digest"); + let digest_uri = self.uri(); + if digest_file.exists() { + let digest = read_to_string(&digest_file).await.context(format!( + "failed to read digest file at {}", + digest_file.display() + ))?; + if digest == self.digest { + trace!( + "Found existing digest file for image from '{}' at '{}'", + digest_uri, + digest_file.display() + ); + return Ok(()); + } + } + + debug!("Unpacking layers for image from '{}'", digest_uri); + remove_dir_all(path).await?; + create_dir_all(path).await?; + let index_bytes = read(self.archive_path().join("index.json")).await?; + let index: IndexView = serde_json::from_slice(index_bytes.as_slice()) + .context("failed to deserialize oci image index")?; + + // Read the manifest so we can get the layer digests + trace!(from = %digest_uri, "Extracting layer digests from image manifest"); + let digest = index + .manifests + .first() + .context("empty oci image")? + .digest + .replace(':', "/"); + let manifest_bytes = read(self.archive_path().join(format!("blobs/{digest}"))) + .await + .context("failed to read manifest blob")?; + let manifest_layout: ManifestLayoutView = serde_json::from_slice(manifest_bytes.as_slice()) + .context("failed to deserialize oci manifest")?; + + // Extract each layer into the target directory + trace!(from = %digest_uri, "Extracting image layers"); + for layer in manifest_layout.layers { + let digest = layer.digest.to_string().replace(':', "/"); + let layer_blob = File::open(self.archive_path().join(format!("blobs/{digest}"))) + .context("failed to read layer of oci image")?; + let mut layer_archive = TarArchive::new(layer_blob); + layer_archive + .unpack(path) + .context("failed to unpack layer to disk")?; + } + write(&digest_file, self.digest.as_str()) + .await + .context(format!( + "failed to record digest to {}", + digest_file.display() + ))?; + + Ok(()) + } +} diff --git a/twoliter/src/lock/image.rs b/twoliter/src/lock/image.rs new file mode 100644 index 000000000..558a3262f --- /dev/null +++ b/twoliter/src/lock/image.rs @@ -0,0 +1,433 @@ +use super::archive::OCIArchive; +use super::views::ManifestListView; +use super::Override; +use crate::common::fs::create_dir_all; +use crate::docker::ImageUri; +use crate::project::{Image, Vendor}; +use anyhow::{bail, Context, Result}; +use base64::Engine; +use futures::{pin_mut, stream, StreamExt, TryStreamExt}; +use log::trace; +use oci_cli_wrapper::{DockerArchitecture, ImageTool}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use sha2::Digest; +use std::fmt::{Debug, Display, Formatter}; +use std::path::Path; +use tracing::{debug, error, info, instrument}; + +/// Represents a locked dependency on an image +#[derive(Debug, Clone, Eq, Ord, PartialOrd, Serialize, Deserialize)] +pub(crate) struct LockedImage { + /// The name of the dependency + pub name: String, + /// The version of the dependency + pub version: Version, + /// The vendor this dependency came from + pub vendor: String, + /// The resolved image uri of the dependency + pub source: String, + /// The digest of the image + pub digest: String, +} + +impl PartialEq for LockedImage { + fn eq(&self, other: &Self) -> bool { + self.source == other.source && self.digest == other.digest + } +} + +impl Display for LockedImage { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!( + "{}-{}@{} ({})", + self.name, self.version, self.vendor, self.source, + )) + } +} 
+ +#[derive(Deserialize, Debug, Clone)] +pub(crate) struct ImageMetadata { + /// The name of the kit + #[allow(dead_code)] + pub name: String, + /// The version of the kit + #[allow(dead_code)] + pub version: Version, + /// The required sdk of the kit, + pub sdk: Image, + /// Any dependent kits + #[serde(rename = "kit")] + pub kits: Vec, +} + +impl TryFrom for ImageMetadata { + type Error = anyhow::Error; + + fn try_from(value: EncodedKitMetadata) -> Result { + let bytes = base64::engine::general_purpose::STANDARD + .decode(value.0) + .context("failed to decode kit metadata as base64")?; + serde_json::from_slice(bytes.as_slice()).context("failed to parse kit metadata json") + } +} + +/// Encoded kit metadata, which is embedded in a label of the OCI image config. +#[derive(Clone, Eq, PartialEq)] +pub(crate) struct EncodedKitMetadata(String); + +impl EncodedKitMetadata { + #[instrument(level = "trace")] + async fn try_from_image(image_uri: &str, image_tool: &ImageTool) -> Result { + tracing::trace!(image_uri, "Extracting kit metadata from OCI image config"); + let config = image_tool.get_config(image_uri).await?; + let kit_metadata = EncodedKitMetadata( + config + .labels + .get("dev.bottlerocket.kit.v1") + .context("no metadata stored on image, this image appears to not be a kit")? + .to_owned(), + ); + + tracing::trace!( + image_uri, + image_config = ?config, + ?kit_metadata, + "Kit metadata retrieved from image config" + ); + + Ok(kit_metadata) + } + + /// Infallible method to provide debugging insights into encoded `ImageMetadata` + /// + /// Shows a `Debug` view of the encoded `ImageMetadata` if possible, otherwise shows + /// the encoded form. + fn try_debug_image_metadata(&self) -> String { + self.debug_image_metadata().unwrap_or_else(|| { + format!("", self.0.replace("\n", "\\n")) + }) + } + + fn debug_image_metadata(&self) -> Option { + base64::engine::general_purpose::STANDARD + .decode(&self.0) + .ok() + .and_then(|bytes| serde_json::from_slice(bytes.as_slice()).ok()) + .map(|metadata: ImageMetadata| format!("", metadata)) + } +} + +impl Debug for EncodedKitMetadata { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.try_debug_image_metadata()) + } +} + +pub trait ImageResolverImpl: Debug { + fn name(&self) -> String; + fn version(&self) -> Result; + fn vendor(&self) -> String; + fn source(&self) -> String; + fn uri(&self) -> ImageUri; +} + +#[derive(Debug)] +pub struct VerbatimImage { + uri: ImageUri, + vendor: String, +} + +impl ImageResolverImpl for VerbatimImage { + fn name(&self) -> String { + self.uri.repo.clone() + } + + fn version(&self) -> Result { + Version::parse(self.uri.tag.trim_start_matches('v')).context("invalid version tag") + } + + fn source(&self) -> String { + self.uri.to_string() + } + + fn vendor(&self) -> String { + self.vendor.clone() + } + + fn uri(&self) -> ImageUri { + self.uri.clone() + } +} + +#[derive(Debug)] +pub struct OverriddenImage { + base_uri: ImageUri, + vendor: String, + override_: Override, +} + +impl ImageResolverImpl for OverriddenImage { + fn name(&self) -> String { + self.base_uri.repo.clone() + } + + fn version(&self) -> Result { + Version::parse(self.base_uri.tag.trim_start_matches('v')).context("invalid version tag") + } + + fn source(&self) -> String { + self.base_uri.to_string() + } + + fn vendor(&self) -> String { + self.vendor.clone() + } + + fn uri(&self) -> ImageUri { + ImageUri { + registry: self + .override_ + .registry + .clone() + .or(self.base_uri.registry.clone()), + repo: self + 
.override_ + .name + .clone() + .unwrap_or(self.base_uri.repo.clone()), + tag: self.base_uri.tag.clone(), + } + } +} + +#[derive(Debug)] +pub struct ImageResolver { + image_resolver_impl: Box, +} + +impl ImageResolver { + pub(crate) fn from_image( + image: &Image, + vendor_name: &str, + vendor: &Vendor, + override_: Option<&Override>, + ) -> Self { + Self { + image_resolver_impl: if let Some(override_) = override_ { + Box::new(OverriddenImage { + base_uri: ImageUri { + registry: Some(vendor.registry.clone()), + repo: image.name.to_string(), + tag: format!("v{}", image.version), + }, + vendor: vendor_name.to_string(), + override_: override_.clone(), + }) + } else { + Box::new(VerbatimImage { + vendor: vendor_name.to_string(), + uri: ImageUri { + registry: Some(vendor.registry.clone()), + repo: image.name.to_string(), + tag: format!("v{}", image.version), + }, + }) + }, + } + } + + pub(crate) fn from_locked_image( + locked_image: &LockedImage, + vendor_name: &str, + vendor: &Vendor, + override_: Option<&Override>, + ) -> Self { + Self { + image_resolver_impl: if let Some(override_) = override_ { + Box::new(OverriddenImage { + base_uri: ImageUri { + registry: Some(vendor.registry.clone()), + repo: locked_image.name.to_string(), + tag: format!("v{}", locked_image.version), + }, + vendor: vendor_name.to_string(), + override_: override_.clone(), + }) + } else { + Box::new(VerbatimImage { + vendor: vendor_name.to_string(), + uri: ImageUri { + registry: Some(vendor.registry.clone()), + repo: locked_image.name.to_string(), + tag: format!("v{}", locked_image.version), + }, + }) + }, + } + } + + /// Calculate the digest of the locked image + async fn calculate_digest(&self, image_tool: &ImageTool) -> Result { + let image_uri = self.image_resolver_impl.uri(); + let image_uri_str = image_uri.to_string(); + let manifest_bytes = image_tool.get_manifest(image_uri_str.as_str()).await?; + let digest = sha2::Sha256::digest(manifest_bytes.as_slice()); + let digest = base64::engine::general_purpose::STANDARD.encode(digest.as_slice()); + trace!( + "Calculated digest for locked image '{}': '{}'", + image_uri, + digest, + ); + Ok(digest) + } + + async fn get_manifest(&self, image_tool: &ImageTool) -> Result { + let uri = self.image_resolver_impl.uri().to_string(); + let manifest_bytes = image_tool.get_manifest(uri.as_str()).await?; + serde_json::from_slice(manifest_bytes.as_slice()) + .context("failed to deserialize manifest list") + } + + pub(crate) async fn resolve( + &self, + image_tool: &ImageTool, + skip_metadata: bool, + ) -> Result<(LockedImage, Option)> { + // First get the manifest list + let uri = self.image_resolver_impl.uri(); + let manifest_list = self.get_manifest(image_tool).await?; + let registry = uri + .registry + .as_ref() + .context("no registry found for image")?; + + let locked_image = LockedImage { + name: self.image_resolver_impl.name(), + version: self.image_resolver_impl.version()?, + vendor: self.image_resolver_impl.vendor(), + // The source is the image uri without the tag, which is the digest + source: self.image_resolver_impl.source(), + digest: self.calculate_digest(image_tool).await?, + }; + + if skip_metadata { + return Ok((locked_image, None)); + } + + debug!("Extracting kit metadata from OCI image"); + let embedded_kit_metadata = stream::iter(manifest_list.manifests).then(|manifest| { + let registry = registry.clone(); + let repo = uri.repo.clone(); + async move { + let image_uri = format!("{registry}/{repo}@{}", manifest.digest); + EncodedKitMetadata::try_from_image(&image_uri, 
image_tool).await + } + }); + pin_mut!(embedded_kit_metadata); + + let canonical_metadata = embedded_kit_metadata + .try_next() + .await? + .context(format!("could not find metadata for kit {}", uri))?; + + trace!("Checking that all manifests refer to the same kit."); + while let Some(kit_metadata) = embedded_kit_metadata.try_next().await? { + if kit_metadata != canonical_metadata { + error!( + ?canonical_metadata, + ?kit_metadata, + "Mismatched kit metadata in manifest list" + ); + bail!("Metadata does not match between images in manifest list"); + } + } + let metadata = canonical_metadata + .try_into() + .context("Failed to decode and parse kit metadata")?; + + Ok((locked_image, Some(metadata))) + } + + #[instrument( + level = "trace", + fields(uri = %self.image_resolver_impl.uri(), path = %path.as_ref().display()) + )] + pub(crate) async fn extract
<P>
(&self, image_tool: &ImageTool, path: P, arch: &str) -> Result<()> + where + P: AsRef, + { + info!( + "Extracting kit '{}' to '{}'", + self.image_resolver_impl.name(), + path.as_ref().display() + ); + let target_path = path.as_ref().join(format!( + "{}/{}/{arch}", + self.image_resolver_impl.vendor(), + self.image_resolver_impl.name() + )); + let cache_path = path.as_ref().join("cache"); + create_dir_all(&target_path).await?; + create_dir_all(&cache_path).await?; + + // First get the manifest for the specific requested architecture + let uri = self.image_resolver_impl.uri(); + let manifest_list = self.get_manifest(image_tool).await?; + let docker_arch = DockerArchitecture::try_from(arch)?; + let manifest = manifest_list + .manifests + .iter() + .find(|x| x.platform.as_ref().unwrap().architecture == docker_arch) + .cloned() + .context(format!( + "could not find image for architecture '{}' at {}", + docker_arch, uri + ))?; + + let registry = uri.registry.context("failed to resolve image registry")?; + let oci_archive = OCIArchive::new( + registry.as_str(), + uri.repo.as_str(), + manifest.digest.as_str(), + &cache_path, + )?; + + // Checks for the saved image locally, or else pulls and saves it + oci_archive.pull_image(image_tool).await?; + + // Checks if this archive has already been extracted by checking a digest file + // otherwise cleans up the path and unpacks the archive + oci_archive.unpack_layers(&target_path).await?; + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + #[test] + fn test_try_debug_image_metadata_succeeds() { + // Given a valid encoded metadata string, + // When we attempt to decode it for debugging, + // Then the debug string is marked as having been decoded. + let encoded = EncodedKitMetadata( + "eyJraXQiOltdLCJuYW1lIjoiYm90dGxlcm9ja2V0LWNvcmUta2l0Iiwic2RrIjp7ImRpZ2VzdCI6ImlyY09EUl\ + d3ZmxjTTdzaisrMmszSk5RWkovb3ZDUVRpUlkrRFpvaGdrNlk9IiwibmFtZSI6InRoYXItYmUtYmV0YS1zZGsiL\ + CJzb3VyY2UiOiJwdWJsaWMuZWNyLmF3cy91MWczYzh6NC90aGFyLWJlLWJldGEtc2RrOnYwLjQzLjAiLCJ2ZW5k\ + b3IiOiJib3R0bGVyb2NrZXQtbmV3IiwidmVyc2lvbiI6IjAuNDMuMCJ9LCJ2ZXJzaW9uIjoiMi4wLjAifQo=" + .to_string() + ); + assert!(encoded.debug_image_metadata().is_some()); + } + + #[test] + fn test_try_debug_image_metadata_fails() { + // Given an invalid encoded metadata string, + // When we attempt to decode it for debugging, + // Then the debug string is marked as remaining encoded. + let junk_data = EncodedKitMetadata("abcdefghijklmnophello".to_string()); + assert!(junk_data.debug_image_metadata().is_none()); + } +} diff --git a/twoliter/src/lock/mod.rs b/twoliter/src/lock/mod.rs new file mode 100644 index 000000000..47f70e4f7 --- /dev/null +++ b/twoliter/src/lock/mod.rs @@ -0,0 +1,266 @@ +/// Covers the functionality and implementation of Twoliter.lock which is generated using +/// `twoliter update`. It acts similarly to Cargo.lock as a flattened out representation of all kit +/// and sdk image dependencies with associated digests so twoliter can validate that contents of a kit +/// do not mutate unexpectedly. 
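For orientation, here is a minimal sketch of the TOML shape that the `Lock` and `LockedImage` types in this module serialize to. The field names follow the serde definitions (kebab-case on `Lock`); the vendor, versions, sources, and digests shown are hypothetical, and the real file is generated by `twoliter update`.

```toml
# Hypothetical Twoliter.lock contents -- values are illustrative only.
schema-version = 1

[sdk]
name = "bottlerocket-sdk"
version = "0.43.0"
vendor = "bottlerocket"
source = "public.ecr.aws/bottlerocket/bottlerocket-sdk:v0.43.0"
digest = "ircODRWwflcM7sj++2k3JNQZJ/ovCQTiRY+DZohgk6Y="

[[kit]]
name = "bottlerocket-core-kit"
version = "2.0.0"
vendor = "bottlerocket"
source = "public.ecr.aws/bottlerocket/bottlerocket-core-kit:v2.0.0"
digest = "cGxhY2Vob2xkZXItZGlnZXN0LW9ubHk="
```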
+ +/// Contains operations for working with an OCI Archive +pub mod archive; +/// Covers resolution and validation of a single image dependency in a lock file +pub mod image; +/// Implements view models of common OCI manifest and configuration types +pub mod views; + +use crate::common::fs::{create_dir_all, read, write}; +use crate::project::{Image, Project, ValidIdentifier}; +use crate::schema_version::SchemaVersion; +use anyhow::{ensure, Context, Result}; +use image::{ImageResolver, LockedImage}; +use oci_cli_wrapper::ImageTool; +use olpc_cjson::CanonicalFormatter as CanonicalJsonFormatter; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::cmp::PartialEq; +use std::collections::{HashMap, HashSet}; +use std::fmt::Debug; +use std::mem::take; +use tokio::fs::read_to_string; +use tracing::{debug, info, instrument}; + +const TWOLITER_LOCK: &str = "Twoliter.lock"; + +#[derive(Serialize, Debug)] +struct ExternalKitMetadata { + sdk: LockedImage, + #[serde(rename = "kit")] + kits: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub(crate) struct Override { + pub name: Option, + pub registry: Option, +} + +/// Represents the structure of a `Twoliter.lock` lock file. +#[derive(Debug, Clone, Eq, Ord, PartialOrd, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub(crate) struct Lock { + /// The version of the Twoliter.toml this was generated from + pub schema_version: SchemaVersion<1>, + /// The resolved bottlerocket sdk + pub sdk: LockedImage, + /// Resolved kit dependencies + pub kit: Vec, +} + +impl PartialEq for Lock { + fn eq(&self, other: &Self) -> bool { + self.schema_version == other.schema_version + && self.sdk == other.sdk + && self.kit == other.kit + } +} + +#[allow(dead_code)] +impl Lock { + #[instrument(level = "trace", skip(project))] + pub(crate) async fn create(project: &Project) -> Result { + let lock_file_path = project.project_dir().join(TWOLITER_LOCK); + + info!("Resolving project references to create lock file"); + let lock_state = Self::resolve(project).await?; + let lock_str = toml::to_string(&lock_state).context("failed to serialize lock file")?; + + debug!("Writing new lock file to '{}'", lock_file_path.display()); + write(&lock_file_path, lock_str) + .await + .context("failed to write lock file")?; + Ok(lock_state) + } + + #[instrument(level = "trace", skip(project))] + pub(crate) async fn load(project: &Project) -> Result { + let lock_file_path = project.project_dir().join(TWOLITER_LOCK); + ensure!( + lock_file_path.exists(), + "Twoliter.lock does not exist, please run `twoliter update` first" + ); + debug!("Loading existing lockfile '{}'", lock_file_path.display()); + let lock_str = read_to_string(&lock_file_path) + .await + .context("failed to read lockfile")?; + let lock: Self = + toml::from_str(lock_str.as_str()).context("failed to deserialize lockfile")?; + info!("Resolving project references to check against lock file"); + let lock_state = Self::resolve(project).await?; + + ensure!(lock_state == lock, "changes have occured to Twoliter.toml or the remote kit images that require an update to Twoliter.lock"); + Ok(lock) + } + + fn external_kit_metadata(&self) -> ExternalKitMetadata { + ExternalKitMetadata { + sdk: self.sdk.clone(), + kits: self.kit.clone(), + } + } + + /// Fetches all external kits defined in a Twoliter.lock to the build directory + #[instrument(level = "trace", skip_all)] + pub(crate) async fn fetch(&self, project: &Project, arch: &str) -> 
Result<()> { + let image_tool = ImageTool::from_environment()?; + let target_dir = project.external_kits_dir(); + create_dir_all(&target_dir).await.context(format!( + "failed to create external-kits directory at {}", + target_dir.display() + ))?; + + info!( + dependencies = ?self.kit.iter().map(ToString::to_string).collect::>(), + "Extracting kit dependencies." + ); + for image in self.kit.iter() { + let vendor = project + .vendor() + .get(&ValidIdentifier(image.vendor.clone())) + .context(format!( + "failed to find vendor for kit with name '{}' and vendor '{}'", + image.name, image.vendor + ))?; + let override_ = project + .overrides() + .get(&image.vendor) + .and_then(|x| x.get(&image.name)); + let resolver = + ImageResolver::from_locked_image(image, image.vendor.as_str(), vendor, override_); + resolver + .extract(&image_tool, &project.external_kits_dir(), arch) + .await?; + } + + self.synchronize_metadata(project).await + } + + pub(crate) async fn synchronize_metadata(&self, project: &Project) -> Result<()> { + let mut kit_list = Vec::new(); + let mut ser = + serde_json::Serializer::with_formatter(&mut kit_list, CanonicalJsonFormatter::new()); + self.external_kit_metadata() + .serialize(&mut ser) + .context("failed to serialize external kit metadata")?; + // Compare the output of the serialize if the file exists + let external_metadata_file = project.external_kits_metadata(); + if external_metadata_file.exists() { + let existing = read(&external_metadata_file).await.context(format!( + "failed to read external kit metadata: {}", + external_metadata_file.display() + ))?; + // If this is the same as what we generated skip the write + if existing == kit_list { + return Ok(()); + } + } + write(project.external_kits_metadata(), kit_list.as_slice()) + .await + .context(format!( + "failed to write external kit metadata: {}", + project.external_kits_metadata().display() + ))?; + Ok(()) + } + + #[instrument(level = "trace", skip(project))] + async fn resolve(project: &Project) -> Result { + let vendor_table = project.vendor(); + let mut known: HashMap<(ValidIdentifier, ValidIdentifier), Version> = HashMap::new(); + let mut locked: Vec = Vec::new(); + let image_tool = ImageTool::from_environment()?; + let overrides = project.overrides(); + let mut remaining: Vec = project.kits(); + let mut sdk_set: HashSet = HashSet::new(); + if let Some(sdk) = project.sdk_image() { + // We don't scan over the sdk images as they are not kit images and there is no kit metadata to fetch + sdk_set.insert(sdk.clone()); + } + while !remaining.is_empty() { + let working_set: Vec<_> = take(&mut remaining); + for image in working_set.iter() { + debug!(%image, "Resolving kit '{}'", image.name); + if let Some(version) = known.get(&(image.name.clone(), image.vendor.clone())) { + let name = image.name.clone(); + let left_version = image.version.clone(); + let vendor = image.vendor.clone(); + ensure!( + image.version == *version, + "cannot have multiple versions of the same kit ({name}-{left_version}@{vendor} != {name}-{version}@{vendor}", + ); + debug!( + ?image, + "Skipping kit '{}' as it has already been resolved", image.name + ); + continue; + } + let vendor = vendor_table.get(&image.vendor).context(format!( + "vendor '{}' is not specified in Twoliter.toml", + image.vendor + ))?; + known.insert( + (image.name.clone(), image.vendor.clone()), + image.version.clone(), + ); + let override_ = overrides + .get(&image.vendor.to_string()) + .and_then(|x| x.get(&image.name.to_string())); + if let Some(override_) = 
override_.as_ref() { + debug!( + ?override_, + "Found override for kit '{}' with vendor '{}'", image.name, image.vendor + ); + } + let image_resolver = + ImageResolver::from_image(image, image.vendor.0.as_str(), vendor, override_); + let (locked_image, metadata) = image_resolver.resolve(&image_tool, false).await?; + let metadata = metadata.context(format!( + "failed to validate kit image with name {} from vendor {}", + locked_image.name, locked_image.vendor + ))?; + locked.push(locked_image); + sdk_set.insert(metadata.sdk); + for dep in metadata.kits { + remaining.push(dep); + } + } + } + + debug!(?sdk_set, "Resolving workspace SDK"); + ensure!( + sdk_set.len() <= 1, + "cannot use multiple sdks (found sdk: {})", + sdk_set + .iter() + .map(ToString::to_string) + .collect::>() + .join(", ") + ); + let sdk = sdk_set + .iter() + .next() + .context("no sdk was found for use, please specify a sdk in Twoliter.toml")?; + let vendor = vendor_table.get(&sdk.vendor).context(format!( + "vendor '{}' is not specified in Twoliter.toml", + sdk.vendor + ))?; + let sdk_override = overrides + .get(&sdk.vendor.to_string()) + .and_then(|x| x.get(&sdk.name.to_string())); + let sdk_resolver = + ImageResolver::from_image(sdk, sdk.vendor.0.as_str(), vendor, sdk_override); + let (sdk, _) = sdk_resolver.resolve(&image_tool, true).await?; + Ok(Self { + schema_version: project.schema_version(), + sdk, + kit: locked, + }) + } +} diff --git a/twoliter/src/lock/views.rs b/twoliter/src/lock/views.rs new file mode 100644 index 000000000..e5cd6db08 --- /dev/null +++ b/twoliter/src/lock/views.rs @@ -0,0 +1,60 @@ +use oci_cli_wrapper::DockerArchitecture; +use serde::de::Error; +use serde::{Deserialize, Deserializer}; +use std::fmt::{Display, Formatter}; + +#[derive(Deserialize, Debug)] +pub(crate) struct ManifestListView { + pub manifests: Vec, +} + +#[derive(Deserialize, Debug, Clone)] +pub(crate) struct ManifestView { + pub digest: String, + pub platform: Option, +} + +#[derive(Deserialize, Debug, Clone)] +pub(crate) struct Platform { + pub architecture: DockerArchitecture, +} + +#[derive(Deserialize, Debug)] +pub(crate) struct IndexView { + pub manifests: Vec, +} + +#[derive(Deserialize, Debug)] +pub(crate) struct ManifestLayoutView { + pub layers: Vec, +} + +#[derive(Deserialize, Debug)] +pub(crate) struct Layer { + pub digest: ContainerDigest, +} + +#[derive(Debug)] +pub(crate) struct ContainerDigest(String); + +impl<'de> Deserialize<'de> for ContainerDigest { + fn deserialize(deserializer: D) -> std::result::Result + where + D: Deserializer<'de>, + { + let digest = String::deserialize(deserializer)?; + if !digest.starts_with("sha256:") { + return Err(D::Error::custom(format!( + "invalid digest detected in layer: {}", + digest + ))); + }; + Ok(Self(digest)) + } +} + +impl Display for ContainerDigest { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(self.0.as_str()) + } +} diff --git a/twoliter/src/project.rs b/twoliter/src/project.rs index 1e0151aa6..5904a1f0c 100644 --- a/twoliter/src/project.rs +++ b/twoliter/src/project.rs @@ -1,5 +1,6 @@ -use crate::common::fs; +use crate::common::fs::{self, read_to_string}; use crate::docker::ImageUri; +use crate::lock::Override; use crate::schema_version::SchemaVersion; use anyhow::{ensure, Context, Result}; use async_recursion::async_recursion; @@ -17,6 +18,8 @@ use std::path::{Path, PathBuf}; use toml::Table; use tracing::{debug, info, instrument, trace, warn}; +const TWOLITER_OVERRIDES: &str = "Twoliter.override"; + /// Common functionality in 
commands, if the user gave a path to the `Twoliter.toml` file, /// we use it, otherwise we search for the file. Returns the `Project` and the path at which it was /// found (this is the same as `user_path` if provided). @@ -34,12 +37,9 @@ pub(crate) async fn load_or_find_project(user_path: Option) -> Result

, + + overrides: BTreeMap>, } impl Project { @@ -105,6 +107,10 @@ impl Project { self.filepath.clone() } + pub(crate) fn overrides(&self) -> &BTreeMap> { + &self.overrides + } + pub(crate) fn project_dir(&self) -> PathBuf { self.project_dir.clone() } @@ -305,18 +311,39 @@ impl UnvalidatedProject { self.check_vendor_availability().await?; self.check_release_toml(&project_dir).await?; + let overrides = self.check_and_load_overrides(&project_dir).await?; Ok(Project { filepath, - project_dir, + project_dir: project_dir.clone(), schema_version: self.schema_version, release_version: self.release_version, sdk: self.sdk, vendor: self.vendor.unwrap_or_default(), kit: self.kit.unwrap_or_default(), + overrides, }) } + /// Checks if an override file exists and if so loads it + async fn check_and_load_overrides( + &self, + path: impl AsRef, + ) -> Result>> { + let overrides_file_path = path.as_ref().join(TWOLITER_OVERRIDES); + if !overrides_file_path.exists() { + return Ok(BTreeMap::new()); + } + info!("Detected override file, loading override information"); + let overrides_str = read_to_string(&overrides_file_path) + .await + .context("failed to read overrides file")?; + let overrides: BTreeMap> = + toml::from_str(overrides_str.as_str()) + .context("failed to deserialize overrides file")?; + Ok(overrides) + } + /// Errors if the user has defined a sdk and/or kit dependency without specifying the associated /// vendor async fn check_vendor_availability(&self) -> Result<()> { diff --git a/twoliter/src/test/cargo_make.rs b/twoliter/src/test/cargo_make.rs index da0c3c787..41ffcf11b 100644 --- a/twoliter/src/test/cargo_make.rs +++ b/twoliter/src/test/cargo_make.rs @@ -1,7 +1,9 @@ +use std::collections::BTreeMap; + use semver::Version; use serde::Deserialize; -use crate::lock::{Lock, LockedImage}; +use crate::lock::{image::LockedImage, Lock}; use crate::project::ValidIdentifier; use crate::{cargo_make::CargoMake, project::Project, test::data_dir}; @@ -17,11 +19,10 @@ async fn test_cargo_make() { kit: Vec::new(), sdk: LockedImage { name: "my-bottlerocket-sdk".to_string(), - version: version, + version, vendor: "my-vendor".to_string(), source: format!("{}/{}:v{}", vendor.registry, "my-bottlerocket-sdk", "1.2.3"), digest: "abc".to_string(), - manifest: Vec::new(), }, }; let cargo_make = CargoMake::new(&lock.sdk.source)
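Taken together, the `Override` struct added in `lock/mod.rs` and `check_and_load_overrides` in `project.rs` imply that `Twoliter.override` is a TOML file keyed first by vendor and then by kit name, with optional `name` and `registry` fields. A minimal sketch under that assumption, using hypothetical vendor and kit names:

```toml
# Hypothetical Twoliter.override -- keyed as [vendor.kit-name], matching how
# fetch() and resolve() look up overrides (vendor first, then kit name).
[my-vendor.my-core-kit]
registry = "localhost:5000"   # used instead of the vendor's registry
name = "my-core-kit-dev"      # used instead of the upstream repository name
```

Per `OverriddenImage::uri`, each omitted field falls back to the value derived from `Twoliter.toml`, and the tag (the kit version) is never overridden.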