From eb8ac03b85ff46e805190135784e8ea862b61686 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 16:59:14 -0500 Subject: [PATCH 01/23] Update prebuilt extensions to ext source --- src/commands/ext/build.rs | 18 ++ src/commands/ext/clean.rs | 9 + src/commands/ext/deps.rs | 3 + src/commands/ext/dnf.rs | 12 + src/commands/ext/fetch.rs | 216 ++++++++++++++ src/commands/ext/image.rs | 18 ++ src/commands/ext/install.rs | 20 ++ src/commands/ext/mod.rs | 2 + src/commands/ext/package.rs | 458 ++++++++--------------------- src/commands/fetch.rs | 3 + src/commands/install.rs | 77 ++--- src/commands/sdk/install.rs | 44 +++ src/main.rs | 42 ++- src/utils/config.rs | 554 ++++++++++++++++++++++++++++++++++-- src/utils/ext_fetch.rs | 415 +++++++++++++++++++++++++++ src/utils/lockfile.rs | 11 +- src/utils/mod.rs | 1 + src/utils/stamps.rs | 18 +- 18 files changed, 1510 insertions(+), 411 deletions(-) create mode 100644 src/commands/ext/fetch.rs create mode 100644 src/utils/ext_fetch.rs diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index 7baeab8..3577d5f 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use crate::commands::sdk::SdkCompileCommand; @@ -145,6 +148,15 @@ impl ExtBuildCommand { let ext_config_path = match &extension_location { ExtensionLocation::Local { config_path, .. } => config_path.clone(), ExtensionLocation::External { config_path, .. } => config_path.clone(), + ExtensionLocation::Remote { name, .. 
} => { + // Remote extensions are installed to $AVOCADO_PREFIX/includes// + let ext_install_path = + config.get_extension_install_path(&self.config_path, name, &target); + ext_install_path + .join("avocado.yaml") + .to_string_lossy() + .to_string() + } }; if self.verbose { @@ -161,6 +173,12 @@ impl ExtBuildCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!("Found remote extension '{name}' with source: {source:?}"), + OutputLevel::Normal, + ); + } } } diff --git a/src/commands/ext/clean.rs b/src/commands/ext/clean.rs index 2f5f0d9..0291310 100644 --- a/src/commands/ext/clean.rs +++ b/src/commands/ext/clean.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::Result; use crate::utils::config::{Config, ExtensionLocation}; @@ -74,6 +77,12 @@ impl ExtCleanCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!("Found remote extension '{name}' with source: {source:?}"), + OutputLevel::Normal, + ); + } } } Ok(location) diff --git a/src/commands/ext/deps.rs b/src/commands/ext/deps.rs index 7c1cf95..793fe06 100644 --- a/src/commands/ext/deps.rs +++ b/src/commands/ext/deps.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::Result; use std::collections::HashSet; diff --git a/src/commands/ext/dnf.rs b/src/commands/ext/dnf.rs index f5d1abb..50db198 100644 --- a/src/commands/ext/dnf.rs +++ b/src/commands/ext/dnf.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::Result; use crate::utils::config::{Config, ExtensionLocation}; @@ -91,6 +94,12 @@ impl ExtDnfCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!("Found remote extension '{name}' with source: {source:?}"), + 
OutputLevel::Normal, + ); + } } } Ok(location) @@ -173,6 +182,7 @@ impl ExtDnfCommand { let extension_name = match extension_location { ExtensionLocation::Local { name, .. } => name, ExtensionLocation::External { name, .. } => name, + ExtensionLocation::Remote { name, .. } => name, }; let check_cmd = format!("test -d $AVOCADO_EXT_SYSROOTS/{extension_name}"); @@ -222,6 +232,7 @@ impl ExtDnfCommand { let extension_name = match extension_location { ExtensionLocation::Local { name, .. } => name, ExtensionLocation::External { name, .. } => name, + ExtensionLocation::Remote { name, .. } => name, }; let setup_cmd = format!( "mkdir -p $AVOCADO_EXT_SYSROOTS/{extension_name}/var/lib && cp -rf $AVOCADO_PREFIX/rootfs/var/lib/rpm $AVOCADO_EXT_SYSROOTS/{extension_name}/var/lib" @@ -306,6 +317,7 @@ impl ExtDnfCommand { let extension_name = match extension_location { ExtensionLocation::Local { name, .. } => name, ExtensionLocation::External { name, .. } => name, + ExtensionLocation::Remote { name, .. } => name, }; let installroot = format!("$AVOCADO_EXT_SYSROOTS/{extension_name}"); let command_args_str = self.command.join(" "); diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs new file mode 100644 index 0000000..f9a2b3e --- /dev/null +++ b/src/commands/ext/fetch.rs @@ -0,0 +1,216 @@ +//! Extension fetch command implementation. +//! +//! This command fetches remote extensions from various sources (repo, git, path) +//! and installs them to `$AVOCADO_PREFIX/includes//`. 
+ +use anyhow::{Context, Result}; + +use crate::utils::config::{Config, ExtensionSource}; +use crate::utils::ext_fetch::ExtensionFetcher; +use crate::utils::output::{print_info, print_success, print_warning, OutputLevel}; +use crate::utils::target::resolve_target_required; + +/// Command to fetch remote extensions +pub struct ExtFetchCommand { + /// Path to configuration file + pub config_path: String, + /// Specific extension to fetch (if None, fetches all remote extensions) + pub extension: Option, + /// Enable verbose output + pub verbose: bool, + /// Force re-fetch even if already installed + pub force: bool, + /// Target architecture + pub target: Option, + /// Additional arguments to pass to the container runtime + pub container_args: Option>, +} + +impl ExtFetchCommand { + /// Create a new ExtFetchCommand instance + pub fn new( + config_path: String, + extension: Option, + verbose: bool, + force: bool, + target: Option, + container_args: Option>, + ) -> Self { + Self { + config_path, + extension, + verbose, + force, + target, + container_args, + } + } + + /// Execute the fetch command + pub async fn execute(&self) -> Result<()> { + // Load configuration + let config = Config::load(&self.config_path) + .with_context(|| format!("Failed to load config from {}", self.config_path))?; + + // Resolve target + let target = resolve_target_required(self.target.as_deref(), &config)?; + + // Get container image + let container_image = config + .get_sdk_image() + .ok_or_else(|| anyhow::anyhow!("No SDK container image specified in configuration"))?; + + // Discover remote extensions + let remote_extensions = Config::discover_remote_extensions(&self.config_path)?; + + if remote_extensions.is_empty() { + print_info( + "No remote extensions found in configuration.", + OutputLevel::Normal, + ); + return Ok(()); + } + + // Filter to specific extension if requested + let extensions_to_fetch: Vec<(String, ExtensionSource)> = + if let Some(ref ext_name) = self.extension { + 
remote_extensions + .into_iter() + .filter(|(name, _)| name == ext_name) + .collect() + } else { + remote_extensions + }; + + if extensions_to_fetch.is_empty() { + if let Some(ref ext_name) = self.extension { + return Err(anyhow::anyhow!( + "Extension '{}' not found in configuration or is not a remote extension", + ext_name + )); + } + return Ok(()); + } + + // Get the extensions install directory + let extensions_dir = config.get_extensions_dir(&self.config_path, &target); + + // Ensure the extensions directory exists + std::fs::create_dir_all(&extensions_dir).with_context(|| { + format!( + "Failed to create extensions directory: {}", + extensions_dir.display() + ) + })?; + + if self.verbose { + print_info( + &format!( + "Fetching {} remote extension(s) to {}", + extensions_to_fetch.len(), + extensions_dir.display() + ), + OutputLevel::Normal, + ); + } + + // Create the fetcher + let fetcher = ExtensionFetcher::new( + self.config_path.clone(), + target.clone(), + container_image.to_string(), + self.verbose, + ) + .with_repo_url(config.get_sdk_repo_url()) + .with_repo_release(config.get_sdk_repo_release()) + .with_container_args(config.merge_sdk_container_args(self.container_args.as_ref())); + + // Fetch each extension + let mut fetched_count = 0; + let mut skipped_count = 0; + + for (ext_name, source) in &extensions_to_fetch { + // Check if already installed + if !self.force && ExtensionFetcher::is_extension_installed(&extensions_dir, ext_name) { + if self.verbose { + print_info( + &format!("Extension '{ext_name}' is already installed, skipping (use --force to re-fetch)"), + OutputLevel::Normal, + ); + } + skipped_count += 1; + continue; + } + + print_info( + &format!("Fetching extension '{ext_name}'..."), + OutputLevel::Normal, + ); + + match fetcher.fetch(ext_name, source, &extensions_dir).await { + Ok(install_path) => { + print_success( + &format!( + "Successfully fetched extension '{ext_name}' to {}", + install_path.display() + ), + OutputLevel::Normal, + ); + 
fetched_count += 1; + } + Err(e) => { + print_warning( + &format!("Failed to fetch extension '{ext_name}': {e}"), + OutputLevel::Normal, + ); + // Continue with other extensions instead of failing entirely + } + } + } + + // Summary + if fetched_count > 0 || skipped_count > 0 { + let mut summary_parts = Vec::new(); + if fetched_count > 0 { + summary_parts.push(format!("{fetched_count} fetched")); + } + if skipped_count > 0 { + summary_parts.push(format!("{skipped_count} skipped")); + } + print_info( + &format!("Extension fetch complete: {}", summary_parts.join(", ")), + OutputLevel::Normal, + ); + } + + Ok(()) + } + + /// Get the list of remote extensions that would be fetched + #[allow(dead_code)] + pub fn get_remote_extensions(&self) -> Result> { + Config::discover_remote_extensions(&self.config_path) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ext_fetch_command_creation() { + let cmd = ExtFetchCommand::new( + "avocado.yaml".to_string(), + Some("test-ext".to_string()), + true, + false, + Some("x86_64-unknown-linux-gnu".to_string()), + None, + ); + + assert_eq!(cmd.config_path, "avocado.yaml"); + assert_eq!(cmd.extension, Some("test-ext".to_string())); + assert!(cmd.verbose); + assert!(!cmd.force); + } +} diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index 82a6145..a2a886d 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use crate::utils::config::{Config, ExtensionLocation}; @@ -135,6 +138,15 @@ impl ExtImageCommand { let ext_config_path = match &extension_location { ExtensionLocation::Local { config_path, .. } => config_path.clone(), ExtensionLocation::External { config_path, .. } => config_path.clone(), + ExtensionLocation::Remote { name, .. 
} => { + // Remote extensions are installed to $AVOCADO_PREFIX/includes// + let ext_install_path = + config.get_extension_install_path(&self.config_path, name, &target); + ext_install_path + .join("avocado.yaml") + .to_string_lossy() + .to_string() + } }; if self.verbose { @@ -151,6 +163,12 @@ impl ExtImageCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!("Found remote extension '{name}' with source: {source:?}"), + OutputLevel::Normal, + ); + } } } diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index f225b11..ddc5691 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use std::path::{Path, PathBuf}; @@ -106,6 +109,14 @@ impl ExtInstallCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!( + "Found remote extension '{name}' with source: {source:?}" + ), + OutputLevel::Normal, + ); + } } } vec![(extension_name.clone(), location)] @@ -284,6 +295,15 @@ impl ExtInstallCommand { .to_string_lossy() .to_string() } + ExtensionLocation::Remote { name, .. 
} => { + // Remote extensions are installed to $AVOCADO_PREFIX/includes// + let ext_install_path = + config.get_extension_install_path(&self.config_path, name, target); + ext_install_path + .join("avocado.yaml") + .to_string_lossy() + .to_string() + } }; if !self diff --git a/src/commands/ext/mod.rs b/src/commands/ext/mod.rs index b8c5f0e..36297f5 100644 --- a/src/commands/ext/mod.rs +++ b/src/commands/ext/mod.rs @@ -3,6 +3,7 @@ pub mod checkout; pub mod clean; pub mod deps; pub mod dnf; +pub mod fetch; pub mod image; pub mod install; pub mod list; @@ -17,6 +18,7 @@ pub use clean::ExtCleanCommand; pub use deps::ExtDepsCommand; #[allow(unused_imports)] pub use dnf::ExtDnfCommand; +pub use fetch::ExtFetchCommand; pub use image::ExtImageCommand; pub use install::ExtInstallCommand; #[allow(unused_imports)] diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index f67fd81..ae5c9fb 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use std::collections::HashMap; @@ -5,11 +8,10 @@ use std::fs; use std::path::PathBuf; use crate::utils::config::{Config, ExtensionLocation}; -use crate::utils::container::{RunConfig, SdkContainer}; -use crate::utils::output::{print_info, print_success, OutputLevel}; -use crate::utils::stamps::{ - generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement, -}; +use crate::utils::container::SdkContainer; +use crate::utils::output::{print_info, print_success, print_warning, OutputLevel}; +// Note: Stamp imports removed - we no longer validate build stamps for packaging +// since we now package src_dir instead of built sysroot use crate::utils::target::resolve_target_required; /// Command to package an extension sysroot into an RPM @@ -22,6 +24,9 @@ pub struct ExtPackageCommand { pub container_args: Option>, #[allow(dead_code)] pub dnf_args: 
Option>, + /// Note: no_stamps is kept for API compatibility but is not used for ext package + /// since we now package src_dir directly without requiring build stamps. + #[allow(dead_code)] pub no_stamps: bool, } @@ -57,54 +62,22 @@ impl ExtPackageCommand { // Load configuration let config = Config::load(&self.config_path)?; - // Resolve target early for stamp validation + // Resolve target let target = resolve_target_required(self.target.as_deref(), &config)?; - // Validate stamps before proceeding (unless --no-stamps) - // Package requires extension to be installed AND built - if !self.no_stamps { - let container_image = config - .get_sdk_image() - .context("No SDK container image specified in configuration")?; - let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); - - let requirements = vec![ - StampRequirement::sdk_install(), - StampRequirement::ext_install(&self.extension), - StampRequirement::ext_build(&self.extension), - ]; - - let batch_script = generate_batch_read_stamps_script(&requirements); - let run_config = RunConfig { - container_image: container_image.to_string(), - target: target.clone(), - command: batch_script, - verbose: false, - source_environment: true, - interactive: false, - repo_url: config.get_sdk_repo_url(), - repo_release: config.get_sdk_repo_release(), - container_args: config.merge_sdk_container_args(self.container_args.as_ref()), - ..Default::default() - }; - - let output = container_helper - .run_in_container_with_output(run_config) - .await?; - - let validation = - validate_stamps_batch(&requirements, output.as_deref().unwrap_or(""), None); - - if !validation.is_satisfied() { - let error = validation - .into_error(&format!("Cannot package extension '{}'", self.extension)); - return Err(error.into()); - } - } + // With the new src_dir packaging approach, we no longer require + // ext_install and ext_build stamps. We're packaging the source directory, + // not the built sysroot. 
The consumer will build the extension themselves. + // + // Issue a warning to remind users to test builds before packaging. + print_warning( + "Packaging extension source directory. It is recommended to run \ + 'avocado ext build' before packaging to verify the extension builds correctly.", + OutputLevel::Normal, + ); - // Read config content for extension SDK dependencies parsing - let content = std::fs::read_to_string(&self.config_path)?; + // Note: We no longer need to parse SDK dependencies since they're merged + // from the extension's config when it's installed // Find extension using comprehensive lookup let extension_location = config @@ -117,6 +90,15 @@ impl ExtPackageCommand { let ext_config_path = match &extension_location { ExtensionLocation::Local { config_path, .. } => config_path.clone(), ExtensionLocation::External { config_path, .. } => config_path.clone(), + ExtensionLocation::Remote { name, .. } => { + // Remote extensions are installed to $AVOCADO_PREFIX/includes// + let ext_install_path = + config.get_extension_install_path(&self.config_path, name, &target); + ext_install_path + .join("avocado.yaml") + .to_string_lossy() + .to_string() + } }; if self.verbose { @@ -133,6 +115,12 @@ impl ExtPackageCommand { OutputLevel::Normal, ); } + ExtensionLocation::Remote { name, source } => { + print_info( + &format!("Found remote extension '{name}' with source: {source:?}"), + OutputLevel::Normal, + ); + } } } @@ -158,8 +146,9 @@ impl ExtPackageCommand { } // Create main RPM package in container + // This packages the extension's src_dir (directory containing avocado.yaml) let output_path = self - .create_rpm_package_in_container(&rpm_metadata, &config, &target) + .create_rpm_package_in_container(&rpm_metadata, &config, &target, &ext_config_path) .await?; print_success( @@ -170,36 +159,8 @@ impl ExtPackageCommand { OutputLevel::Normal, ); - // Check if extension has SDK dependencies and create SDK package if needed - let sdk_dependencies = 
self.get_extension_sdk_dependencies(&config, &content, &target)?; - if !sdk_dependencies.is_empty() { - if self.verbose { - print_info( - &format!( - "Extension '{}' has SDK dependencies, creating SDK package...", - self.extension - ), - OutputLevel::Normal, - ); - } - - let sdk_output_path = self - .create_sdk_rpm_package_in_container( - &rpm_metadata, - &config, - &sdk_dependencies, - &target, - ) - .await?; - - print_success( - &format!( - "Successfully created SDK RPM package: {}", - sdk_output_path.display() - ), - OutputLevel::Normal, - ); - } + // Note: SDK dependencies are now merged from the extension's config when installed, + // so we no longer need to create a separate SDK package. Ok(()) } @@ -348,12 +309,17 @@ impl ExtPackageCommand { Ok(()) } - /// Create the RPM package inside the container at $AVOCADO_PREFIX/output/extensions + /// Create the RPM package containing the extension's src_dir + /// + /// The package root (/) maps to the extension's src_dir contents. + /// This allows the extension to be installed to $AVOCADO_PREFIX/includes// + /// and its config merged into the main config. 
async fn create_rpm_package_in_container( &self, metadata: &RpmMetadata, config: &Config, target: &str, + ext_config_path: &str, ) -> Result { let container_image = config .get_sdk_image() @@ -367,6 +333,20 @@ impl ExtPackageCommand { crate::utils::volume::VolumeManager::new("docker".to_string(), self.verbose); let volume_state = volume_manager.get_or_create_volume(&cwd).await?; + // Determine the extension's src_dir (directory containing avocado.yaml) + let ext_src_dir = std::path::Path::new(ext_config_path) + .parent() + .unwrap_or(std::path::Path::new(".")) + .to_string_lossy() + .to_string(); + + // Convert to container path (relative paths become /opt/src/) + let container_src_dir = if ext_src_dir.starts_with('/') { + ext_src_dir.clone() + } else { + format!("/opt/src/{}", ext_src_dir) + }; + // Create the RPM filename let rpm_filename = format!( "{}-{}-{}.{}.rpm", @@ -374,23 +354,35 @@ impl ExtPackageCommand { ); // Create RPM using rpmbuild in container + // Package root (/) maps to the extension's src_dir contents let rpm_build_script = format!( r#" +set -e + +# Extension source directory +EXT_SRC_DIR="{container_src_dir}" + # Ensure output directory exists mkdir -p $AVOCADO_PREFIX/output/extensions -# Check if extension sysroot exists -if [ ! -d "$AVOCADO_EXT_SYSROOTS/{}" ]; then - echo "Extension sysroot not found: $AVOCADO_EXT_SYSROOTS/{}" +# Check if extension source directory exists +if [ ! -d "$EXT_SRC_DIR" ]; then + echo "Extension source directory not found: $EXT_SRC_DIR" + exit 1 +fi + +# Check for avocado config file +if [ ! -f "$EXT_SRC_DIR/avocado.yaml" ] && [ ! -f "$EXT_SRC_DIR/avocado.yml" ] && [ ! -f "$EXT_SRC_DIR/avocado.toml" ]; then + echo "No avocado.yaml/yml/toml found in $EXT_SRC_DIR" exit 1 fi # Count files -FILE_COUNT=$(find "$AVOCADO_EXT_SYSROOTS/{}" -type f | wc -l) -echo "Creating RPM with $FILE_COUNT files..." +FILE_COUNT=$(find "$EXT_SRC_DIR" -type f | wc -l) +echo "Creating RPM with $FILE_COUNT files from source directory..." 
if [ "$FILE_COUNT" -eq 0 ]; then - echo "No files found in sysroot" + echo "No files found in source directory" exit 1 fi @@ -402,20 +394,21 @@ cd "$TMPDIR" mkdir -p BUILD RPMS SOURCES SPECS SRPMS # Create spec file +# Package root (/) maps to the extension's src_dir cat > SPECS/package.spec << 'SPEC_EOF' %define _buildhost reproducible AutoReqProv: no -Name: {} -Version: {} -Release: {} -Summary: {} -License: {} -Vendor: {} -Group: {}{} +Name: {name} +Version: {version} +Release: {release} +Summary: {summary} +License: {license} +Vendor: {vendor} +Group: {group}{url_line} %description -{} +{description} %files /* @@ -428,7 +421,9 @@ Group: {}{} %install mkdir -p %{{buildroot}} -cp -rp $AVOCADO_EXT_SYSROOTS/{}/* %{{buildroot}}/ +# Copy src_dir contents to buildroot root +# This allows installation to $AVOCADO_PREFIX/includes// +cp -rp "$EXT_SRC_DIR"/* %{{buildroot}}/ %clean # Skip clean section - not needed for our use case @@ -436,45 +431,38 @@ cp -rp $AVOCADO_EXT_SYSROOTS/{}/* %{{buildroot}}/ %changelog SPEC_EOF -# Build the RPM with custom architecture target and define the arch macro -rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --target {} -bb SPECS/package.spec +# Build the RPM with custom architecture target +rpmbuild --define "_topdir $TMPDIR" --define "_arch {arch}" --target {arch} -bb SPECS/package.spec # Move RPM to output directory -mv RPMS/{}/*.rpm $AVOCADO_PREFIX/output/extensions/{} || {{ - mv RPMS/*/*.rpm $AVOCADO_PREFIX/output/extensions/{} 2>/dev/null || {{ +mv RPMS/{arch}/*.rpm $AVOCADO_PREFIX/output/extensions/{rpm_filename} || {{ + mv RPMS/*/*.rpm $AVOCADO_PREFIX/output/extensions/{rpm_filename} 2>/dev/null || {{ echo "Failed to find built RPM" exit 1 }} }} -echo "RPM created successfully: $AVOCADO_PREFIX/output/extensions/{}" +echo "RPM created successfully: $AVOCADO_PREFIX/output/extensions/{rpm_filename}" # Cleanup rm -rf "$TMPDIR" "#, - self.extension, - self.extension, - self.extension, - metadata.name, - metadata.version, - 
metadata.release, - metadata.summary, - metadata.license, - metadata.vendor, - metadata.group, - if let Some(url) = &metadata.url { + name = metadata.name, + version = metadata.version, + release = metadata.release, + summary = metadata.summary, + license = metadata.license, + vendor = metadata.vendor, + group = metadata.group, + url_line = if let Some(url) = &metadata.url { format!("\nURL: {url}") } else { String::new() }, - metadata.description, - self.extension, - metadata.arch, - metadata.arch, - metadata.arch, - rpm_filename, - rpm_filename, - rpm_filename, + description = metadata.description, + arch = metadata.arch, + rpm_filename = rpm_filename, + container_src_dir = container_src_dir, ); // Run the RPM build in the container @@ -636,6 +624,7 @@ rm -rf "$TMPDIR" } /// Get SDK dependencies for the current extension + #[allow(dead_code)] fn get_extension_sdk_dependencies( &self, config: &Config, @@ -657,6 +646,7 @@ rm -rf "$TMPDIR" } /// Create the SDK RPM package inside the container at $AVOCADO_PREFIX/output/extensions + #[allow(dead_code)] async fn create_sdk_rpm_package_in_container( &self, metadata: &RpmMetadata, @@ -1213,37 +1203,20 @@ ext: } // ======================================================================== - // Stamp Dependency Tests + // Note: Stamp Dependency Tests Removed // ======================================================================== - - #[test] - fn test_package_stamp_requirements() { - use crate::utils::stamps::get_local_arch; - - // ext package requires: SDK install + ext install + ext build - // Verify the stamp requirements are correct - let requirements = [ - StampRequirement::sdk_install(), - StampRequirement::ext_install("my-ext"), - StampRequirement::ext_build("my-ext"), - ]; - - // Verify correct stamp paths (SDK path includes local architecture) - assert_eq!( - requirements[0].relative_path(), - format!("sdk/{}/install.stamp", get_local_arch()) - ); - assert_eq!(requirements[1].relative_path(), 
"ext/my-ext/install.stamp"); - assert_eq!(requirements[2].relative_path(), "ext/my-ext/build.stamp"); - - // Verify fix commands are correct - assert_eq!(requirements[0].fix_command(), "avocado sdk install"); - assert_eq!( - requirements[1].fix_command(), - "avocado ext install -e my-ext" - ); - assert_eq!(requirements[2].fix_command(), "avocado ext build -e my-ext"); - } + // The stamp validation tests have been removed because ext package now + // packages the extension's src_dir directly instead of the built sysroot. + // This means we no longer require ext_install and ext_build stamps before + // packaging - the consumer will build the extension themselves. + // + // The old behavior required: + // - SDK install stamp + // - Extension install stamp + // - Extension build stamp + // + // The new behavior only requires the extension's avocado.yaml to exist + // in its src_dir. #[test] fn test_package_with_no_stamps_flag() { @@ -1257,192 +1230,11 @@ ext: None, ); - // Default should have stamps enabled + // Default should have stamps enabled (though not used for src_dir packaging) assert!(!cmd.no_stamps); // Test with_no_stamps builder let cmd = cmd.with_no_stamps(true); assert!(cmd.no_stamps); } - - #[test] - fn test_package_fails_without_sdk_install() { - use crate::utils::stamps::{get_local_arch, validate_stamps_batch}; - - let requirements = vec![ - StampRequirement::sdk_install(), - StampRequirement::ext_install("my-ext"), - StampRequirement::ext_build("my-ext"), - ]; - - // All stamps missing - let output = format!( - "sdk/{}/install.stamp:::null\next/my-ext/install.stamp:::null\next/my-ext/build.stamp:::null", - get_local_arch() - ); - let result = validate_stamps_batch(&requirements, &output, None); - - assert!(!result.is_satisfied()); - assert_eq!(result.missing.len(), 3); - } - - #[test] - fn test_package_fails_without_ext_build() { - use crate::utils::stamps::{ - get_local_arch, validate_stamps_batch, Stamp, StampInputs, StampOutputs, - }; - - let 
requirements = vec![ - StampRequirement::sdk_install(), - StampRequirement::ext_install("my-ext"), - StampRequirement::ext_build("my-ext"), - ]; - - // SDK and ext install present, but build missing - let sdk_stamp = Stamp::sdk_install( - get_local_arch(), - StampInputs::new("hash1".to_string()), - StampOutputs::default(), - ); - let ext_install_stamp = Stamp::ext_install( - "my-ext", - "qemux86-64", - StampInputs::new("hash2".to_string()), - StampOutputs::default(), - ); - - let sdk_json = serde_json::to_string(&sdk_stamp).unwrap(); - let ext_json = serde_json::to_string(&ext_install_stamp).unwrap(); - - let output = format!( - "sdk/{}/install.stamp:::{}\next/my-ext/install.stamp:::{}\next/my-ext/build.stamp:::null", - get_local_arch(), - sdk_json, - ext_json - ); - - let result = validate_stamps_batch(&requirements, &output, None); - - assert!(!result.is_satisfied()); - assert_eq!(result.missing.len(), 1); - assert_eq!(result.missing[0].relative_path(), "ext/my-ext/build.stamp"); - } - - #[test] - fn test_package_succeeds_with_all_stamps() { - use crate::utils::stamps::{ - get_local_arch, validate_stamps_batch, Stamp, StampInputs, StampOutputs, - }; - - let requirements = vec![ - StampRequirement::sdk_install(), - StampRequirement::ext_install("my-ext"), - StampRequirement::ext_build("my-ext"), - ]; - - // All stamps present - let sdk_stamp = Stamp::sdk_install( - get_local_arch(), - StampInputs::new("hash1".to_string()), - StampOutputs::default(), - ); - let ext_install_stamp = Stamp::ext_install( - "my-ext", - "qemux86-64", - StampInputs::new("hash2".to_string()), - StampOutputs::default(), - ); - let ext_build_stamp = Stamp::ext_build( - "my-ext", - "qemux86-64", - StampInputs::new("hash3".to_string()), - StampOutputs::default(), - ); - - let sdk_json = serde_json::to_string(&sdk_stamp).unwrap(); - let ext_install_json = serde_json::to_string(&ext_install_stamp).unwrap(); - let ext_build_json = serde_json::to_string(&ext_build_stamp).unwrap(); - - let output = 
format!( - "sdk/{}/install.stamp:::{}\next/my-ext/install.stamp:::{}\next/my-ext/build.stamp:::{}", - get_local_arch(), - sdk_json, - ext_install_json, - ext_build_json - ); - - let result = validate_stamps_batch(&requirements, &output, None); - - assert!(result.is_satisfied()); - assert_eq!(result.satisfied.len(), 3); - } - - #[test] - fn test_package_clean_lifecycle() { - use crate::utils::stamps::{ - get_local_arch, validate_stamps_batch, Stamp, StampInputs, StampOutputs, - }; - - let requirements = vec![ - StampRequirement::sdk_install(), - StampRequirement::ext_install("gpu-driver"), - StampRequirement::ext_build("gpu-driver"), - ]; - - // Before clean: all stamps present - let sdk_stamp = Stamp::sdk_install( - get_local_arch(), - StampInputs::new("hash1".to_string()), - StampOutputs::default(), - ); - let ext_install = Stamp::ext_install( - "gpu-driver", - "qemux86-64", - StampInputs::new("hash2".to_string()), - StampOutputs::default(), - ); - let ext_build = Stamp::ext_build( - "gpu-driver", - "qemux86-64", - StampInputs::new("hash3".to_string()), - StampOutputs::default(), - ); - - let sdk_json = serde_json::to_string(&sdk_stamp).unwrap(); - let install_json = serde_json::to_string(&ext_install).unwrap(); - let build_json = serde_json::to_string(&ext_build).unwrap(); - - let output_before = format!( - "sdk/{}/install.stamp:::{}\next/gpu-driver/install.stamp:::{}\next/gpu-driver/build.stamp:::{}", - get_local_arch(), - sdk_json, - install_json, - build_json - ); - - let result_before = validate_stamps_batch(&requirements, &output_before, None); - assert!( - result_before.is_satisfied(), - "Should be satisfied before clean" - ); - - // After ext clean: SDK still there, ext stamps gone (simulating rm -rf .stamps/ext/gpu-driver) - let output_after = format!( - "sdk/{}/install.stamp:::{}\next/gpu-driver/install.stamp:::null\next/gpu-driver/build.stamp:::null", - get_local_arch(), - sdk_json - ); - - let result_after = validate_stamps_batch(&requirements, 
&output_after, None); - assert!(!result_after.is_satisfied(), "Should fail after clean"); - assert_eq!( - result_after.missing.len(), - 2, - "Both ext stamps should be missing" - ); - assert!( - result_after.satisfied.len() == 1, - "Only SDK should be satisfied" - ); - } } diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 3255534..170972c 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -1,3 +1,6 @@ +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use std::collections::HashSet; use tokio::process::Command as AsyncCommand; diff --git a/src/commands/install.rs b/src/commands/install.rs index 776a63b..e68074e 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -1,5 +1,8 @@ //! Install command implementation that runs SDK, extension, and runtime installs. +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use std::path::PathBuf; @@ -15,14 +18,18 @@ use crate::utils::{ target::validate_and_log_target, }; -/// Represents an extension dependency that can be either local or external +/// Represents an extension dependency #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ExtensionDependency { - /// Extension defined in the main config file + /// Extension defined in the config (local or fetched remote) Local(String), - /// Extension defined in an external config file + /// DEPRECATED: Extension from an external config file + /// Use source: path in the ext section instead + #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] External { name: String, config_path: String }, - /// Extension resolved via DNF with a version specification + /// DEPRECATED: Extension resolved via DNF with a version specification + /// Use source: repo in the ext section instead + #[deprecated(since = "0.23.0", note = "Use Local with source: repo instead")] 
Versioned { name: String, version: String }, } @@ -108,7 +115,7 @@ impl InstallCommand { ); // Load lock file for reproducible builds (used for versioned extensions in this command) - let src_dir = config + let _src_dir = config .get_resolved_src_dir(&self.config_path) .unwrap_or_else(|| { PathBuf::from(&self.config_path) @@ -117,8 +124,8 @@ impl InstallCommand { .to_path_buf() }); - // We'll load the lock file lazily when needed (for external/versioned extensions) - let mut lock_file; + // Note: Lock file loading for external/versioned extensions has been removed + // as those deprecated code paths now error out with migration messages // 1. Install SDK dependencies print_info("Step 1/3: Installing SDK dependencies", OutputLevel::Normal); @@ -178,38 +185,31 @@ impl InstallCommand { name, config_path: ext_config_path, } => { - if self.verbose { - print_info( - &format!("Installing external extension dependencies for '{name}' from config '{ext_config_path}'"), - OutputLevel::Normal, - ); - } - - // Reload lock file from disk to get latest state from previous installs - lock_file = LockFile::load(&src_dir)?; - - // Install external extension to ${AVOCADO_PREFIX}/extensions/ - self.install_external_extension(config, &self.config_path, name, ext_config_path, &_target, &mut lock_file).await.with_context(|| { - format!("Failed to install external extension '{name}' from config '{ext_config_path}'") - })?; + // DEPRECATED: config: syntax is no longer supported + // Users should migrate to the new source-based approach in the ext section + return Err(anyhow::anyhow!( + "Deprecated 'config:' syntax found for extension '{name}' with config '{ext_config_path}'.\n\n\ + The 'config:' syntax for external extensions is no longer supported.\n\n\ + To use extensions from another path, define them in the 'ext' section with a 'source' field:\n\n\ + ext:\n {name}:\n source:\n type: path\n path: \"{ext_config_path}\"\n\n\ + Then reference the extension in runtime dependencies simply by 
name:\n\n\ + runtime:\n your-runtime:\n dependencies:\n {name}: ext\n\n\ + Path-based extensions are automatically processed during config loading." + )); } ExtensionDependency::Versioned { name, version } => { - if self.verbose { - print_info( - &format!( - "Installing versioned extension '{name}' version '{version}'" - ), - OutputLevel::Normal, - ); - } - - // Reload lock file from disk to get latest state from previous installs - lock_file = LockFile::load(&src_dir)?; - - // Install versioned extension to its own sysroot - self.install_versioned_extension(config, name, version, &_target, &mut lock_file).await.with_context(|| { - format!("Failed to install versioned extension '{name}' version '{version}'") - })?; + // DEPRECATED: vsn: syntax is no longer supported + // Users should migrate to the new source-based approach in the ext section + return Err(anyhow::anyhow!( + "Deprecated 'vsn:' syntax found for extension '{name}' version '{version}'.\n\n\ + The 'vsn:' syntax for versioned extensions is no longer supported.\n\n\ + To use remote extensions, define them in the 'ext' section with a 'source' field:\n\n\ + ext:\n {name}:\n source:\n type: repo\n version: \"{version}\"\n\n\ + Then reference the extension in runtime dependencies simply by name:\n\n\ + runtime:\n your-runtime:\n dependencies:\n {name}: ext\n\n\ + Remote extensions are automatically fetched during 'avocado sdk install' or\n\ + can be manually fetched with 'avocado ext fetch'." 
+ )); } } } @@ -564,6 +564,7 @@ impl InstallCommand { } /// Install an external extension to ${AVOCADO_PREFIX}/extensions/ + #[allow(dead_code)] async fn install_external_extension( &self, config: &Config, @@ -906,6 +907,7 @@ $DNF_SDK_HOST \ } /// Install a versioned extension using DNF to its own sysroot + #[allow(dead_code)] async fn install_versioned_extension( &self, config: &Config, @@ -1091,6 +1093,7 @@ $DNF_SDK_HOST \ } /// Install SDK dependencies from an external extension's config + #[allow(dead_code)] async fn install_external_extension_sdk_deps( &self, config: &Config, diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index 460b683..d42af3d 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -82,7 +82,12 @@ impl SdkInstallCommand { .with_context(|| format!("Failed to load config from {}", self.config_path))?; let target = validate_and_log_target(self.target.as_deref(), &basic_config)?; + // Fetch remote extensions before loading composed config + // This ensures their configs can be merged in + self.fetch_remote_extensions(&basic_config, &target).await?; + // Load the composed configuration (merges external configs, applies interpolation) + // This now includes configs from fetched remote extensions let composed = Config::load_composed(&self.config_path, self.target.as_deref()) .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; @@ -166,6 +171,45 @@ impl SdkInstallCommand { result } + /// Fetch remote extensions before SDK install + /// + /// This discovers extensions with a `source` field and fetches them + /// to `$AVOCADO_PREFIX/includes/` so their configs can be merged. 
+ async fn fetch_remote_extensions(&self, _config: &Config, target: &str) -> Result<()> { + use crate::commands::ext::ExtFetchCommand; + + // Discover remote extensions + let remote_extensions = Config::discover_remote_extensions(&self.config_path)?; + + if remote_extensions.is_empty() { + return Ok(()); + } + + if self.verbose { + print_info( + &format!( + "Fetching {} remote extension(s) before SDK install...", + remote_extensions.len() + ), + OutputLevel::Normal, + ); + } + + // Use ExtFetchCommand to fetch extensions + let fetch_cmd = ExtFetchCommand::new( + self.config_path.clone(), + None, // Fetch all remote extensions + self.verbose, + false, // Don't force re-fetch + Some(target.to_string()), + self.container_args.clone(), + ); + + fetch_cmd.execute().await?; + + Ok(()) + } + /// Internal implementation of the install logic #[allow(clippy::too_many_arguments)] async fn execute_install( diff --git a/src/main.rs b/src/main.rs index b78f1fc..9e64d79 100644 --- a/src/main.rs +++ b/src/main.rs @@ -9,7 +9,7 @@ use commands::build::BuildCommand; use commands::clean::CleanCommand; use commands::ext::{ ExtBuildCommand, ExtCheckoutCommand, ExtCleanCommand, ExtDepsCommand, ExtDnfCommand, - ExtImageCommand, ExtInstallCommand, ExtListCommand, ExtPackageCommand, + ExtFetchCommand, ExtImageCommand, ExtInstallCommand, ExtListCommand, ExtPackageCommand, }; use commands::fetch::FetchCommand; use commands::hitl::HitlServerCommand; @@ -1186,6 +1186,25 @@ async fn main() -> Result<()> { install_cmd.execute().await?; Ok(()) } + ExtCommands::Fetch { + config, + verbose, + force, + extension, + target, + container_args, + } => { + let fetch_cmd = ExtFetchCommand::new( + config, + extension, + verbose, + force, + target.or(cli.target.clone()), + container_args, + ); + fetch_cmd.execute().await?; + Ok(()) + } ExtCommands::Build { extension, config, @@ -1509,6 +1528,27 @@ enum ExtCommands { #[arg(long = "dnf-arg", num_args = 1, allow_hyphen_values = true, action = 
clap::ArgAction::Append)] dnf_args: Option>, }, + /// Fetch remote extensions from repo, git, or path sources + Fetch { + /// Path to avocado.yaml configuration file + #[arg(short = 'C', long, default_value = "avocado.yaml")] + config: String, + /// Enable verbose output + #[arg(short, long)] + verbose: bool, + /// Force re-fetch even if already installed + #[arg(short, long)] + force: bool, + /// Name of the extension to fetch (if not provided, fetches all remote extensions) + #[arg(short = 'e', long = "extension")] + extension: Option, + /// Target architecture + #[arg(short, long)] + target: Option, + /// Additional arguments to pass to the container runtime + #[arg(long = "container-arg", num_args = 1, allow_hyphen_values = true, action = clap::ArgAction::Append)] + container_args: Option>, + }, /// Build sysext and/or confext extensions from configuration Build { /// Path to avocado.yaml configuration file diff --git a/src/utils/config.rs b/src/utils/config.rs index 1fe3ac4..bbc86df 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -1,5 +1,8 @@ //! Configuration utilities for Avocado CLI. 
+// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -103,23 +106,65 @@ mod container_args_deserializer { } } -/// Represents the location of an extension (local or external) +/// Represents the location of an extension #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ExtensionLocation { /// Extension defined in the main config file Local { name: String, config_path: String }, - /// Extension defined in an external config file + /// DEPRECATED: Extension from an external config file + /// Use source: path in the ext section instead + #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] External { name: String, config_path: String }, + /// Remote extension fetched from a source (repo, git, or path) + #[allow(dead_code)] + Remote { + name: String, + source: ExtensionSource, + }, +} + +/// Represents the source configuration for fetching a remote extension +#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum ExtensionSource { + /// Extension from the avocado package repository + Repo { + /// Version to fetch (e.g., "0.1.0" or "*") + version: String, + /// Optional custom repository name + #[serde(skip_serializing_if = "Option::is_none")] + repo_name: Option, + }, + /// Extension from a git repository + Git { + /// Git repository URL + url: String, + /// Git ref (branch, tag, or commit hash) + #[serde(rename = "ref", skip_serializing_if = "Option::is_none")] + git_ref: Option, + /// Optional sparse checkout paths + #[serde(skip_serializing_if = "Option::is_none")] + sparse_checkout: Option>, + }, + /// Extension from a local filesystem path + Path { + /// Path to the extension directory (relative to config or absolute) + path: String, + }, } /// Represents an extension dependency for a runtime with type information 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum RuntimeExtDep { - /// Extension defined in the main config file (needs install + build) + /// Extension defined in the config (local or fetched remote) Local(String), - /// Extension from an external config file (needs install + build) + /// DEPRECATED: Extension from an external config file + /// Use source: path in the ext section instead + #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] External { name: String, config_path: String }, - /// Prebuilt extension from package repo (needs install only, no build) + /// DEPRECATED: Prebuilt extension from package repo + /// Use source: repo in the ext section instead + #[deprecated(since = "0.23.0", note = "Use Local with source: repo instead")] Versioned { name: String, version: String }, } @@ -134,6 +179,64 @@ impl RuntimeExtDep { } } +/// Result of parsing an extension reference from a dependency spec +#[derive(Debug, Clone)] +pub enum ExtRefParsed { + /// Extension reference found + Extension { + /// Extension name + name: String, + /// Optional external config path + config: Option, + /// Optional version (for versioned/deprecated syntax) + version: Option, + }, + /// Not an extension reference (e.g., package dependency) + NotExtension, +} + +/// Parse an extension reference from a dependency specification. 
+/// +/// Handles both shorthand and object forms: +/// - `key: ext` → Extension { name: key, config: None, version: None } +/// - `key: { ext: name }` → Extension { name, config: None, version: None } +/// - `key: { ext: name, config: path }` → Extension { name, config: Some(path), version: None } +/// - `key: { ext: name, vsn: ver }` → Extension { name, config: None, version: Some(ver) } (deprecated) +/// - `key: "version"` → NotExtension (package dependency) +pub fn parse_ext_ref(dep_name: &str, dep_spec: &serde_yaml::Value) -> ExtRefParsed { + // Shorthand: "my-ext: ext" means { ext: my-ext } + if let Some(value_str) = dep_spec.as_str() { + if value_str == "ext" { + return ExtRefParsed::Extension { + name: dep_name.to_string(), + config: None, + version: None, + }; + } + // Otherwise it's a package dependency with version string + return ExtRefParsed::NotExtension; + } + + // Object form: { ext: name, ... } + if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + let config = dep_spec + .get("config") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let version = dep_spec + .get("vsn") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + return ExtRefParsed::Extension { + name: ext_name.to_string(), + config, + version, + }; + } + + ExtRefParsed::NotExtension +} + /// A composed configuration that merges the main config with external extension configs. /// /// This struct provides a unified view where: @@ -151,6 +254,94 @@ pub struct ComposedConfig { pub merged_value: serde_yaml::Value, /// The path to the main config file pub config_path: String, + /// Maps extension names to their source config file paths. + /// + /// This is used to resolve relative paths within extension configs. + /// Extensions from the main config will map to the main config path. + /// Extensions from remote/external sources will map to their respective config paths. 
+ pub extension_sources: std::collections::HashMap, +} + +impl ComposedConfig { + /// Get the source config path for an extension. + /// + /// Returns the path to the config file where the extension is defined. + /// Falls back to the main config path if the extension is not found. + #[allow(dead_code)] + pub fn get_extension_source_config(&self, ext_name: &str) -> &str { + self.extension_sources + .get(ext_name) + .map(|s| s.as_str()) + .unwrap_or(&self.config_path) + } + + /// Resolve a path relative to an extension's source directory. + /// + /// For extensions from remote/external sources, paths are resolved relative to + /// that extension's src_dir (or config directory if src_dir is not specified). + /// For extensions from the main config, paths resolve relative to the main src_dir. + /// + /// # Arguments + /// * `ext_name` - The name of the extension + /// * `path` - The path to resolve (may be relative or absolute) + /// + /// # Returns + /// The resolved absolute path + #[allow(dead_code)] + pub fn resolve_path_for_extension(&self, ext_name: &str, path: &str) -> PathBuf { + let target_path = Path::new(path); + + // If it's already absolute, return as-is + if target_path.is_absolute() { + return target_path.to_path_buf(); + } + + // Get the source config path for this extension + let source_config = self.get_extension_source_config(ext_name); + let source_config_path = Path::new(source_config); + + // Try to load the source config to get its src_dir + // This handles the case where the extension's config has its own src_dir + if let Ok(content) = fs::read_to_string(source_config_path) { + if let Ok(parsed) = Config::parse_config_value(source_config, &content) { + if let Ok(ext_config) = serde_yaml::from_value::(parsed) { + // Use the extension's resolved src_dir + if let Some(src_dir) = ext_config.get_resolved_src_dir(source_config) { + return src_dir.join(target_path); + } + } + } + } + + // Fallback: resolve relative to the source config's directory + 
let config_dir = source_config_path.parent().unwrap_or(Path::new(".")); + config_dir.join(target_path) + } + + /// Get the src_dir for an extension. + /// + /// Returns the src_dir from the extension's source config, or the directory + /// containing that config file if src_dir is not specified. + #[allow(dead_code)] + pub fn get_extension_src_dir(&self, ext_name: &str) -> PathBuf { + let source_config = self.get_extension_source_config(ext_name); + let source_config_path = Path::new(source_config); + let config_dir = source_config_path.parent().unwrap_or(Path::new(".")); + + // Try to load the source config to get its src_dir + if let Ok(content) = fs::read_to_string(source_config_path) { + if let Ok(parsed) = Config::parse_config_value(source_config, &content) { + if let Ok(ext_config) = serde_yaml::from_value::(parsed) { + if let Some(src_dir) = ext_config.get_resolved_src_dir(source_config) { + return src_dir; + } + } + } + } + + // Fallback: use the config directory + config_dir.to_path_buf() + } } /// Configuration error type @@ -384,10 +575,11 @@ impl Config { /// /// This method: /// 1. Loads the main config (raw, without interpolation) - /// 2. Discovers all external config references in runtime and ext dependencies - /// 3. Loads each external config (raw) - /// 4. Merges external `ext.*`, `sdk.dependencies`, and `sdk.compile` sections - /// 5. Applies interpolation to the composed model + /// 2. Discovers installed remote extensions in avocado-extensions/ and merges their configs + /// 3. Discovers all external config references in runtime and ext dependencies + /// 4. Loads each external config (raw) + /// 5. Merges external `ext.*`, `sdk.dependencies`, and `sdk.compile` sections + /// 6. Applies interpolation to the composed model /// /// The `distro`, `default_target`, and `supported_targets` sections come from the main config only, /// allowing external configs to reference `{{ config.distro.version }}` and resolve to main config values. 
@@ -398,11 +590,31 @@ impl Config { let path = config_path.as_ref(); let config_path_str = path.to_string_lossy().to_string(); + // Track which config file each extension comes from + let mut extension_sources: std::collections::HashMap = + std::collections::HashMap::new(); + // Load main config content (raw, no interpolation yet) let content = fs::read_to_string(path) .with_context(|| format!("Failed to read config file: {}", path.display()))?; let mut main_config = Self::parse_config_value(&config_path_str, &content)?; + // Record extensions from the main config + if let Some(ext_section) = main_config.get("ext").and_then(|e| e.as_mapping()) { + for (ext_key, _) in ext_section { + if let Some(ext_name) = ext_key.as_str() { + extension_sources.insert(ext_name.to_string(), config_path_str.clone()); + } + } + } + + // Discover and merge installed remote extension configs + // Remote extensions are those with a 'source' field that have been fetched + // to $AVOCADO_PREFIX/includes// + let remote_ext_sources = + Self::merge_installed_remote_extensions(&mut main_config, path, target)?; + extension_sources.extend(remote_ext_sources); + // Discover all external config references let external_refs = Self::discover_external_config_refs(&main_config); @@ -431,6 +643,22 @@ impl Config { // Merge external config into main config Self::merge_external_config(&mut main_config, &external_config, ext_name); + + // Record this extension's source (the external config path) + let resolved_path_str = resolved_path.to_string_lossy().to_string(); + extension_sources.insert(ext_name.clone(), resolved_path_str.clone()); + + // Also record any extensions defined within this external config + if let Some(nested_ext_section) = + external_config.get("ext").and_then(|e| e.as_mapping()) + { + for (nested_ext_key, _) in nested_ext_section { + if let Some(nested_ext_name) = nested_ext_key.as_str() { + extension_sources + .insert(nested_ext_name.to_string(), resolved_path_str.clone()); + } + } + } 
} // Apply interpolation to the composed model @@ -445,9 +673,113 @@ impl Config { config, merged_value: main_config, config_path: config_path_str, + extension_sources, }) } + /// Merge installed remote extension configs into the main config + /// + /// For each extension with a `source` field that has been installed to + /// `$AVOCADO_PREFIX/includes//`, load and merge its avocado.yaml + /// + /// Returns a HashMap mapping extension names to their source config file paths. + fn merge_installed_remote_extensions( + main_config: &mut serde_yaml::Value, + config_path: &Path, + target: Option<&str>, + ) -> Result> { + let mut extension_sources: std::collections::HashMap = + std::collections::HashMap::new(); + + // Discover remote extensions from the main config + let remote_extensions = Self::discover_remote_extensions_from_value(main_config)?; + + if remote_extensions.is_empty() { + return Ok(extension_sources); + } + + // Get the src_dir and target to find the extensions directory + // First deserialize just to get src_dir and default_target + let temp_config: Config = + serde_yaml::from_value(main_config.clone()).unwrap_or_else(|_| Config { + default_target: None, + supported_targets: None, + src_dir: None, + distro: None, + runtime: None, + sdk: None, + provision: None, + signing_keys: None, + }); + + // Resolve target: CLI arg > env var > config default + let resolved_target = target + .map(|s| s.to_string()) + .or_else(|| std::env::var("AVOCADO_TARGET").ok()) + .or_else(|| temp_config.default_target.clone()); + + // If we don't have a target, we can't determine the extensions path + let resolved_target = match resolved_target { + Some(t) => t, + None => { + // No target available - can't locate extensions, skip merging + return Ok(extension_sources); + } + }; + + // Get extensions directory from volume or fallback path + let extensions_dir = + temp_config.get_extensions_dir(&config_path.to_string_lossy(), &resolved_target); + + // For each remote extension, 
check if it's installed and merge its config + for (ext_name, _source) in remote_extensions { + let ext_install_path = extensions_dir.join(&ext_name); + + // Try to find the extension's config file + let ext_config_path = if ext_install_path.join("avocado.yaml").exists() { + ext_install_path.join("avocado.yaml") + } else if ext_install_path.join("avocado.yml").exists() { + ext_install_path.join("avocado.yml") + } else if ext_install_path.join("avocado.toml").exists() { + ext_install_path.join("avocado.toml") + } else { + // Extension not installed yet, skip + continue; + }; + + // Load the remote extension's config + let ext_content = fs::read_to_string(&ext_config_path).with_context(|| { + format!( + "Failed to read remote extension config: {}", + ext_config_path.display() + ) + })?; + let ext_config = Self::parse_config_value( + ext_config_path.to_str().unwrap_or(&ext_name), + &ext_content, + )?; + + // Record this extension's source config path + let ext_config_path_str = ext_config_path.to_string_lossy().to_string(); + extension_sources.insert(ext_name.clone(), ext_config_path_str.clone()); + + // Also record any extensions defined within this remote extension's config + if let Some(nested_ext_section) = ext_config.get("ext").and_then(|e| e.as_mapping()) { + for (nested_ext_key, _) in nested_ext_section { + if let Some(nested_ext_name) = nested_ext_key.as_str() { + extension_sources + .insert(nested_ext_name.to_string(), ext_config_path_str.clone()); + } + } + } + + // Merge the remote extension config + Self::merge_external_config(main_config, &ext_config, &ext_name); + } + + Ok(extension_sources) + } + /// Discover all external config references in runtime and ext dependencies. 
/// /// Scans these locations: @@ -835,28 +1167,31 @@ impl Config { let mut ext_deps = Vec::new(); - for (_dep_name, dep_spec) in dependencies { - // Check if this dependency references an extension - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if it has a version (versioned/prebuilt extension) - if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { - ext_deps.push(RuntimeExtDep::Versioned { - name: ext_name.to_string(), - version: version.to_string(), - }); - } - // Check if it has an external config - else if let Some(external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - ext_deps.push(RuntimeExtDep::External { - name: ext_name.to_string(), - config_path: external_config.to_string(), - }); + for (dep_name, dep_spec) in dependencies { + let dep_name_str = dep_name.as_str().unwrap_or(""); + + match parse_ext_ref(dep_name_str, dep_spec) { + ExtRefParsed::Extension { + name, + config, + version, + } => { + if let Some(ver) = version { + // Versioned extension (deprecated syntax) + ext_deps.push(RuntimeExtDep::Versioned { name, version: ver }); + } else if let Some(cfg_path) = config { + // External extension with config path + ext_deps.push(RuntimeExtDep::External { + name, + config_path: cfg_path, + }); + } else { + // Local extension + ext_deps.push(RuntimeExtDep::Local(name)); + } } - // Otherwise it's a local extension - else { - ext_deps.push(RuntimeExtDep::Local(ext_name.to_string())); + ExtRefParsed::NotExtension => { + // Package dependency, skip } } } @@ -1489,6 +1824,159 @@ impl Config { Ok(external_extensions) } + /// Parse the source field from an extension configuration + /// + /// Returns Some(ExtensionSource) if the extension has a source field, + /// None if it's a local extension (no source field) + pub fn parse_extension_source( + ext_config: &serde_yaml::Value, + ) -> Result> { + let source = ext_config.get("source"); + + match source { + None => Ok(None), // Local extension 
+ Some(source_value) => { + // Deserialize the source block into ExtensionSource + let source: ExtensionSource = serde_yaml::from_value(source_value.clone()) + .with_context(|| "Failed to parse extension source configuration")?; + Ok(Some(source)) + } + } + } + + /// Discover all remote extensions in the configuration + /// + /// Returns a list of (extension_name, ExtensionSource) tuples for extensions + /// that have a `source` field in their configuration + pub fn discover_remote_extensions(config_path: &str) -> Result> { + let content = std::fs::read_to_string(config_path) + .with_context(|| format!("Failed to read config file: {config_path}"))?; + let parsed = Self::parse_config_value(config_path, &content)?; + + Self::discover_remote_extensions_from_value(&parsed) + } + + /// Discover remote extensions from a parsed config value + pub fn discover_remote_extensions_from_value( + parsed: &serde_yaml::Value, + ) -> Result> { + let mut remote_extensions = Vec::new(); + + if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + for (ext_name_key, ext_config) in ext_section { + if let Some(ext_name) = ext_name_key.as_str() { + if let Some(source) = Self::parse_extension_source(ext_config)? { + remote_extensions.push((ext_name.to_string(), source)); + } + } + } + } + + Ok(remote_extensions) + } + + /// Get the path where remote extensions should be installed on the host filesystem. + /// + /// This resolves the Docker volume mountpoint to access `$AVOCADO_PREFIX/includes` from the host. + /// Returns: `//includes/` + /// + /// Falls back to `/.avocado//includes/` if volume state is not available. 
+ pub fn get_extensions_dir(&self, config_path: &str, target: &str) -> PathBuf { + let src_dir = self.get_resolved_src_dir(config_path).unwrap_or_else(|| { + PathBuf::from(config_path) + .parent() + .unwrap_or(Path::new(".")) + .to_path_buf() + }); + + // Try to load volume state and get the mountpoint + if let Ok(Some(volume_state)) = crate::utils::volume::VolumeState::load_from_dir(&src_dir) { + // Use synchronous Docker inspect to get the mountpoint + if let Ok(mountpoint) = Self::get_volume_mountpoint_sync(&volume_state) { + return mountpoint.join(target).join("includes"); + } + } + + // Fallback: use a local path in src_dir for development/testing + src_dir.join(".avocado").join(target).join("includes") + } + + /// Get the path where a specific remote extension should be installed + /// + /// Returns: `//includes//` + pub fn get_extension_install_path( + &self, + config_path: &str, + ext_name: &str, + target: &str, + ) -> PathBuf { + self.get_extensions_dir(config_path, target).join(ext_name) + } + + /// Get the container path expression for extensions directory + /// + /// Returns: `$AVOCADO_PREFIX/includes` + #[allow(dead_code)] + pub fn get_extensions_container_path() -> &'static str { + "$AVOCADO_PREFIX/includes" + } + + /// Get the volume mountpoint synchronously (for use in non-async contexts) + fn get_volume_mountpoint_sync( + volume_state: &crate::utils::volume::VolumeState, + ) -> Result { + let output = std::process::Command::new(&volume_state.container_tool) + .args([ + "volume", + "inspect", + &volume_state.volume_name, + "--format", + "{{.Mountpoint}}", + ]) + .output() + .with_context(|| { + format!( + "Failed to inspect Docker volume '{}'", + volume_state.volume_name + ) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!( + "Failed to get mountpoint for volume '{}': {}", + volume_state.volume_name, + stderr + ); + } + + let mountpoint = 
String::from_utf8_lossy(&output.stdout).trim().to_string(); + if mountpoint.is_empty() { + anyhow::bail!( + "Docker volume '{}' has no mountpoint", + volume_state.volume_name + ); + } + + Ok(PathBuf::from(mountpoint)) + } + + /// Check if a remote extension is already installed + #[allow(dead_code)] + pub fn is_remote_extension_installed( + &self, + config_path: &str, + ext_name: &str, + target: &str, + ) -> bool { + let install_path = self.get_extension_install_path(config_path, ext_name, target); + // Check if the directory exists and contains an avocado.yaml or avocado.toml + install_path.exists() + && (install_path.join("avocado.yaml").exists() + || install_path.join("avocado.yml").exists() + || install_path.join("avocado.toml").exists()) + } + /// Find an extension in the full dependency tree (local and external) /// This is a comprehensive search that looks through all runtime dependencies /// and their transitive extension dependencies @@ -1581,6 +2069,7 @@ impl Config { let found_name = match &ext_location { ExtensionLocation::Local { name, .. } => name, ExtensionLocation::External { name, .. } => name, + ExtensionLocation::Remote { name, .. } => name, }; if found_name == extension_name { @@ -1613,6 +2102,11 @@ impl Config { visited, ); } + ExtensionLocation::Remote { .. } => { + // Remote extensions don't have nested dependencies to discover here + // Their configs are merged separately after fetching + return Ok(()); + } }; // Cycle detection: check if we've already processed this extension diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs new file mode 100644 index 0000000..9653146 --- /dev/null +++ b/src/utils/ext_fetch.rs @@ -0,0 +1,415 @@ +//! Extension fetching utilities for remote extensions. +//! +//! This module provides functionality to fetch extensions from various sources: +//! - Package repository (avocado extension repo) +//! - Git repositories (with optional sparse checkout) +//! 
- Local filesystem paths + +use anyhow::{Context, Result}; +use std::path::{Path, PathBuf}; + +use crate::utils::config::ExtensionSource; +use crate::utils::container::{RunConfig, SdkContainer}; +use crate::utils::output::{print_info, OutputLevel}; + +/// Extension fetcher for downloading and installing remote extensions +pub struct ExtensionFetcher { + /// Path to the main configuration file + config_path: String, + /// Target architecture + target: String, + /// Enable verbose output + verbose: bool, + /// Container image for running fetch operations + container_image: String, + /// Repository URL for package fetching + repo_url: Option, + /// Repository release for package fetching + repo_release: Option, + /// Container arguments + container_args: Option>, +} + +impl ExtensionFetcher { + /// Create a new ExtensionFetcher + pub fn new( + config_path: String, + target: String, + container_image: String, + verbose: bool, + ) -> Self { + Self { + config_path, + target, + verbose, + container_image, + repo_url: None, + repo_release: None, + container_args: None, + } + } + + /// Set repository URL + pub fn with_repo_url(mut self, repo_url: Option) -> Self { + self.repo_url = repo_url; + self + } + + /// Set repository release + pub fn with_repo_release(mut self, repo_release: Option) -> Self { + self.repo_release = repo_release; + self + } + + /// Set container arguments + pub fn with_container_args(mut self, container_args: Option>) -> Self { + self.container_args = container_args; + self + } + + /// Fetch an extension based on its source configuration + /// + /// Returns the path where the extension was installed + pub async fn fetch( + &self, + ext_name: &str, + source: &ExtensionSource, + install_dir: &Path, + ) -> Result { + let ext_install_path = install_dir.join(ext_name); + + match source { + ExtensionSource::Repo { version, repo_name } => { + self.fetch_from_repo(ext_name, version, repo_name.as_deref(), &ext_install_path) + .await?; + } + 
ExtensionSource::Git { + url, + git_ref, + sparse_checkout, + } => { + self.fetch_from_git( + ext_name, + url, + git_ref.as_deref(), + sparse_checkout.as_deref(), + &ext_install_path, + ) + .await?; + } + ExtensionSource::Path { path } => { + self.fetch_from_path(ext_name, path, &ext_install_path) + .await?; + } + } + + Ok(ext_install_path) + } + + /// Fetch an extension from the avocado package repository + async fn fetch_from_repo( + &self, + ext_name: &str, + version: &str, + repo_name: Option<&str>, + install_path: &Path, + ) -> Result<()> { + if self.verbose { + print_info( + &format!( + "Fetching extension '{ext_name}' version '{version}' from package repository" + ), + OutputLevel::Normal, + ); + } + + // Create the install directory + std::fs::create_dir_all(install_path).with_context(|| { + format!( + "Failed to create extension directory: {}", + install_path.display() + ) + })?; + + // Build the package spec + let package_spec = if version == "*" { + ext_name.to_string() + } else { + format!("{ext_name}-{version}") + }; + + // Build the DNF command to download and extract the package + // We use --downloadonly and then extract the RPM contents + let repo_arg = repo_name.map(|r| format!("--repo={r}")).unwrap_or_default(); + + let install_path_str = install_path.to_string_lossy(); + + // The fetch script downloads the package and extracts it to the install path + let fetch_script = format!( + r#" +set -e + +# Create temp directory for download +TMPDIR=$(mktemp -d) +cd "$TMPDIR" + +# Download the extension package +dnf download {repo_arg} --destdir="$TMPDIR" {package_spec} + +# Find the downloaded RPM +RPM_FILE=$(ls -1 *.rpm 2>/dev/null | head -1) +if [ -z "$RPM_FILE" ]; then + echo "ERROR: Failed to download extension package '{ext_name}'" + exit 1 +fi + +# Extract RPM contents to install path +# The package root / maps to the extension's src_dir +mkdir -p "{install_path_str}" +cd "{install_path_str}" +rpm2cpio "$TMPDIR/$RPM_FILE" | cpio -idmv + +echo 
"Successfully fetched extension '{ext_name}' to {install_path_str}" + +# Cleanup +rm -rf "$TMPDIR" +"# + ); + + let container_helper = SdkContainer::new().verbose(self.verbose); + let run_config = RunConfig { + container_image: self.container_image.clone(), + target: self.target.clone(), + command: fetch_script, + verbose: self.verbose, + source_environment: true, + interactive: false, + repo_url: self.repo_url.clone(), + repo_release: self.repo_release.clone(), + container_args: self.container_args.clone(), + ..Default::default() + }; + + let success = container_helper.run_in_container(run_config).await?; + if !success { + return Err(anyhow::anyhow!( + "Failed to fetch extension '{ext_name}' from package repository" + )); + } + + Ok(()) + } + + /// Fetch an extension from a git repository + async fn fetch_from_git( + &self, + ext_name: &str, + url: &str, + git_ref: Option<&str>, + sparse_checkout: Option<&[String]>, + install_path: &Path, + ) -> Result<()> { + if self.verbose { + print_info( + &format!("Fetching extension '{ext_name}' from git: {url}"), + OutputLevel::Normal, + ); + } + + // Create parent directory + if let Some(parent) = install_path.parent() { + std::fs::create_dir_all(parent).with_context(|| { + format!("Failed to create parent directory: {}", parent.display()) + })?; + } + + let install_path_str = install_path.to_string_lossy(); + let ref_arg = git_ref.unwrap_or("HEAD"); + + // Build the git clone command + let git_cmd = if let Some(sparse_paths) = sparse_checkout { + // Use sparse checkout for specific paths + let sparse_paths_str = sparse_paths.join(" "); + format!( + r#" +set -e +rm -rf "{install_path_str}" +mkdir -p "{install_path_str}" +cd "{install_path_str}" +git init +git remote add origin "{url}" +git config core.sparseCheckout true +echo "{sparse_paths_str}" | tr ' ' '\n' > .git/info/sparse-checkout +git fetch --depth 1 origin {ref_arg} +git checkout FETCH_HEAD +# Move sparse checkout contents to root if needed +if [ -d 
"{sparse_paths_str}" ]; then + mv {sparse_paths_str}/* . 2>/dev/null || true + rm -rf {sparse_paths_str} +fi +echo "Successfully fetched extension '{ext_name}' from git" +"# + ) + } else { + // Full clone + format!( + r#" +set -e +rm -rf "{install_path_str}" +git clone --depth 1 --branch {ref_arg} "{url}" "{install_path_str}" || \ +git clone --depth 1 "{url}" "{install_path_str}" +cd "{install_path_str}" +if [ "{ref_arg}" != "HEAD" ]; then + git checkout {ref_arg} 2>/dev/null || true +fi +echo "Successfully fetched extension '{ext_name}' from git" +"# + ) + }; + + let container_helper = SdkContainer::new().verbose(self.verbose); + let run_config = RunConfig { + container_image: self.container_image.clone(), + target: self.target.clone(), + command: git_cmd, + verbose: self.verbose, + source_environment: true, + interactive: false, + repo_url: self.repo_url.clone(), + repo_release: self.repo_release.clone(), + container_args: self.container_args.clone(), + ..Default::default() + }; + + let success = container_helper.run_in_container(run_config).await?; + if !success { + return Err(anyhow::anyhow!( + "Failed to fetch extension '{ext_name}' from git repository" + )); + } + + Ok(()) + } + + /// Fetch an extension from a local filesystem path + async fn fetch_from_path( + &self, + ext_name: &str, + source_path: &str, + install_path: &Path, + ) -> Result<()> { + if self.verbose { + print_info( + &format!("Fetching extension '{ext_name}' from path: {source_path}"), + OutputLevel::Normal, + ); + } + + // Resolve the source path relative to the config file + let config_dir = Path::new(&self.config_path) + .parent() + .unwrap_or(Path::new(".")); + let resolved_source = if Path::new(source_path).is_absolute() { + PathBuf::from(source_path) + } else { + config_dir.join(source_path) + }; + + if !resolved_source.exists() { + return Err(anyhow::anyhow!( + "Extension source path does not exist: {}", + resolved_source.display() + )); + } + + // Create the install directory + if let 
Some(parent) = install_path.parent() { + std::fs::create_dir_all(parent).with_context(|| { + format!("Failed to create parent directory: {}", parent.display()) + })?; + } + + // Remove existing install path if it exists + if install_path.exists() { + std::fs::remove_dir_all(install_path).with_context(|| { + format!( + "Failed to remove existing directory: {}", + install_path.display() + ) + })?; + } + + // Copy the directory (or create symlink for efficiency) + // For now, we'll copy to ensure isolation + Self::copy_dir_recursive(&resolved_source, install_path)?; + + if self.verbose { + print_info( + &format!( + "Successfully copied extension '{ext_name}' from {} to {}", + resolved_source.display(), + install_path.display() + ), + OutputLevel::Normal, + ); + } + + Ok(()) + } + + /// Recursively copy a directory + fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> { + std::fs::create_dir_all(dst)?; + + for entry in std::fs::read_dir(src)? { + let entry = entry?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + + if src_path.is_dir() { + Self::copy_dir_recursive(&src_path, &dst_path)?; + } else { + std::fs::copy(&src_path, &dst_path)?; + } + } + + Ok(()) + } + + /// Check if an extension is already fetched/installed + pub fn is_extension_installed(install_dir: &Path, ext_name: &str) -> bool { + let ext_path = install_dir.join(ext_name); + // Check if the directory exists and has an avocado config file + ext_path.exists() + && (ext_path.join("avocado.yaml").exists() + || ext_path.join("avocado.yml").exists() + || ext_path.join("avocado.toml").exists()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_extension_fetcher_creation() { + let fetcher = ExtensionFetcher::new( + "avocado.yaml".to_string(), + "x86_64-unknown-linux-gnu".to_string(), + "docker.io/avocadolinux/sdk:latest".to_string(), + false, + ); + + assert!(!fetcher.verbose); + assert_eq!(fetcher.target, "x86_64-unknown-linux-gnu"); + } + + 
#[test] + fn test_is_extension_installed() { + // This would need a temp directory to test properly + // For now just verify the function exists + let result = + ExtensionFetcher::is_extension_installed(Path::new("/nonexistent"), "test-ext"); + assert!(!result); + } +} diff --git a/src/utils/lockfile.rs b/src/utils/lockfile.rs index 9d9f40f..1cf11f3 100644 --- a/src/utils/lockfile.rs +++ b/src/utils/lockfile.rs @@ -3,6 +3,9 @@ //! This module provides functionality to track and pin package versions //! across different sysroots to ensure reproducible builds. +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -34,8 +37,12 @@ pub enum SysrootType { /// Local/external extension sysroot ($AVOCADO_EXT_SYSROOTS/{name}) /// Uses ext-rpm-config-scripts for RPM database Extension(String), - /// Versioned extension sysroot ($AVOCADO_EXT_SYSROOTS/{name}) - /// Uses ext-rpm-config for RPM database (different location than local extensions) + /// DEPRECATED: Versioned extension sysroot + /// The vsn: syntax is no longer supported. Remote extensions are now defined + /// in the ext section with source: field and are treated as local extensions + /// after being fetched to $AVOCADO_PREFIX/includes//. + #[deprecated(since = "0.23.0", note = "Use Extension variant for all extensions")] + #[allow(dead_code)] VersionedExtension(String), /// Runtime sysroot ($AVOCADO_PREFIX/runtimes/{name}) Runtime(String), diff --git a/src/utils/mod.rs b/src/utils/mod.rs index c37ea6d..cc34db5 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1,5 +1,6 @@ pub mod config; pub mod container; +pub mod ext_fetch; pub mod image_signing; pub mod interpolation; pub mod lockfile; diff --git a/src/utils/stamps.rs b/src/utils/stamps.rs index 2a7b29a..b60167e 100644 --- a/src/utils/stamps.rs +++ b/src/utils/stamps.rs @@ -7,6 +7,9 @@ //! 2. 
Detects staleness via content-addressable hashing (config + package list) //! 3. Enforces command ordering with dependency resolution from config +// Allow deprecated variants for backward compatibility during migration +#![allow(deprecated)] + use anyhow::{Context, Result}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -986,9 +989,8 @@ pub fn resolve_required_stamps_for_arch( /// This properly handles different extension types: /// - Local extensions: require install + build + image stamps /// - External extensions: require install + build + image stamps -/// - Versioned extensions: NO stamp requirements - they're prebuilt packages -/// installed directly via DNF during `runtime install`. The package repository -/// contains the complete extension images, so no local build/image steps needed. +/// - Versioned extensions: DEPRECATED - should error during config parsing +/// Remote extensions are now defined in the ext section with source: field pub fn resolve_required_stamps_for_runtime_build( runtime_name: &str, ext_dependencies: &[RuntimeExtDep], @@ -1028,12 +1030,12 @@ pub fn resolve_required_stamps_for_runtime_build_with_arch( reqs.push(StampRequirement::ext_build(ext_name)); reqs.push(StampRequirement::ext_image(ext_name)); } - // Versioned extensions: NO stamp requirements - // They're prebuilt packages from the package repository, installed - // directly via DNF during `runtime install`. No local ext install, - // ext build, or ext image steps are needed. + // DEPRECATED: Versioned extensions with vsn: syntax + // This case should not be reached as vsn: syntax now errors early. + // Remote extensions are now handled through the ext section with source: field, + // and are treated as local extensions after being fetched. RuntimeExtDep::Versioned { .. 
} => { - // No stamps required - covered by runtime install + // Should not be reached - vsn: syntax errors during config parsing } } } From d92872f54faebe76faffde5cef7948417ca1572e Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 17:09:40 -0500 Subject: [PATCH 02/23] add --sdk-arch to control the sdk container arch through docker buildx --- src/commands/sdk/clean.rs | 14 +++++- src/commands/sdk/compile.rs | 11 +++++ src/commands/sdk/dnf.rs | 10 ++++ src/commands/sdk/install.rs | 95 ++++++++++++++++++++++++++++++------- src/commands/sdk/run.rs | 10 ++++ src/main.rs | 19 ++++++-- src/utils/container.rs | 89 ++++++++++++++++++++++++++++++++++ 7 files changed, 225 insertions(+), 23 deletions(-) diff --git a/src/commands/sdk/clean.rs b/src/commands/sdk/clean.rs index 2f0bd73..02769ee 100644 --- a/src/commands/sdk/clean.rs +++ b/src/commands/sdk/clean.rs @@ -21,6 +21,8 @@ pub struct SdkCleanCommand { pub container_args: Option>, /// Additional arguments to pass to DNF commands pub dnf_args: Option>, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl SdkCleanCommand { @@ -38,9 +40,16 @@ impl SdkCleanCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the sdk clean command pub async fn execute(&self) -> Result<()> { // Load the configuration @@ -74,7 +83,7 @@ impl SdkCleanCommand { } let remove_command = "rm -rf $AVOCADO_SDK_PREFIX"; - let config = RunConfig { + let run_config = RunConfig { container_image: container_image.to_string(), target: target.clone(), command: remove_command.to_string(), @@ -85,9 +94,10 @@ impl SdkCleanCommand { repo_release, container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; - let success = 
container_helper.run_in_container(config).await?; + let success = container_helper.run_in_container(run_config).await?; if success { print_success("Successfully removed SDK directory.", OutputLevel::Normal); diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index 73ba11f..9893bb0 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -33,6 +33,8 @@ pub struct SdkCompileCommand { pub dnf_args: Option>, /// Disable stamp validation pub no_stamps: bool, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl SdkCompileCommand { @@ -53,6 +55,7 @@ impl SdkCompileCommand { container_args, dnf_args, no_stamps: false, + sdk_arch: None, } } @@ -62,6 +65,12 @@ impl SdkCompileCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the sdk compile command pub async fn execute(&self) -> Result<()> { // Load the configuration @@ -98,6 +107,7 @@ impl SdkCompileCommand { repo_release: config.get_sdk_repo_release(), container_args: config.merge_sdk_container_args(self.container_args.as_ref()), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -242,6 +252,7 @@ impl SdkCompileCommand { repo_release: repo_release.clone(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_helper.run_in_container(config).await?; diff --git a/src/commands/sdk/dnf.rs b/src/commands/sdk/dnf.rs index ed7b25e..49b84a4 100644 --- a/src/commands/sdk/dnf.rs +++ b/src/commands/sdk/dnf.rs @@ -23,6 +23,8 @@ pub struct SdkDnfCommand { pub container_args: Option>, /// Additional arguments to pass to DNF commands pub dnf_args: Option>, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl SdkDnfCommand { @@ -42,9 
+44,16 @@ impl SdkDnfCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the sdk dnf command pub async fn execute(&self) -> Result<()> { if self.command.is_empty() { @@ -134,6 +143,7 @@ impl SdkDnfCommand { repo_release: repo_release.cloned(), container_args: container_args.cloned(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; container_helper.run_in_container(config).await diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index d42af3d..2b38bc5 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -37,6 +37,8 @@ pub struct SdkInstallCommand { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl SdkInstallCommand { @@ -59,6 +61,7 @@ impl SdkInstallCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -75,6 +78,12 @@ impl SdkInstallCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the sdk install command pub async fn execute(&self) -> Result<()> { // Early target validation - load basic config first @@ -431,8 +440,13 @@ MACROS_EOF ..Default::default() }; - let init_success = - run_container_command(container_helper, run_config, runs_on_context).await?; + let init_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if init_success { print_success("Initialized SDK environment.", OutputLevel::Normal); @@ -489,8 +503,13 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS \ ..Default::default() }; - let sdk_target_success = - 
run_container_command(container_helper, run_config, runs_on_context).await?; + let sdk_target_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; // Track all SDK packages installed for lock file update at the end let mut all_sdk_package_names: Vec = Vec::new(); @@ -536,7 +555,13 @@ $DNF_SDK_HOST \ ..Default::default() }; - run_container_command(container_helper, run_config, runs_on_context).await?; + run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; // Install avocado-sdk-bootstrap with version from distro.version print_info("Installing SDK bootstrap.", OutputLevel::Normal); @@ -586,8 +611,13 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS \ ..Default::default() }; - let bootstrap_success = - run_container_command(container_helper, run_config, runs_on_context).await?; + let bootstrap_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if bootstrap_success { print_success("Installed SDK bootstrap.", OutputLevel::Normal); @@ -638,7 +668,13 @@ fi ..Default::default() }; - run_container_command(container_helper, run_config, runs_on_context).await?; + run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; // Install SDK dependencies (into SDK) let mut sdk_packages = Vec::new(); @@ -716,8 +752,13 @@ $DNF_SDK_HOST \ // runs_on handled by shared context ..Default::default() }; - let install_success = - run_container_command(container_helper, run_config, runs_on_context).await?; + let install_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if install_success { print_success("Installed SDK dependencies.", OutputLevel::Normal); @@ -809,8 +850,13 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ ..Default::default() }; - let 
rootfs_success = - run_container_command(container_helper, run_config, runs_on_context).await?; + let rootfs_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if rootfs_success { print_success("Installed rootfs sysroot.", OutputLevel::Normal); @@ -931,8 +977,13 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ ..Default::default() }; - let install_success = - run_container_command(container_helper, run_config, runs_on_context).await?; + let install_success = run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if install_success { print_success( @@ -1016,7 +1067,13 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ ..Default::default() }; - run_container_command(container_helper, run_config, runs_on_context).await?; + run_container_command( + container_helper, + run_config, + runs_on_context, + self.sdk_arch.as_ref(), + ) + .await?; if self.verbose { print_info("Wrote SDK install stamp.", OutputLevel::Normal); @@ -1068,9 +1125,15 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ /// Helper function to run a container command, using shared context if available async fn run_container_command( container_helper: &SdkContainer, - config: RunConfig, + mut config: RunConfig, runs_on_context: Option<&RunsOnContext>, + sdk_arch: Option<&String>, ) -> Result { + // Inject sdk_arch if provided + if let Some(arch) = sdk_arch { + config.sdk_arch = Some(arch.clone()); + } + if let Some(context) = runs_on_context { // Use the shared context - don't set runs_on in config as we're handling it container_helper diff --git a/src/commands/sdk/run.rs b/src/commands/sdk/run.rs index 3c2c760..950672b 100644 --- a/src/commands/sdk/run.rs +++ b/src/commands/sdk/run.rs @@ -47,6 +47,8 @@ pub struct SdkRunCommand { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture 
for cross-arch emulation + pub sdk_arch: Option, /// Signing service handle (Unix only) #[cfg(unix)] signing_service: Option, @@ -88,6 +90,7 @@ impl SdkRunCommand { no_bootstrap, runs_on: None, nfs_port: None, + sdk_arch: None, #[cfg(unix)] signing_service: None, } @@ -100,6 +103,12 @@ impl SdkRunCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Setup signing service for runtime if signing is configured #[cfg(unix)] async fn setup_signing_service( @@ -326,6 +335,7 @@ impl SdkRunCommand { no_bootstrap: self.no_bootstrap, runs_on: self.runs_on.clone(), nfs_port: self.nfs_port, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/main.rs b/src/main.rs index 9e64d79..a2677be 100644 --- a/src/main.rs +++ b/src/main.rs @@ -57,6 +57,10 @@ struct Cli { /// NFS port for remote execution (auto-selects from 12050-12099 if not specified) #[arg(long, global = true)] nfs_port: Option, + + /// SDK container architecture for cross-arch emulation via Docker buildx/QEMU (aarch64 or x86-64) + #[arg(long, value_name = "ARCH", global = true)] + sdk_arch: Option, } #[derive(Subcommand)] @@ -1387,7 +1391,8 @@ async fn main() -> Result<()> { dnf_args, ) .with_no_stamps(cli.no_stamps) - .with_runs_on(cli.runs_on.clone(), cli.nfs_port); + .with_runs_on(cli.runs_on.clone(), cli.nfs_port) + .with_sdk_arch(cli.sdk_arch.clone()); install_cmd.execute().await?; Ok(()) } @@ -1428,7 +1433,8 @@ async fn main() -> Result<()> { dnf_args, no_bootstrap, ) - .with_runs_on(cli.runs_on.clone(), cli.nfs_port); + .with_runs_on(cli.runs_on.clone(), cli.nfs_port) + .with_sdk_arch(cli.sdk_arch.clone()); run_cmd.execute().await?; Ok(()) } @@ -1458,7 +1464,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); compile_cmd.execute().await?; 
Ok(()) } @@ -1477,7 +1484,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); dnf_cmd.execute().await?; Ok(()) } @@ -1494,7 +1502,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); clean_cmd.execute().await?; Ok(()) } diff --git a/src/utils/container.rs b/src/utils/container.rs index c37f764..d6266d9 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -55,6 +55,24 @@ pub fn is_docker_desktop() -> bool { cfg!(target_os = "macos") || cfg!(target_os = "windows") } +/// Convert SDK arch specification to Docker platform format. +/// +/// # Arguments +/// * `sdk_arch` - Architecture string (e.g., "aarch64", "x86-64", "arm64", "amd64") +/// +/// # Returns +/// Docker platform string (e.g., "linux/arm64", "linux/amd64") +pub fn sdk_arch_to_platform(sdk_arch: &str) -> Result { + match sdk_arch.to_lowercase().as_str() { + "aarch64" | "arm64" => Ok("linux/arm64".to_string()), + "x86-64" | "x86_64" | "amd64" => Ok("linux/amd64".to_string()), + _ => Err(anyhow::anyhow!( + "Unsupported SDK architecture: '{}'. Supported values: aarch64, x86-64", + sdk_arch + )), + } +} + /// Add security options to container command based on host security module. 
/// - SELinux (Fedora/RHEL): adds --security-opt label=disable /// - AppArmor (Ubuntu/Debian): adds --security-opt apparmor=unconfined @@ -102,6 +120,8 @@ pub struct RunConfig { pub runs_on: Option, /// NFS port for remote execution (auto-selected if None) pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation (e.g., "aarch64", "x86-64") + pub sdk_arch: Option, } impl Default for RunConfig { @@ -132,6 +152,7 @@ impl Default for RunConfig { signing_checksum_algorithm: None, runs_on: None, nfs_port: None, + sdk_arch: None, } } } @@ -494,6 +515,13 @@ impl SdkContainer { "label=disable".to_string(), ]; + // Add platform flag for cross-architecture emulation via Docker buildx/QEMU + if let Some(ref sdk_arch) = config.sdk_arch { + let platform = sdk_arch_to_platform(sdk_arch)?; + extra_args.push("--platform".to_string()); + extra_args.push(platform); + } + if let Some(ref args) = config.container_args { extra_args.extend(args.clone()); } @@ -536,6 +564,13 @@ impl SdkContainer { ) -> Result> { let mut container_cmd = vec![self.container_tool.clone(), "run".to_string()]; + // Add platform flag for cross-architecture emulation via Docker buildx/QEMU + if let Some(ref sdk_arch) = config.sdk_arch { + let platform = sdk_arch_to_platform(sdk_arch)?; + container_cmd.push("--platform".to_string()); + container_cmd.push(platform); + } + // Container options if config.rm { container_cmd.push("--rm".to_string()); @@ -966,6 +1001,13 @@ impl SdkContainer { "label=disable".to_string(), ]; + // Add platform flag for cross-architecture emulation via Docker buildx/QEMU + if let Some(ref sdk_arch) = config.sdk_arch { + let platform = sdk_arch_to_platform(sdk_arch)?; + extra_args.push("--platform".to_string()); + extra_args.push(platform); + } + if let Some(ref args) = config.container_args { extra_args.extend(args.clone()); } @@ -1840,6 +1882,7 @@ mod tests { signing_checksum_algorithm: None, runs_on: None, nfs_port: None, + sdk_arch: None, }; let result = 
container.build_container_command(&config, &command, &env_vars, &volume_state); @@ -1977,4 +2020,50 @@ mod tests { vec!["-e", "VAR=value with spaces", "--name", "test"] ); } + + #[test] + fn test_sdk_arch_to_platform_aarch64() { + let result = sdk_arch_to_platform("aarch64").unwrap(); + assert_eq!(result, "linux/arm64"); + } + + #[test] + fn test_sdk_arch_to_platform_arm64() { + let result = sdk_arch_to_platform("arm64").unwrap(); + assert_eq!(result, "linux/arm64"); + } + + #[test] + fn test_sdk_arch_to_platform_x86_64() { + let result = sdk_arch_to_platform("x86-64").unwrap(); + assert_eq!(result, "linux/amd64"); + } + + #[test] + fn test_sdk_arch_to_platform_x86_64_underscore() { + let result = sdk_arch_to_platform("x86_64").unwrap(); + assert_eq!(result, "linux/amd64"); + } + + #[test] + fn test_sdk_arch_to_platform_amd64() { + let result = sdk_arch_to_platform("amd64").unwrap(); + assert_eq!(result, "linux/amd64"); + } + + #[test] + fn test_sdk_arch_to_platform_case_insensitive() { + let result = sdk_arch_to_platform("AARCH64").unwrap(); + assert_eq!(result, "linux/arm64"); + } + + #[test] + fn test_sdk_arch_to_platform_unsupported() { + let result = sdk_arch_to_platform("riscv64"); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("Unsupported SDK architecture")); + } } From 38110befe751381370484ac5298a4cf15f041512 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 17:20:20 -0500 Subject: [PATCH 03/23] fixup ext package src_dir --- src/commands/ext/package.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index ae5c9fb..3b02ebf 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -395,7 +395,7 @@ mkdir -p BUILD RPMS SOURCES SPECS SRPMS # Create spec file # Package root (/) maps to the extension's src_dir -cat > SPECS/package.spec << 'SPEC_EOF' +cat > SPECS/package.spec << SPEC_EOF %define _buildhost 
reproducible AutoReqProv: no @@ -422,7 +422,7 @@ Group: {group}{url_line} %install mkdir -p %{{buildroot}} # Copy src_dir contents to buildroot root -# This allows installation to $AVOCADO_PREFIX/includes// +# This allows installation to \$AVOCADO_PREFIX/includes// cp -rp "$EXT_SRC_DIR"/* %{{buildroot}}/ %clean From be891d662fe0ee56a4f55a0df6b5cd4979945677 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 18:00:30 -0500 Subject: [PATCH 04/23] support --sdk-arch across all container commands --- src/commands/build.rs | 9 +++ src/commands/ext/build.rs | 13 +++++ src/commands/ext/checkout.rs | 9 +++ src/commands/ext/clean.rs | 13 ++++- src/commands/ext/dnf.rs | 11 ++++ src/commands/ext/fetch.rs | 19 +++++-- src/commands/ext/image.rs | 10 ++++ src/commands/ext/install.rs | 10 ++++ src/commands/ext/package.rs | 10 ++++ src/commands/fetch.rs | 19 +++++++ src/commands/hitl/server.rs | 12 ++++ src/commands/install.rs | 17 ++++++ src/commands/provision.rs | 7 +++ src/commands/runtime/build.rs | 12 ++++ src/commands/runtime/clean.rs | 9 +++ src/commands/runtime/deploy.rs | 10 ++++ src/commands/runtime/dnf.rs | 11 ++++ src/commands/runtime/install.rs | 12 ++++ src/commands/runtime/provision.rs | 10 ++++ src/commands/runtime/sign.rs | 10 ++++ src/commands/sdk/install.rs | 5 +- src/main.rs | 57 +++++++++++++------ src/utils/config.rs | 57 ++++++++++++++----- src/utils/container.rs | 92 +++++++++++++++++++++++++------ src/utils/ext_fetch.rs | 52 ++++++++++++----- src/utils/interpolation/mod.rs | 23 ++++++++ 26 files changed, 447 insertions(+), 72 deletions(-) diff --git a/src/commands/build.rs b/src/commands/build.rs index af8a57e..12a4151 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -45,6 +45,8 @@ pub struct BuildCommand { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl BuildCommand { @@ -69,6 +71,7 @@ impl BuildCommand { 
no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -85,6 +88,12 @@ impl BuildCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the build command pub async fn execute(&self) -> Result<()> { // Early target validation - load basic config first diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index 3577d5f..ac9a4b6 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -37,6 +37,7 @@ pub struct ExtBuildCommand { pub no_stamps: bool, pub runs_on: Option, pub nfs_port: Option, + pub sdk_arch: Option, } impl ExtBuildCommand { @@ -58,6 +59,7 @@ impl ExtBuildCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -74,6 +76,12 @@ impl ExtBuildCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration and parse raw TOML let config = Config::load(&self.config_path)?; @@ -119,6 +127,7 @@ impl ExtBuildCommand { repo_release: repo_release.clone(), container_args: processed_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -460,6 +469,7 @@ impl ExtBuildCommand { repo_release: repo_release.clone(), container_args: processed_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -529,6 +539,7 @@ impl ExtBuildCommand { repo_release: repo_release.cloned(), container_args: processed_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let result = container_helper.run_in_container(config).await?; @@ -594,6 +605,7 @@ impl ExtBuildCommand { repo_release: repo_release.cloned(), 
container_args: processed_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let result = container_helper.run_in_container(config).await?; @@ -1579,6 +1591,7 @@ echo "Set proper permissions on authentication files""#, repo_release: repo_release.clone(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/ext/checkout.rs b/src/commands/ext/checkout.rs index f5e7bf6..8883d8a 100644 --- a/src/commands/ext/checkout.rs +++ b/src/commands/ext/checkout.rs @@ -20,6 +20,7 @@ pub struct ExtCheckoutCommand { container_tool: String, target: Option, no_stamps: bool, + sdk_arch: Option, } impl ExtCheckoutCommand { @@ -41,6 +42,7 @@ impl ExtCheckoutCommand { container_tool, target, no_stamps: false, + sdk_arch: None, } } @@ -50,6 +52,12 @@ impl ExtCheckoutCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { let cwd = std::env::current_dir().context("Failed to get current directory")?; @@ -79,6 +87,7 @@ impl ExtCheckoutCommand { repo_url: config.get_sdk_repo_url(), repo_release: config.get_sdk_repo_release(), container_args: config.merge_sdk_container_args(None), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/ext/clean.rs b/src/commands/ext/clean.rs index 0291310..21c3886 100644 --- a/src/commands/ext/clean.rs +++ b/src/commands/ext/clean.rs @@ -15,6 +15,7 @@ pub struct ExtCleanCommand { target: Option, container_args: Option>, dnf_args: Option>, + sdk_arch: Option, } impl ExtCleanCommand { @@ -33,9 +34,16 @@ impl ExtCleanCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) 
-> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { let config = Config::load(&self.config_path)?; let content = std::fs::read_to_string(&self.config_path)?; @@ -136,7 +144,7 @@ rm -rf "$AVOCADO_PREFIX/.stamps/ext/{ext}" ); } - let config = RunConfig { + let run_config = RunConfig { container_image: container_image.to_string(), target: target.to_string(), command: clean_command, @@ -147,9 +155,10 @@ rm -rf "$AVOCADO_PREFIX/.stamps/ext/{ext}" self.container_args.as_ref(), ), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; - let success = container_helper.run_in_container(config).await?; + let success = container_helper.run_in_container(run_config).await?; if success { print_success( diff --git a/src/commands/ext/dnf.rs b/src/commands/ext/dnf.rs index 50db198..9c2238f 100644 --- a/src/commands/ext/dnf.rs +++ b/src/commands/ext/dnf.rs @@ -16,6 +16,7 @@ pub struct ExtDnfCommand { target: Option, container_args: Option>, dnf_args: Option>, + sdk_arch: Option, } impl ExtDnfCommand { @@ -36,9 +37,16 @@ impl ExtDnfCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { let config = Config::load(&self.config_path)?; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); @@ -197,6 +205,7 @@ impl ExtDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let dir_exists = container_helper.run_in_container(config).await?; @@ -249,6 +258,7 @@ impl ExtDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), 
..Default::default() }; let setup_success = container_helper.run_in_container(config).await?; @@ -300,6 +310,7 @@ impl ExtDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_helper.run_in_container(config).await?; diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs index f9a2b3e..6ec02be 100644 --- a/src/commands/ext/fetch.rs +++ b/src/commands/ext/fetch.rs @@ -24,6 +24,8 @@ pub struct ExtFetchCommand { pub target: Option, /// Additional arguments to pass to the container runtime pub container_args: Option>, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl ExtFetchCommand { @@ -43,9 +45,16 @@ impl ExtFetchCommand { force, target, container_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the fetch command pub async fn execute(&self) -> Result<()> { // Load configuration @@ -60,8 +69,9 @@ impl ExtFetchCommand { .get_sdk_image() .ok_or_else(|| anyhow::anyhow!("No SDK container image specified in configuration"))?; - // Discover remote extensions - let remote_extensions = Config::discover_remote_extensions(&self.config_path)?; + // Discover remote extensions (with target interpolation for extension names) + let remote_extensions = + Config::discover_remote_extensions(&self.config_path, Some(&target))?; if remote_extensions.is_empty() { print_info( @@ -123,7 +133,8 @@ impl ExtFetchCommand { ) .with_repo_url(config.get_sdk_repo_url()) .with_repo_release(config.get_sdk_repo_release()) - .with_container_args(config.merge_sdk_container_args(self.container_args.as_ref())); + .with_container_args(config.merge_sdk_container_args(self.container_args.as_ref())) + .with_sdk_arch(self.sdk_arch.clone()); // Fetch 
each extension let mut fetched_count = 0; @@ -189,7 +200,7 @@ impl ExtFetchCommand { /// Get the list of remote extensions that would be fetched #[allow(dead_code)] pub fn get_remote_extensions(&self) -> Result> { - Config::discover_remote_extensions(&self.config_path) + Config::discover_remote_extensions(&self.config_path, self.target.as_deref()) } } diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index a2a886d..0b3ebb5 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -23,6 +23,7 @@ pub struct ExtImageCommand { no_stamps: bool, runs_on: Option, nfs_port: Option, + sdk_arch: Option, } impl ExtImageCommand { @@ -44,6 +45,7 @@ impl ExtImageCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -60,6 +62,12 @@ impl ExtImageCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration and parse raw TOML let config = Config::load(&self.config_path)?; @@ -107,6 +115,7 @@ impl ExtImageCommand { dnf_args: self.dnf_args.clone(), runs_on: self.runs_on.clone(), nfs_port: self.nfs_port, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -278,6 +287,7 @@ impl ExtImageCommand { repo_release: repo_release.clone(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index ddc5691..f6385c9 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -25,6 +25,7 @@ pub struct ExtInstallCommand { no_stamps: bool, runs_on: Option, nfs_port: Option, + sdk_arch: Option, } impl ExtInstallCommand { @@ -48,6 +49,7 @@ impl ExtInstallCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -64,6 +66,12 @@ impl ExtInstallCommand { 
self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load the composed configuration (merges external configs, applies interpolation) let composed = Config::load_composed(&self.config_path, self.target.as_deref()) @@ -346,6 +354,7 @@ impl ExtInstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -631,6 +640,7 @@ $DNF_SDK_HOST \ dnf_args: self.dnf_args.clone(), disable_weak_dependencies, // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let install_success = diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 3b02ebf..523de93 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -28,6 +28,7 @@ pub struct ExtPackageCommand { /// since we now package src_dir directly without requiring build stamps. 
#[allow(dead_code)] pub no_stamps: bool, + pub sdk_arch: Option, } impl ExtPackageCommand { @@ -49,6 +50,7 @@ impl ExtPackageCommand { container_args, dnf_args, no_stamps: false, + sdk_arch: None, } } @@ -58,6 +60,12 @@ impl ExtPackageCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration let config = Config::load(&self.config_path)?; @@ -478,6 +486,7 @@ rm -rf "$TMPDIR" repo_release: config.get_sdk_repo_release(), container_args: merged_container_args, dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -800,6 +809,7 @@ rm -rf "$TMPDIR" repo_release: config.get_sdk_repo_release(), container_args: merged_container_args, dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 170972c..a3682a3 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -32,6 +32,7 @@ pub struct FetchCommand { target: Option, container_args: Option>, dnf_args: Option>, + sdk_arch: Option, } impl FetchCommand { @@ -52,9 +53,16 @@ impl FetchCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration let config = Config::load(&self.config_path)?; @@ -177,6 +185,7 @@ impl FetchCommand { repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let sysroot_exists = container_config.helper.run_in_container(run_config).await?; @@ -227,6 +236,7 @@ $DNF_SDK_HOST \ repo_release: 
container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; @@ -280,6 +290,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let sysroot_exists = container_config.helper.run_in_container(run_config).await?; @@ -329,6 +340,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; @@ -464,6 +476,7 @@ $DNF_SDK_HOST $DNF_SDK_HOST_OPTS $DNF_SDK_HOST_REPO_CONF \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; @@ -495,6 +508,7 @@ $DNF_SDK_HOST $DNF_SDK_HOST_OPTS $DNF_SDK_HOST_REPO_CONF \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let rootfs_exists = container_config.helper.run_in_container(run_config).await?; @@ -543,6 +557,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; @@ -574,6 +589,7 @@ $DNF_SDK_HOST \ repo_release: 
container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let target_sysroot_exists = container_config.helper.run_in_container(run_config).await?; @@ -622,6 +638,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; @@ -859,6 +876,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let sysroot_exists = container_config.helper.run_in_container(run_config).await?; @@ -913,6 +931,7 @@ $DNF_SDK_HOST \ repo_release: container_config.repo_release.cloned(), container_args: container_config.container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_config.helper.run_in_container(run_config).await?; diff --git a/src/commands/hitl/server.rs b/src/commands/hitl/server.rs index ddae204..657e874 100644 --- a/src/commands/hitl/server.rs +++ b/src/commands/hitl/server.rs @@ -42,6 +42,10 @@ pub struct HitlServerCommand { /// Disable stamp validation #[arg(long)] pub no_stamps: bool, + + /// SDK container architecture for cross-arch emulation + #[arg(skip)] + pub sdk_arch: Option, } impl HitlServerCommand { @@ -96,6 +100,7 @@ impl HitlServerCommand { interactive: false, repo_url: repo_url.cloned(), repo_release: repo_release.cloned(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -198,6 +203,7 @@ impl HitlServerCommand { repo_release: repo_release.cloned(), container_args: Some(container_args), dnf_args: self.dnf_args.clone(), + sdk_arch: 
self.sdk_arch.clone(), ..Default::default() }; @@ -307,6 +313,7 @@ mod tests { verbose: false, port: None, no_stamps: false, + sdk_arch: None, }; let commands = cmd.generate_export_setup_commands(); @@ -329,6 +336,7 @@ mod tests { verbose: false, port: Some(2049), no_stamps: false, + sdk_arch: None, }; let commands = cmd.generate_export_setup_commands(); @@ -349,6 +357,7 @@ mod tests { verbose: true, port: Some(3049), no_stamps: false, + sdk_arch: None, }; let commands = cmd.generate_export_setup_commands(); @@ -370,6 +379,7 @@ mod tests { verbose: false, port: Some(4049), no_stamps: false, + sdk_arch: None, }; let commands = cmd.generate_export_setup_commands(); @@ -418,6 +428,7 @@ mod tests { verbose: false, port: None, no_stamps: true, + sdk_arch: None, }; // With no_stamps, validation should be skipped @@ -436,6 +447,7 @@ mod tests { verbose: false, port: None, no_stamps: false, + sdk_arch: None, }; // With no extensions, the stamp validation loop is skipped entirely diff --git a/src/commands/install.rs b/src/commands/install.rs index e68074e..8ce3f9f 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -55,6 +55,8 @@ pub struct InstallCommand { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } impl InstallCommand { @@ -79,6 +81,7 @@ impl InstallCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -95,6 +98,12 @@ impl InstallCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Execute the install command pub async fn execute(&self) -> Result<()> { // Early target validation - load basic config first to validate target @@ -611,6 +620,7 @@ impl InstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: 
config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let sysroot_exists = container_helper.run_in_container(run_config).await?; @@ -632,6 +642,7 @@ impl InstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_helper.run_in_container(run_config).await?; @@ -787,6 +798,7 @@ $DNF_SDK_HOST \ container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -890,6 +902,7 @@ $DNF_SDK_HOST \ repo_release: stamp_repo_release, container_args: stamp_container_args, dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -942,6 +955,7 @@ $DNF_SDK_HOST \ container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let sysroot_exists = container_helper.run_in_container(run_config).await?; @@ -962,6 +976,7 @@ $DNF_SDK_HOST \ container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_helper.run_in_container(run_config).await?; @@ -1033,6 +1048,7 @@ $DNF_SDK_HOST \ container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -1255,6 +1271,7 @@ $DNF_SDK_HOST \ container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), disable_weak_dependencies: 
config.get_sdk_disable_weak_dependencies(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/provision.rs b/src/commands/provision.rs index d61ddcc..d986cc5 100644 --- a/src/commands/provision.rs +++ b/src/commands/provision.rs @@ -33,6 +33,8 @@ pub struct ProvisionConfig { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } /// Implementation of the 'provision' command that calls through to runtime provision. @@ -76,6 +78,7 @@ impl ProvisionCommand { no_stamps: self.config.no_stamps, runs_on: self.config.runs_on.clone(), nfs_port: self.config.nfs_port, + sdk_arch: self.config.sdk_arch.clone(), }, ); @@ -106,6 +109,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = ProvisionCommand::new(config); @@ -139,6 +143,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = ProvisionCommand::new(config); @@ -175,6 +180,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = ProvisionCommand::new(config); @@ -198,6 +204,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = ProvisionCommand::new(config); diff --git a/src/commands/runtime/build.rs b/src/commands/runtime/build.rs index bc7b400..9494c37 100644 --- a/src/commands/runtime/build.rs +++ b/src/commands/runtime/build.rs @@ -22,6 +22,7 @@ pub struct RuntimeBuildCommand { no_stamps: bool, runs_on: Option, nfs_port: Option, + sdk_arch: Option, } impl RuntimeBuildCommand { @@ -43,6 +44,7 @@ impl RuntimeBuildCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -59,6 +61,12 @@ impl RuntimeBuildCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn 
execute(&self) -> Result<()> { // Load configuration and parse raw TOML let config = load_config(&self.config_path)?; @@ -182,6 +190,7 @@ impl RuntimeBuildCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -295,6 +304,7 @@ impl RuntimeBuildCommand { dnf_args: self.dnf_args.clone(), env_vars, // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let complete_result = run_container_command(container_helper, run_config, runs_on_context) @@ -332,6 +342,7 @@ impl RuntimeBuildCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -930,6 +941,7 @@ rpm --root="$AVOCADO_EXT_SYSROOTS/{ext_name}" --dbpath=/var/lib/extension.d/rpm interactive: false, // runs_on handled by shared context container_args, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/runtime/clean.rs b/src/commands/runtime/clean.rs index fd4f7d5..d6b5539 100644 --- a/src/commands/runtime/clean.rs +++ b/src/commands/runtime/clean.rs @@ -12,6 +12,7 @@ pub struct RuntimeCleanCommand { target: Option, container_args: Option>, dnf_args: Option>, + sdk_arch: Option, } impl RuntimeCleanCommand { @@ -30,9 +31,16 @@ impl RuntimeCleanCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { let config = load_config(&self.config_path)?; let content = std::fs::read_to_string(&self.config_path)?; @@ -122,6 +130,7 @@ rm -rf "$AVOCADO_PREFIX/.stamps/runtime/{runtime}" self.container_args.as_ref(), ), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), 
..Default::default() }; let success = container_helper.run_in_container(config).await?; diff --git a/src/commands/runtime/deploy.rs b/src/commands/runtime/deploy.rs index 6c0688e..be5a271 100644 --- a/src/commands/runtime/deploy.rs +++ b/src/commands/runtime/deploy.rs @@ -17,6 +17,7 @@ pub struct RuntimeDeployCommand { container_args: Option>, dnf_args: Option>, no_stamps: bool, + sdk_arch: Option, } impl RuntimeDeployCommand { @@ -38,6 +39,7 @@ impl RuntimeDeployCommand { container_args, dnf_args, no_stamps: false, + sdk_arch: None, } } @@ -47,6 +49,12 @@ impl RuntimeDeployCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration let config = load_config(&self.config_path)?; @@ -99,6 +107,7 @@ impl RuntimeDeployCommand { verbose: false, source_environment: true, interactive: false, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -149,6 +158,7 @@ impl RuntimeDeployCommand { env_vars: Some(env_vars), container_args: config.merge_sdk_container_args(self.container_args.as_ref()), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let deploy_result = container_helper diff --git a/src/commands/runtime/dnf.rs b/src/commands/runtime/dnf.rs index 4ef18a8..a9d670a 100644 --- a/src/commands/runtime/dnf.rs +++ b/src/commands/runtime/dnf.rs @@ -13,6 +13,7 @@ pub struct RuntimeDnfCommand { target: Option, container_args: Option>, dnf_args: Option>, + sdk_arch: Option, } impl RuntimeDnfCommand { @@ -33,9 +34,16 @@ impl RuntimeDnfCommand { target, container_args, dnf_args, + sdk_arch: None, } } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { let config = 
Config::load(&self.config_path)?; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); @@ -160,6 +168,7 @@ impl RuntimeDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let dir_exists = container_helper.run_in_container(config).await?; @@ -205,6 +214,7 @@ impl RuntimeDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let setup_success = container_helper.run_in_container(config).await?; @@ -256,6 +266,7 @@ impl RuntimeDnfCommand { repo_release: repo_release.cloned(), container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = container_helper.run_in_container(config).await?; diff --git a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index c4f4e50..40d1cca 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -22,6 +22,7 @@ pub struct RuntimeInstallCommand { no_stamps: bool, runs_on: Option, nfs_port: Option, + sdk_arch: Option, } impl RuntimeInstallCommand { @@ -45,6 +46,7 @@ impl RuntimeInstallCommand { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, } } @@ -61,6 +63,12 @@ impl RuntimeInstallCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load the configuration and parse raw TOML let config = Config::load(&self.config_path)?; @@ -264,6 +272,7 @@ impl RuntimeInstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: 
self.sdk_arch.clone(), ..Default::default() }; @@ -337,6 +346,7 @@ impl RuntimeInstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let installroot_exists = @@ -356,6 +366,7 @@ impl RuntimeInstallCommand { container_args: merged_container_args.clone(), dnf_args: self.dnf_args.clone(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = @@ -498,6 +509,7 @@ $DNF_SDK_HOST \ dnf_args: self.dnf_args.clone(), disable_weak_dependencies: config.get_sdk_disable_weak_dependencies(), // runs_on handled by shared context + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; let success = diff --git a/src/commands/runtime/provision.rs b/src/commands/runtime/provision.rs index f34981a..d219460 100644 --- a/src/commands/runtime/provision.rs +++ b/src/commands/runtime/provision.rs @@ -37,6 +37,8 @@ pub struct RuntimeProvisionConfig { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// SDK container architecture for cross-arch emulation + pub sdk_arch: Option, } pub struct RuntimeProvisionCommand { @@ -134,6 +136,7 @@ impl RuntimeProvisionCommand { interactive: false, runs_on: self.config.runs_on.clone(), nfs_port: self.config.nfs_port, + sdk_arch: self.config.sdk_arch.clone(), ..Default::default() }; @@ -326,6 +329,7 @@ impl RuntimeProvisionCommand { dnf_args: self.config.dnf_args.clone(), runs_on: self.config.runs_on.clone(), nfs_port: self.config.nfs_port, + sdk_arch: self.config.sdk_arch.clone(), ..Default::default() }; @@ -394,6 +398,7 @@ impl RuntimeProvisionCommand { interactive: false, runs_on: self.config.runs_on.clone(), nfs_port: self.config.nfs_port, + sdk_arch: self.config.sdk_arch.clone(), ..Default::default() }; @@ -960,6 +965,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = 
RuntimeProvisionCommand::new(config); @@ -989,6 +995,7 @@ mod tests { no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = RuntimeProvisionCommand::new(config); @@ -1040,6 +1047,7 @@ runtime: no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let command = RuntimeProvisionCommand::new(provision_config); @@ -1085,6 +1093,7 @@ runtime: no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = RuntimeProvisionCommand::new(config); @@ -1123,6 +1132,7 @@ runtime: no_stamps: false, runs_on: None, nfs_port: None, + sdk_arch: None, }; let cmd = RuntimeProvisionCommand::new(config); diff --git a/src/commands/runtime/sign.rs b/src/commands/runtime/sign.rs index 2686e2a..8674787 100644 --- a/src/commands/runtime/sign.rs +++ b/src/commands/runtime/sign.rs @@ -27,6 +27,7 @@ pub struct RuntimeSignCommand { #[allow(dead_code)] // Included for API consistency with other commands dnf_args: Option>, no_stamps: bool, + sdk_arch: Option, } impl RuntimeSignCommand { @@ -46,6 +47,7 @@ impl RuntimeSignCommand { container_args, dnf_args, no_stamps: false, + sdk_arch: None, } } @@ -55,6 +57,12 @@ impl RuntimeSignCommand { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + pub async fn execute(&self) -> Result<()> { // Load configuration let config = load_config(&self.config_path)?; @@ -89,6 +97,7 @@ impl RuntimeSignCommand { verbose: false, source_environment: true, interactive: false, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -179,6 +188,7 @@ impl RuntimeSignCommand { verbose: self.verbose, source_environment: true, interactive: false, + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index 2b38bc5..1774365 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -187,8 +187,9 @@ impl 
SdkInstallCommand { async fn fetch_remote_extensions(&self, _config: &Config, target: &str) -> Result<()> { use crate::commands::ext::ExtFetchCommand; - // Discover remote extensions - let remote_extensions = Config::discover_remote_extensions(&self.config_path)?; + // Discover remote extensions (with target interpolation for extension names) + let remote_extensions = + Config::discover_remote_extensions(&self.config_path, Some(target))?; if remote_extensions.is_empty() { return Ok(()); diff --git a/src/main.rs b/src/main.rs index a2677be..8dd72bc 100644 --- a/src/main.rs +++ b/src/main.rs @@ -807,7 +807,8 @@ async fn main() -> Result<()> { dnf_args, ) .with_no_stamps(cli.no_stamps) - .with_runs_on(cli.runs_on.clone(), cli.nfs_port); + .with_runs_on(cli.runs_on.clone(), cli.nfs_port) + .with_sdk_arch(cli.sdk_arch.clone()); install_cmd.execute().await?; Ok(()) } @@ -830,7 +831,8 @@ async fn main() -> Result<()> { dnf_args, ) .with_no_stamps(cli.no_stamps) - .with_runs_on(cli.runs_on.clone(), cli.nfs_port); + .with_runs_on(cli.runs_on.clone(), cli.nfs_port) + .with_sdk_arch(cli.sdk_arch.clone()); build_cmd.execute().await?; Ok(()) } @@ -851,7 +853,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); fetch_cmd.execute().await?; Ok(()) } @@ -887,6 +890,7 @@ async fn main() -> Result<()> { no_stamps: cli.no_stamps, runs_on: cli.runs_on.clone(), nfs_port: cli.nfs_port, + sdk_arch: cli.sdk_arch.clone(), }); provision_cmd.execute().await?; Ok(()) @@ -909,7 +913,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); deploy_cmd.execute().await?; Ok(()) } @@ -1012,7 +1017,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); install_cmd.execute().await?; Ok(()) } 
@@ -1033,7 +1039,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); build_cmd.execute().await?; Ok(()) } @@ -1065,6 +1072,7 @@ async fn main() -> Result<()> { no_stamps: cli.no_stamps, runs_on: cli.runs_on.clone(), nfs_port: cli.nfs_port, + sdk_arch: cli.sdk_arch.clone(), }, ); provision_cmd.execute().await?; @@ -1101,7 +1109,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); dnf_cmd.execute().await?; Ok(()) } @@ -1120,7 +1129,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); clean_cmd.execute().await?; Ok(()) } @@ -1142,7 +1152,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); deploy_cmd.execute().await?; Ok(()) } @@ -1162,7 +1173,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); sign_cmd.execute().await?; Ok(()) } @@ -1186,7 +1198,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); install_cmd.execute().await?; Ok(()) } @@ -1205,7 +1218,8 @@ async fn main() -> Result<()> { force, target.or(cli.target.clone()), container_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); fetch_cmd.execute().await?; Ok(()) } @@ -1225,7 +1239,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); build_cmd.execute().await?; Ok(()) } @@ -1247,7 +1262,8 @@ async fn main() -> Result<()> { container_tool, 
target.or(cli.target), ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); checkout_cmd.execute().await?; Ok(()) } @@ -1282,7 +1298,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); dnf_cmd.execute().await?; Ok(()) } @@ -1301,7 +1318,8 @@ async fn main() -> Result<()> { target.or(cli.target), container_args, dnf_args, - ); + ) + .with_sdk_arch(cli.sdk_arch.clone()); clean_cmd.execute().await?; Ok(()) } @@ -1321,7 +1339,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); image_cmd.execute().await?; Ok(()) } @@ -1343,7 +1362,8 @@ async fn main() -> Result<()> { container_args, dnf_args, ) - .with_no_stamps(cli.no_stamps); + .with_no_stamps(cli.no_stamps) + .with_sdk_arch(cli.sdk_arch.clone()); package_cmd.execute().await?; Ok(()) } @@ -1368,6 +1388,7 @@ async fn main() -> Result<()> { verbose, port, no_stamps: no_stamps || cli.no_stamps, + sdk_arch: cli.sdk_arch.clone(), }; hitl_cmd.execute().await?; Ok(()) diff --git a/src/utils/config.rs b/src/utils/config.rs index bbc86df..ccb5f4f 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -131,6 +131,9 @@ pub enum ExtensionSource { Repo { /// Version to fetch (e.g., "0.1.0" or "*") version: String, + /// Optional RPM package name (defaults to extension name if not specified) + #[serde(skip_serializing_if = "Option::is_none")] + package: Option, /// Optional custom repository name #[serde(skip_serializing_if = "Option::is_none")] repo_name: Option, @@ -691,13 +694,6 @@ impl Config { let mut extension_sources: std::collections::HashMap = std::collections::HashMap::new(); - // Discover remote extensions from the main config - let remote_extensions = Self::discover_remote_extensions_from_value(main_config)?; - - if remote_extensions.is_empty() { - return 
Ok(extension_sources); - } - // Get the src_dir and target to find the extensions directory // First deserialize just to get src_dir and default_target let temp_config: Config = @@ -727,6 +723,14 @@ impl Config { } }; + // Discover remote extensions from the main config (with target interpolation) + let remote_extensions = + Self::discover_remote_extensions_from_value(main_config, Some(&resolved_target))?; + + if remote_extensions.is_empty() { + return Ok(extension_sources); + } + // Get extensions directory from volume or fallback path let extensions_dir = temp_config.get_extensions_dir(&config_path.to_string_lossy(), &resolved_target); @@ -1829,6 +1833,7 @@ impl Config { /// Returns Some(ExtensionSource) if the extension has a source field, /// None if it's a local extension (no source field) pub fn parse_extension_source( + ext_name: &str, ext_config: &serde_yaml::Value, ) -> Result> { let source = ext_config.get("source"); @@ -1838,7 +1843,12 @@ impl Config { Some(source_value) => { // Deserialize the source block into ExtensionSource let source: ExtensionSource = serde_yaml::from_value(source_value.clone()) - .with_context(|| "Failed to parse extension source configuration")?; + .with_context(|| { + format!( + "Failed to parse source configuration for extension '{}'", + ext_name + ) + })?; Ok(Some(source)) } } @@ -1847,26 +1857,45 @@ impl Config { /// Discover all remote extensions in the configuration /// /// Returns a list of (extension_name, ExtensionSource) tuples for extensions - /// that have a `source` field in their configuration - pub fn discover_remote_extensions(config_path: &str) -> Result> { + /// that have a `source` field in their configuration. + /// + /// If `target` is provided, extension names containing `{{ avocado.target }}` + /// will be interpolated with the target value. 
+ pub fn discover_remote_extensions( + config_path: &str, + target: Option<&str>, + ) -> Result> { let content = std::fs::read_to_string(config_path) .with_context(|| format!("Failed to read config file: {config_path}"))?; let parsed = Self::parse_config_value(config_path, &content)?; - Self::discover_remote_extensions_from_value(&parsed) + Self::discover_remote_extensions_from_value(&parsed, target) } /// Discover remote extensions from a parsed config value + /// + /// If `target` is provided, extension names containing `{{ avocado.target }}` + /// will be interpolated with the target value. pub fn discover_remote_extensions_from_value( parsed: &serde_yaml::Value, + target: Option<&str>, ) -> Result> { + use crate::utils::interpolation::interpolate_name; + let mut remote_extensions = Vec::new(); if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { for (ext_name_key, ext_config) in ext_section { - if let Some(ext_name) = ext_name_key.as_str() { - if let Some(source) = Self::parse_extension_source(ext_config)? { - remote_extensions.push((ext_name.to_string(), source)); + if let Some(raw_ext_name) = ext_name_key.as_str() { + // Interpolate extension name if target is provided + let ext_name = if let Some(t) = target { + interpolate_name(raw_ext_name, t) + } else { + raw_ext_name.to_string() + }; + + if let Some(source) = Self::parse_extension_source(&ext_name, ext_config)? { + remote_extensions.push((ext_name, source)); } } } diff --git a/src/utils/container.rs b/src/utils/container.rs index d6266d9..5adcdaf 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -73,6 +73,31 @@ pub fn sdk_arch_to_platform(sdk_arch: &str) -> Result { } } +/// Get the host's native platform in Docker format (e.g., "linux/amd64" or "linux/arm64"). +/// This is used to explicitly request the native platform variant from multi-arch images, +/// ensuring Docker keeps both variants cached when switching between native and emulated runs. 
+pub fn get_host_platform() -> String { + let arch = std::env::consts::ARCH; + match arch { + "x86_64" => "linux/amd64".to_string(), + "aarch64" => "linux/arm64".to_string(), + // Fallback for other architectures + "arm" => "linux/arm/v7".to_string(), + "riscv64" => "linux/riscv64".to_string(), + _ => format!("linux/{}", arch), + } +} + +/// Get the platform to use for container execution. +/// If sdk_arch is specified, use that platform (for cross-arch emulation). +/// Otherwise, use the host's native platform to ensure Docker pulls/uses the correct variant. +pub fn get_container_platform(sdk_arch: Option<&str>) -> Result { + match sdk_arch { + Some(arch) => sdk_arch_to_platform(arch), + None => Ok(get_host_platform()), + } +} + /// Add security options to container command based on host security module. /// - SELinux (Fedora/RHEL): adds --security-opt label=disable /// - AppArmor (Ubuntu/Debian): adds --security-opt apparmor=unconfined @@ -515,12 +540,12 @@ impl SdkContainer { "label=disable".to_string(), ]; - // Add platform flag for cross-architecture emulation via Docker buildx/QEMU - if let Some(ref sdk_arch) = config.sdk_arch { - let platform = sdk_arch_to_platform(sdk_arch)?; - extra_args.push("--platform".to_string()); - extra_args.push(platform); - } + // Always add platform flag to ensure Docker uses the correct image variant. + // This prevents Docker from caching only one variant when switching between + // native and cross-arch emulated runs. 
+ let platform = get_container_platform(config.sdk_arch.as_deref())?; + extra_args.push("--platform".to_string()); + extra_args.push(platform); if let Some(ref args) = config.container_args { extra_args.extend(args.clone()); @@ -564,12 +589,12 @@ impl SdkContainer { ) -> Result> { let mut container_cmd = vec![self.container_tool.clone(), "run".to_string()]; - // Add platform flag for cross-architecture emulation via Docker buildx/QEMU - if let Some(ref sdk_arch) = config.sdk_arch { - let platform = sdk_arch_to_platform(sdk_arch)?; - container_cmd.push("--platform".to_string()); - container_cmd.push(platform); - } + // Always add platform flag to ensure Docker uses the correct image variant. + // This prevents Docker from caching only one variant when switching between + // native and cross-arch emulated runs. + let platform = get_container_platform(config.sdk_arch.as_deref())?; + container_cmd.push("--platform".to_string()); + container_cmd.push(platform); // Container options if config.rm { @@ -1001,12 +1026,12 @@ impl SdkContainer { "label=disable".to_string(), ]; - // Add platform flag for cross-architecture emulation via Docker buildx/QEMU - if let Some(ref sdk_arch) = config.sdk_arch { - let platform = sdk_arch_to_platform(sdk_arch)?; - extra_args.push("--platform".to_string()); - extra_args.push(platform); - } + // Always add platform flag to ensure Docker uses the correct image variant. + // This prevents Docker from caching only one variant when switching between + // native and cross-arch emulated runs. 
+ let platform = get_container_platform(config.sdk_arch.as_deref())?; + extra_args.push("--platform".to_string()); + extra_args.push(platform); if let Some(ref args) = config.container_args { extra_args.extend(args.clone()); @@ -2066,4 +2091,35 @@ mod tests { .to_string() .contains("Unsupported SDK architecture")); } + + #[test] + fn test_get_host_platform_returns_valid_format() { + let platform = get_host_platform(); + assert!(platform.starts_with("linux/")); + // Should be one of the common architectures + let valid_archs = ["amd64", "arm64", "arm/v7", "riscv64"]; + let arch_part = platform.strip_prefix("linux/").unwrap(); + assert!( + valid_archs.contains(&arch_part) || !arch_part.is_empty(), + "Unexpected platform: {}", + platform + ); + } + + #[test] + fn test_get_container_platform_with_sdk_arch() { + let result = get_container_platform(Some("aarch64")).unwrap(); + assert_eq!(result, "linux/arm64"); + + let result = get_container_platform(Some("x86-64")).unwrap(); + assert_eq!(result, "linux/amd64"); + } + + #[test] + fn test_get_container_platform_without_sdk_arch() { + let result = get_container_platform(None).unwrap(); + // Should return the host platform + assert!(result.starts_with("linux/")); + assert_eq!(result, get_host_platform()); + } } diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs index 9653146..6201016 100644 --- a/src/utils/ext_fetch.rs +++ b/src/utils/ext_fetch.rs @@ -28,6 +28,8 @@ pub struct ExtensionFetcher { repo_release: Option, /// Container arguments container_args: Option>, + /// SDK container architecture for cross-arch emulation + sdk_arch: Option, } impl ExtensionFetcher { @@ -46,6 +48,7 @@ impl ExtensionFetcher { repo_url: None, repo_release: None, container_args: None, + sdk_arch: None, } } @@ -67,6 +70,12 @@ impl ExtensionFetcher { self } + /// Set SDK container architecture for cross-arch emulation + pub fn with_sdk_arch(mut self, sdk_arch: Option) -> Self { + self.sdk_arch = sdk_arch; + self + } + /// Fetch an 
extension based on its source configuration /// /// Returns the path where the extension was installed @@ -79,9 +88,19 @@ impl ExtensionFetcher { let ext_install_path = install_dir.join(ext_name); match source { - ExtensionSource::Repo { version, repo_name } => { - self.fetch_from_repo(ext_name, version, repo_name.as_deref(), &ext_install_path) - .await?; + ExtensionSource::Repo { + version, + package, + repo_name, + } => { + self.fetch_from_repo( + ext_name, + version, + package.as_deref(), + repo_name.as_deref(), + &ext_install_path, + ) + .await?; } ExtensionSource::Git { url, @@ -111,13 +130,17 @@ impl ExtensionFetcher { &self, ext_name: &str, version: &str, + package: Option<&str>, repo_name: Option<&str>, install_path: &Path, ) -> Result<()> { + // Use explicit package name if provided, otherwise fall back to extension name + let package_name = package.unwrap_or(ext_name); + if self.verbose { print_info( &format!( - "Fetching extension '{ext_name}' version '{version}' from package repository" + "Fetching extension '{ext_name}' (package: '{package_name}') version '{version}' from package repository" ), OutputLevel::Normal, ); @@ -131,11 +154,11 @@ impl ExtensionFetcher { ) })?; - // Build the package spec + // Build the package spec using the package name (not extension name) let package_spec = if version == "*" { - ext_name.to_string() + package_name.to_string() } else { - format!("{ext_name}-{version}") + format!("{package_name}-{version}") }; // Build the DNF command to download and extract the package @@ -151,15 +174,14 @@ set -e # Create temp directory for download TMPDIR=$(mktemp -d) -cd "$TMPDIR" -# Download the extension package -dnf download {repo_arg} --destdir="$TMPDIR" {package_spec} +# Download the extension package using dnf install --downloadonly +dnf install -y {repo_arg} --downloadonly --downloaddir="$TMPDIR" {package_spec} # Find the downloaded RPM -RPM_FILE=$(ls -1 *.rpm 2>/dev/null | head -1) +RPM_FILE=$(ls -1 "$TMPDIR"/*.rpm 2>/dev/null | 
head -1) if [ -z "$RPM_FILE" ]; then - echo "ERROR: Failed to download extension package '{ext_name}'" + echo "ERROR: Failed to download package '{package_spec}' for extension '{ext_name}'" exit 1 fi @@ -167,9 +189,9 @@ fi # The package root / maps to the extension's src_dir mkdir -p "{install_path_str}" cd "{install_path_str}" -rpm2cpio "$TMPDIR/$RPM_FILE" | cpio -idmv +rpm2cpio "$RPM_FILE" | cpio -idmv -echo "Successfully fetched extension '{ext_name}' to {install_path_str}" +echo "Successfully fetched extension '{ext_name}' (package: {package_spec}) to {install_path_str}" # Cleanup rm -rf "$TMPDIR" @@ -187,6 +209,7 @@ rm -rf "$TMPDIR" repo_url: self.repo_url.clone(), repo_release: self.repo_release.clone(), container_args: self.container_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; @@ -278,6 +301,7 @@ echo "Successfully fetched extension '{ext_name}' from git" repo_url: self.repo_url.clone(), repo_release: self.repo_release.clone(), container_args: self.container_args.clone(), + sdk_arch: self.sdk_arch.clone(), ..Default::default() }; diff --git a/src/utils/interpolation/mod.rs b/src/utils/interpolation/mod.rs index 81caac3..6b4bc1e 100644 --- a/src/utils/interpolation/mod.rs +++ b/src/utils/interpolation/mod.rs @@ -54,6 +54,29 @@ pub mod env; const MAX_ITERATIONS: usize = 100; +/// Interpolate a simple string with the target value. +/// +/// This is a lightweight interpolation for extension names and other strings +/// that only need `{{ avocado.target }}` interpolation without the full config context. 
+/// +/// # Arguments +/// * `input` - The string to interpolate +/// * `target` - The target architecture value +/// +/// # Returns +/// The interpolated string with `{{ avocado.target }}` replaced +/// +/// # Examples +/// ``` +/// # use avocado_cli::utils::interpolation::interpolate_name; +/// let result = interpolate_name("my-ext-{{ avocado.target }}", "raspberrypi4"); +/// assert_eq!(result, "my-ext-raspberrypi4"); +/// ``` +pub fn interpolate_name(input: &str, target: &str) -> String { + let re = Regex::new(r"\{\{\s*avocado\.target\s*\}\}").unwrap(); + re.replace_all(input, target).to_string() +} + /// Interpolate configuration values in a YAML structure. /// /// This function recursively walks the YAML structure and replaces template strings From 815be9e211cb05ad809aa0a70a77605f6a3b7f15 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 18:48:42 -0500 Subject: [PATCH 05/23] update ext package arch --- src/commands/ext/fetch.rs | 34 ++++++--- src/commands/ext/package.rs | 48 ++++++------ src/commands/sdk/install.rs | 89 ++++++++++++---------- src/utils/ext_fetch.rs | 147 +++++++++++++++++------------------- 4 files changed, 165 insertions(+), 153 deletions(-) diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs index 6ec02be..54dbcfc 100644 --- a/src/commands/ext/fetch.rs +++ b/src/commands/ext/fetch.rs @@ -26,6 +26,10 @@ pub struct ExtFetchCommand { pub container_args: Option>, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Run command on remote host + pub runs_on: Option, + /// NFS port for remote execution + pub nfs_port: Option, } impl ExtFetchCommand { @@ -46,6 +50,8 @@ impl ExtFetchCommand { target, container_args, sdk_arch: None, + runs_on: None, + nfs_port: None, } } @@ -55,6 +61,13 @@ impl ExtFetchCommand { self } + /// Set remote execution host and NFS port + pub fn with_runs_on(mut self, runs_on: String, nfs_port: Option) -> Self { + self.runs_on = Some(runs_on); + self.nfs_port = 
nfs_port; + self + } + /// Execute the fetch command pub async fn execute(&self) -> Result<()> { // Load configuration @@ -102,17 +115,10 @@ impl ExtFetchCommand { return Ok(()); } - // Get the extensions install directory + // Get the extensions install directory (container path) + // The directory will be created inside the container, not on the host let extensions_dir = config.get_extensions_dir(&self.config_path, &target); - // Ensure the extensions directory exists - std::fs::create_dir_all(&extensions_dir).with_context(|| { - format!( - "Failed to create extensions directory: {}", - extensions_dir.display() - ) - })?; - if self.verbose { print_info( &format!( @@ -125,6 +131,14 @@ impl ExtFetchCommand { } // Create the fetcher + // If container_args were already passed (e.g., from sdk install), use them directly + // Otherwise, merge from config + let effective_container_args = if self.container_args.is_some() { + self.container_args.clone() + } else { + config.merge_sdk_container_args(None) + }; + let fetcher = ExtensionFetcher::new( self.config_path.clone(), target.clone(), @@ -133,7 +147,7 @@ impl ExtFetchCommand { ) .with_repo_url(config.get_sdk_repo_url()) .with_repo_release(config.get_sdk_repo_release()) - .with_container_args(config.merge_sdk_container_args(self.container_args.as_ref())) + .with_container_args(effective_container_args) .with_sdk_arch(self.sdk_arch.clone()); // Fetch each extension diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 523de93..72a2013 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -177,7 +177,7 @@ impl ExtPackageCommand { fn extract_rpm_metadata( &self, ext_config: &serde_yaml::Value, - target: &str, + _target: &str, // Not used - extensions default to noarch ) -> Result { // Version is required let version = ext_config @@ -225,11 +225,13 @@ impl ExtPackageCommand { .unwrap_or("Unspecified") .to_string(); + // Default to noarch for extension source packages since they 
contain + // configs/code, not compiled binaries. Can be overridden in ext config. let arch = ext_config .get("arch") .and_then(|v| v.as_str()) .map(|s| s.to_string()) - .unwrap_or_else(|| self.generate_arch_from_target(target)); + .unwrap_or_else(|| "noarch".to_string()); let vendor = ext_config .get("vendor") @@ -282,6 +284,9 @@ impl ExtPackageCommand { } /// Generate architecture from target by replacing dashes with underscores + /// Generate a target-specific architecture name (for binaries) + /// Note: Extension source packages should use "noarch" instead + #[allow(dead_code)] fn generate_arch_from_target(&self, target: &str) -> String { format!("avocado_{}", target.replace('-', "_")) } @@ -994,7 +999,7 @@ mod tests { "System extension package for test-extension" ); assert_eq!(metadata.license, "Unspecified"); - assert_eq!(metadata.arch, "avocado_x86_64_unknown_linux_gnu"); + assert_eq!(metadata.arch, "noarch"); // Extension source packages default to noarch assert_eq!(metadata.vendor, "Unspecified"); assert_eq!(metadata.group, "system-extension"); assert_eq!(metadata.url, None); @@ -1088,7 +1093,7 @@ mod tests { } #[test] - fn test_arch_generation_with_different_targets() { + fn test_arch_defaults_to_noarch_for_all_targets() { let cmd = ExtPackageCommand::new( "test.yaml".to_string(), "test-ext".to_string(), @@ -1105,30 +1110,23 @@ mod tests { serde_yaml::Value::String("1.0.0".to_string()), ); - // Test various target architectures - let test_cases = vec![ - ( - "x86_64-unknown-linux-gnu", - "avocado_x86_64_unknown_linux_gnu", - ), - ( - "aarch64-unknown-linux-gnu", - "avocado_aarch64_unknown_linux_gnu", - ), - ( - "riscv64-unknown-linux-gnu", - "avocado_riscv64_unknown_linux_gnu", - ), - ("i686-unknown-linux-gnu", "avocado_i686_unknown_linux_gnu"), - ( - "armv7-unknown-linux-gnueabihf", - "avocado_armv7_unknown_linux_gnueabihf", - ), + // Extension source packages should default to noarch regardless of target + // since they contain configs/code, not 
compiled binaries + let targets = vec![ + "x86_64-unknown-linux-gnu", + "aarch64-unknown-linux-gnu", + "riscv64-unknown-linux-gnu", + "i686-unknown-linux-gnu", + "armv7-unknown-linux-gnueabihf", + "raspberrypi4", ]; - for (target, expected_arch) in test_cases { + for target in targets { let metadata = cmd.extract_rpm_metadata(&ext_config, target).unwrap(); - assert_eq!(metadata.arch, expected_arch, "Failed for target: {target}"); + assert_eq!( + metadata.arch, "noarch", + "Extension should default to noarch for target: {target}" + ); } } diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index 1774365..139d30f 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -91,12 +91,8 @@ impl SdkInstallCommand { .with_context(|| format!("Failed to load config from {}", self.config_path))?; let target = validate_and_log_target(self.target.as_deref(), &basic_config)?; - // Fetch remote extensions before loading composed config - // This ensures their configs can be merged in - self.fetch_remote_extensions(&basic_config, &target).await?; - - // Load the composed configuration (merges external configs, applies interpolation) - // This now includes configs from fetched remote extensions + // Load initial composed configuration (without remote extensions yet) + // Remote extensions will be fetched after SDK bootstrap when repos are available let composed = Config::load_composed(&self.config_path, self.target.as_deref()) .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; @@ -105,10 +101,6 @@ impl SdkInstallCommand { // Merge container args from config with CLI args let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); - // Serialize the merged config back to string for extension parsing methods - let config_content = serde_yaml::to_string(&composed.merged_value) - .with_context(|| "Failed to serialize composed config")?; - // Get the SDK image from configuration 
let container_image = config.get_sdk_image().ok_or_else(|| { anyhow::anyhow!("No container image specified in config under 'sdk.image'") @@ -121,14 +113,8 @@ impl SdkInstallCommand { .get_sdk_dependencies_for_target(&self.config_path, &target) .with_context(|| "Failed to get SDK dependencies with target interpolation")?; - // Get extension SDK dependencies (from the composed, interpolated config) - let extension_sdk_dependencies = config - .get_extension_sdk_dependencies_with_config_path_and_target( - &config_content, - Some(&self.config_path), - Some(&target), - ) - .with_context(|| "Failed to parse extension SDK dependencies")?; + // Note: extension_sdk_dependencies is computed inside execute_install after + // fetching remote extensions, since we need SDK repos to be available first // Get repo_url and repo_release from config let repo_url = config.get_sdk_repo_url(); @@ -154,11 +140,9 @@ impl SdkInstallCommand { let result = self .execute_install( config, - &composed, &target, container_image, &sdk_dependencies, - &extension_sdk_dependencies, repo_url.as_deref(), repo_release.as_deref(), &container_helper, @@ -180,11 +164,15 @@ impl SdkInstallCommand { result } - /// Fetch remote extensions before SDK install + /// Fetch remote extensions after SDK bootstrap /// /// This discovers extensions with a `source` field and fetches them - /// to `$AVOCADO_PREFIX/includes/` so their configs can be merged. - async fn fetch_remote_extensions(&self, _config: &Config, target: &str) -> Result<()> { + /// using the SDK environment where repos are already configured. 
+ async fn fetch_remote_extensions_in_sdk( + &self, + target: &str, + merged_container_args: Option<&Vec>, + ) -> Result<()> { use crate::commands::ext::ExtFetchCommand; // Discover remote extensions (with target interpolation for extension names) @@ -195,25 +183,29 @@ impl SdkInstallCommand { return Ok(()); } - if self.verbose { - print_info( - &format!( - "Fetching {} remote extension(s) before SDK install...", - remote_extensions.len() - ), - OutputLevel::Normal, - ); - } + print_info( + &format!( + "Fetching {} remote extension(s)...", + remote_extensions.len() + ), + OutputLevel::Normal, + ); - // Use ExtFetchCommand to fetch extensions - let fetch_cmd = ExtFetchCommand::new( + // Use ExtFetchCommand to fetch extensions with SDK environment + let mut fetch_cmd = ExtFetchCommand::new( self.config_path.clone(), None, // Fetch all remote extensions self.verbose, false, // Don't force re-fetch Some(target.to_string()), - self.container_args.clone(), - ); + merged_container_args.cloned(), + ) + .with_sdk_arch(self.sdk_arch.clone()); + + // Pass through the runs_on context for remote execution + if let Some(runs_on) = &self.runs_on { + fetch_cmd = fetch_cmd.with_runs_on(runs_on.clone(), self.nfs_port); + } fetch_cmd.execute().await?; @@ -225,11 +217,9 @@ impl SdkInstallCommand { async fn execute_install( &self, config: &Config, - composed: &crate::utils::config::ComposedConfig, target: &str, container_image: &str, sdk_dependencies: &Option>, - extension_sdk_dependencies: &HashMap>, repo_url: Option<&str>, repo_release: Option<&str>, container_helper: &SdkContainer, @@ -628,6 +618,27 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS \ return Err(anyhow::anyhow!("Failed to install SDK bootstrap.")); } + // Fetch remote extensions now that SDK repos are available + // This uses the SDK environment with configured repos to download extension packages + self.fetch_remote_extensions_in_sdk(target, merged_container_args) + .await?; + + // Reload composed config to include extension configs 
+ let composed = Config::load_composed(&self.config_path, Some(target)) + .with_context(|| "Failed to reload composed config after fetching extensions")?; + let config = &composed.config; + + // Re-compute extension SDK dependencies now that extension configs are available + let config_content = serde_yaml::to_string(&composed.merged_value) + .with_context(|| "Failed to serialize composed config")?; + let extension_sdk_dependencies = config + .get_extension_sdk_dependencies_with_config_path_and_target( + &config_content, + Some(&self.config_path), + Some(target), + ) + .with_context(|| "Failed to parse extension SDK dependencies")?; + // After bootstrap, source environment-setup and configure SSL certs for subsequent commands if self.verbose { print_info( @@ -693,7 +704,7 @@ fi } // Add extension SDK dependencies to the package list - for (ext_name, ext_deps) in extension_sdk_dependencies { + for (ext_name, ext_deps) in &extension_sdk_dependencies { if self.verbose { print_info( &format!("Adding SDK dependencies from extension '{ext_name}'"), diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs index 6201016..68a3818 100644 --- a/src/utils/ext_fetch.rs +++ b/src/utils/ext_fetch.rs @@ -5,7 +5,7 @@ //! - Git repositories (with optional sparse checkout) //! 
- Local filesystem paths -use anyhow::{Context, Result}; +use anyhow::Result; use std::path::{Path, PathBuf}; use crate::utils::config::ExtensionSource; @@ -132,7 +132,7 @@ impl ExtensionFetcher { version: &str, package: Option<&str>, repo_name: Option<&str>, - install_path: &Path, + _install_path: &Path, // Host path - not used, we use container path instead ) -> Result<()> { // Use explicit package name if provided, otherwise fall back to extension name let package_name = package.unwrap_or(ext_name); @@ -146,14 +146,6 @@ impl ExtensionFetcher { ); } - // Create the install directory - std::fs::create_dir_all(install_path).with_context(|| { - format!( - "Failed to create extension directory: {}", - install_path.display() - ) - })?; - // Build the package spec using the package name (not extension name) let package_spec = if version == "*" { package_name.to_string() @@ -165,9 +157,12 @@ impl ExtensionFetcher { // We use --downloadonly and then extract the RPM contents let repo_arg = repo_name.map(|r| format!("--repo={r}")).unwrap_or_default(); - let install_path_str = install_path.to_string_lossy(); + // Use container path $AVOCADO_PREFIX/includes/ instead of host path + // This ensures the directory is created inside the container with proper permissions + let container_install_path = format!("$AVOCADO_PREFIX/includes/{ext_name}"); // The fetch script downloads the package and extracts it to the install path + // Use $DNF_SDK_HOST with $DNF_SDK_COMBINED_REPO_CONF to access target-specific repos let fetch_script = format!( r#" set -e @@ -175,8 +170,19 @@ set -e # Create temp directory for download TMPDIR=$(mktemp -d) -# Download the extension package using dnf install --downloadonly -dnf install -y {repo_arg} --downloadonly --downloaddir="$TMPDIR" {package_spec} +# Download the extension package using SDK DNF with combined repo config +# This includes both SDK repos and target-specific repos (like $AVOCADO_TARGET-ext) +RPM_CONFIGDIR=$AVOCADO_SDK_PREFIX/usr/lib/rpm 
\ +RPM_ETCCONFIGDIR=$AVOCADO_SDK_PREFIX \ +$DNF_SDK_HOST \ + $DNF_SDK_HOST_OPTS \ + $DNF_SDK_COMBINED_REPO_CONF \ + {repo_arg} \ + --downloadonly \ + --downloaddir="$TMPDIR" \ + -y \ + install \ + {package_spec} # Find the downloaded RPM RPM_FILE=$(ls -1 "$TMPDIR"/*.rpm 2>/dev/null | head -1) @@ -185,13 +191,13 @@ if [ -z "$RPM_FILE" ]; then exit 1 fi -# Extract RPM contents to install path +# Extract RPM contents to install path (using container path) # The package root / maps to the extension's src_dir -mkdir -p "{install_path_str}" -cd "{install_path_str}" +mkdir -p "{container_install_path}" +cd "{container_install_path}" rpm2cpio "$RPM_FILE" | cpio -idmv -echo "Successfully fetched extension '{ext_name}' (package: {package_spec}) to {install_path_str}" +echo "Successfully fetched extension '{ext_name}' (package: {package_spec}) to {container_install_path}" # Cleanup rm -rf "$TMPDIR" @@ -230,7 +236,7 @@ rm -rf "$TMPDIR" url: &str, git_ref: Option<&str>, sparse_checkout: Option<&[String]>, - install_path: &Path, + _install_path: &Path, // Host path - not used, we use container path instead ) -> Result<()> { if self.verbose { print_info( @@ -239,14 +245,8 @@ rm -rf "$TMPDIR" ); } - // Create parent directory - if let Some(parent) = install_path.parent() { - std::fs::create_dir_all(parent).with_context(|| { - format!("Failed to create parent directory: {}", parent.display()) - })?; - } - - let install_path_str = install_path.to_string_lossy(); + // Use container path $AVOCADO_PREFIX/includes/ instead of host path + let container_install_path = format!("$AVOCADO_PREFIX/includes/{ext_name}"); let ref_arg = git_ref.unwrap_or("HEAD"); // Build the git clone command @@ -256,9 +256,9 @@ rm -rf "$TMPDIR" format!( r#" set -e -rm -rf "{install_path_str}" -mkdir -p "{install_path_str}" -cd "{install_path_str}" +rm -rf "{container_install_path}" +mkdir -p "{container_install_path}" +cd "{container_install_path}" git init git remote add origin "{url}" git config 
core.sparseCheckout true @@ -278,10 +278,10 @@ echo "Successfully fetched extension '{ext_name}' from git" format!( r#" set -e -rm -rf "{install_path_str}" -git clone --depth 1 --branch {ref_arg} "{url}" "{install_path_str}" || \ -git clone --depth 1 "{url}" "{install_path_str}" -cd "{install_path_str}" +rm -rf "{container_install_path}" +git clone --depth 1 --branch {ref_arg} "{url}" "{container_install_path}" || \ +git clone --depth 1 "{url}" "{container_install_path}" +cd "{container_install_path}" if [ "{ref_arg}" != "HEAD" ]; then git checkout {ref_arg} 2>/dev/null || true fi @@ -320,7 +320,7 @@ echo "Successfully fetched extension '{ext_name}' from git" &self, ext_name: &str, source_path: &str, - install_path: &Path, + _install_path: &Path, // Host path - not used, we use container path instead ) -> Result<()> { if self.verbose { print_info( @@ -346,55 +346,44 @@ echo "Successfully fetched extension '{ext_name}' from git" )); } - // Create the install directory - if let Some(parent) = install_path.parent() { - std::fs::create_dir_all(parent).with_context(|| { - format!("Failed to create parent directory: {}", parent.display()) - })?; - } + // Use container path $AVOCADO_PREFIX/includes/ + let container_install_path = format!("$AVOCADO_PREFIX/includes/{ext_name}"); - // Remove existing install path if it exists - if install_path.exists() { - std::fs::remove_dir_all(install_path).with_context(|| { - format!( - "Failed to remove existing directory: {}", - install_path.display() - ) - })?; - } - - // Copy the directory (or create symlink for efficiency) - // For now, we'll copy to ensure isolation - Self::copy_dir_recursive(&resolved_source, install_path)?; + // The source path needs to be accessible from inside the container + // Since the workspace is mounted at $AVOCADO_SRC_DIR, convert the path + let resolved_source_str = resolved_source.to_string_lossy(); - if self.verbose { - print_info( - &format!( - "Successfully copied extension '{ext_name}' from {} to 
{}", - resolved_source.display(), - install_path.display() - ), - OutputLevel::Normal, - ); - } - - Ok(()) - } - - /// Recursively copy a directory - fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> { - std::fs::create_dir_all(dst)?; + // Build copy command to run inside the container + let copy_cmd = format!( + r#" +set -e +rm -rf "{container_install_path}" +mkdir -p "{container_install_path}" +cp -r "{resolved_source_str}/." "{container_install_path}/" +echo "Successfully copied extension '{ext_name}' from {resolved_source_str} to {container_install_path}" +"# + ); - for entry in std::fs::read_dir(src)? { - let entry = entry?; - let src_path = entry.path(); - let dst_path = dst.join(entry.file_name()); + let container_helper = SdkContainer::new().verbose(self.verbose); + let run_config = RunConfig { + container_image: self.container_image.clone(), + target: self.target.clone(), + command: copy_cmd, + verbose: self.verbose, + source_environment: true, + interactive: false, + repo_url: self.repo_url.clone(), + repo_release: self.repo_release.clone(), + container_args: self.container_args.clone(), + sdk_arch: self.sdk_arch.clone(), + ..Default::default() + }; - if src_path.is_dir() { - Self::copy_dir_recursive(&src_path, &dst_path)?; - } else { - std::fs::copy(&src_path, &dst_path)?; - } + let success = container_helper.run_in_container(run_config).await?; + if !success { + return Err(anyhow::anyhow!( + "Failed to copy extension '{ext_name}' from path" + )); } Ok(()) From 4ef76c4446091612aa62fde11bbff16c7064fead Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 18:54:08 -0500 Subject: [PATCH 06/23] fixup package rm commands. 
--- src/commands/ext/package.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 72a2013..90b1d90 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -445,7 +445,8 @@ cp -rp "$EXT_SRC_DIR"/* %{{buildroot}}/ SPEC_EOF # Build the RPM with custom architecture target -rpmbuild --define "_topdir $TMPDIR" --define "_arch {arch}" --target {arch} -bb SPECS/package.spec +# Override %__rm macro since /usr/bin/rm may not exist in container +rpmbuild --define "_topdir $TMPDIR" --define "_arch {arch}" --define "__rm /bin/rm" --target {arch} -bb SPECS/package.spec # Move RPM to output directory mv RPMS/{arch}/*.rpm $AVOCADO_PREFIX/output/extensions/{rpm_filename} || {{ @@ -764,7 +765,8 @@ Group: {}{} SPEC_EOF # Build the RPM with custom architecture target and define the arch macro -rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --target {} -bb SPECS/sdk-package.spec +# Override %__rm macro since /usr/bin/rm may not exist in container +rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --define "__rm /bin/rm" --target {} -bb SPECS/sdk-package.spec # Move RPM to output directory mv RPMS/{}/*.rpm $AVOCADO_PREFIX/output/extensions/{} || {{ From 6b18bb090444879b66b474796d657e845ffdc16d Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 21:28:54 -0500 Subject: [PATCH 07/23] add ext package_files --- src/commands/ext/package.rs | 296 ++++++++++++++++++++++++++++++++++-- 1 file changed, 280 insertions(+), 16 deletions(-) diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 90b1d90..0d9054a 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -143,6 +143,9 @@ impl ExtPackageCommand { // Extract RPM metadata with defaults let rpm_metadata = self.extract_rpm_metadata(&ext_config, &target)?; + // Determine which files to package + let package_files = self.get_package_files(&ext_config); + if 
self.verbose { print_info( &format!( @@ -151,12 +154,22 @@ impl ExtPackageCommand { ), OutputLevel::Normal, ); + print_info( + &format!("Package files: {:?}", package_files), + OutputLevel::Normal, + ); } // Create main RPM package in container // This packages the extension's src_dir (directory containing avocado.yaml) let output_path = self - .create_rpm_package_in_container(&rpm_metadata, &config, &target, &ext_config_path) + .create_rpm_package_in_container( + &rpm_metadata, + &config, + &target, + &ext_config_path, + &package_files, + ) .await?; print_success( @@ -173,6 +186,45 @@ impl ExtPackageCommand { Ok(()) } + /// Determine which files to package based on the extension configuration. + /// + /// If `package_files` is specified in the extension config, use those patterns. + /// Otherwise, default to: + /// - The avocado config file (avocado.yaml, avocado.yml, or avocado.toml) + /// - The overlay directory if defined + fn get_package_files(&self, ext_config: &serde_yaml::Value) -> Vec { + // Check if package_files is explicitly defined + if let Some(package_files) = ext_config.get("package_files") { + if let Some(files_array) = package_files.as_sequence() { + let files: Vec = files_array + .iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect(); + if !files.is_empty() { + return files; + } + } + } + + // Default behavior: avocado.yaml + overlay directory if defined + let mut default_files = vec!["avocado.yaml".to_string()]; + + // Check for overlay configuration + if let Some(overlay) = ext_config.get("overlay") { + if let Some(overlay_dir) = overlay.as_str() { + // Simple string format: overlay = "directory" + default_files.push(overlay_dir.to_string()); + } else if let Some(overlay_table) = overlay.as_mapping() { + // Table format: overlay = { dir = "directory", ... 
} + if let Some(dir) = overlay_table.get("dir").and_then(|d| d.as_str()) { + default_files.push(dir.to_string()); + } + } + } + + default_files + } + /// Extract RPM metadata from extension configuration with defaults fn extract_rpm_metadata( &self, @@ -327,12 +379,20 @@ impl ExtPackageCommand { /// The package root (/) maps to the extension's src_dir contents. /// This allows the extension to be installed to $AVOCADO_PREFIX/includes// /// and its config merged into the main config. + /// + /// # Arguments + /// * `metadata` - RPM metadata for the package + /// * `config` - The avocado configuration + /// * `target` - The target architecture + /// * `ext_config_path` - Path to the extension's config file + /// * `package_files` - List of files/directories to package (supports glob patterns like * and **) async fn create_rpm_package_in_container( &self, metadata: &RpmMetadata, config: &Config, target: &str, ext_config_path: &str, + package_files: &[String], ) -> Result { let container_image = config .get_sdk_image() @@ -366,6 +426,9 @@ impl ExtPackageCommand { metadata.name, metadata.version, metadata.release, metadata.arch ); + // Convert package_files to a space-separated string for the shell script + let package_files_str = package_files.join(" "); + // Create RPM using rpmbuild in container // Package root (/) maps to the extension's src_dir contents let rpm_build_script = format!( @@ -375,6 +438,9 @@ set -e # Extension source directory EXT_SRC_DIR="{container_src_dir}" +# Package files patterns (may contain globs like * and **) +PACKAGE_FILES="{package_files_str}" + # Ensure output directory exists mkdir -p $AVOCADO_PREFIX/output/extensions @@ -390,22 +456,49 @@ if [ ! -f "$EXT_SRC_DIR/avocado.yaml" ] && [ ! -f "$EXT_SRC_DIR/avocado.yml" ] & exit 1 fi -# Count files -FILE_COUNT=$(find "$EXT_SRC_DIR" -type f | wc -l) -echo "Creating RPM with $FILE_COUNT files from source directory..." 
- -if [ "$FILE_COUNT" -eq 0 ]; then - echo "No files found in source directory" - exit 1 -fi - # Create temporary directory for RPM build TMPDIR=$(mktemp -d) +STAGING_DIR="$TMPDIR/staging" +mkdir -p "$STAGING_DIR" cd "$TMPDIR" # Create directory structure for rpmbuild mkdir -p BUILD RPMS SOURCES SPECS SRPMS +# Enable globstar for ** pattern support +shopt -s globstar nullglob + +# Copy files matching patterns to staging directory +cd "$EXT_SRC_DIR" +FILE_COUNT=0 +for pattern in $PACKAGE_FILES; do + # Expand the glob pattern + for file in $pattern; do + if [ -e "$file" ]; then + # Create parent directory in staging and copy + parent_dir=$(dirname "$file") + if [ "$parent_dir" != "." ]; then + mkdir -p "$STAGING_DIR/$parent_dir" + fi + cp -rp "$file" "$STAGING_DIR/$file" + if [ -f "$file" ]; then + FILE_COUNT=$((FILE_COUNT + 1)) + elif [ -d "$file" ]; then + dir_files=$(find "$file" -type f | wc -l) + FILE_COUNT=$((FILE_COUNT + dir_files)) + fi + fi + done +done +cd "$TMPDIR" + +echo "Creating RPM with $FILE_COUNT files from source directory..." 
+ +if [ "$FILE_COUNT" -eq 0 ]; then + echo "No files matched the package_files patterns: $PACKAGE_FILES" + exit 1 +fi + # Create spec file # Package root (/) maps to the extension's src_dir cat > SPECS/package.spec << SPEC_EOF @@ -434,9 +527,9 @@ Group: {group}{url_line} %install mkdir -p %{{buildroot}} -# Copy src_dir contents to buildroot root +# Copy staged files to buildroot root # This allows installation to \$AVOCADO_PREFIX/includes// -cp -rp "$EXT_SRC_DIR"/* %{{buildroot}}/ +cp -rp "$STAGING_DIR"/* %{{buildroot}}/ %clean # Skip clean section - not needed for our use case @@ -445,8 +538,7 @@ cp -rp "$EXT_SRC_DIR"/* %{{buildroot}}/ SPEC_EOF # Build the RPM with custom architecture target -# Override %__rm macro since /usr/bin/rm may not exist in container -rpmbuild --define "_topdir $TMPDIR" --define "_arch {arch}" --define "__rm /bin/rm" --target {arch} -bb SPECS/package.spec +rpmbuild --define "_topdir $TMPDIR" --define "_arch {arch}" --target {arch} -bb SPECS/package.spec # Move RPM to output directory mv RPMS/{arch}/*.rpm $AVOCADO_PREFIX/output/extensions/{rpm_filename} || {{ @@ -477,6 +569,7 @@ rm -rf "$TMPDIR" arch = metadata.arch, rpm_filename = rpm_filename, container_src_dir = container_src_dir, + package_files_str = package_files_str, ); // Run the RPM build in the container @@ -765,8 +858,7 @@ Group: {}{} SPEC_EOF # Build the RPM with custom architecture target and define the arch macro -# Override %__rm macro since /usr/bin/rm may not exist in container -rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --define "__rm /bin/rm" --target {} -bb SPECS/sdk-package.spec +rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --target {} -bb SPECS/sdk-package.spec # Move RPM to output directory mv RPMS/{}/*.rpm $AVOCADO_PREFIX/output/extensions/{} || {{ @@ -1247,4 +1339,176 @@ ext: let cmd = cmd.with_no_stamps(true); assert!(cmd.no_stamps); } + + #[test] + fn test_get_package_files_default_no_overlay() { + let cmd = ExtPackageCommand::new( + 
"test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Config without package_files or overlay - should default to just avocado.yaml + let mut ext_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + ext_config.as_mapping_mut().unwrap().insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + + let files = cmd.get_package_files(&ext_config); + assert_eq!(files, vec!["avocado.yaml".to_string()]); + } + + #[test] + fn test_get_package_files_default_with_overlay_string() { + let cmd = ExtPackageCommand::new( + "test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Config with overlay as string - should include avocado.yaml and overlay dir + let mut ext_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let config_map = ext_config.as_mapping_mut().unwrap(); + config_map.insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("overlay".to_string()), + serde_yaml::Value::String("my-overlay".to_string()), + ); + + let files = cmd.get_package_files(&ext_config); + assert_eq!( + files, + vec!["avocado.yaml".to_string(), "my-overlay".to_string()] + ); + } + + #[test] + fn test_get_package_files_default_with_overlay_table() { + let cmd = ExtPackageCommand::new( + "test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Config with overlay as table { dir = "..." 
} - should include avocado.yaml and overlay dir + let mut ext_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let config_map = ext_config.as_mapping_mut().unwrap(); + config_map.insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + + let mut overlay_table = serde_yaml::Mapping::new(); + overlay_table.insert( + serde_yaml::Value::String("dir".to_string()), + serde_yaml::Value::String("overlays/prod".to_string()), + ); + overlay_table.insert( + serde_yaml::Value::String("mode".to_string()), + serde_yaml::Value::String("opaque".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("overlay".to_string()), + serde_yaml::Value::Mapping(overlay_table), + ); + + let files = cmd.get_package_files(&ext_config); + assert_eq!( + files, + vec!["avocado.yaml".to_string(), "overlays/prod".to_string()] + ); + } + + #[test] + fn test_get_package_files_explicit_list() { + let cmd = ExtPackageCommand::new( + "test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Config with explicit package_files list + let mut ext_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let config_map = ext_config.as_mapping_mut().unwrap(); + config_map.insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + + let package_files = vec![ + serde_yaml::Value::String("avocado.yaml".to_string()), + serde_yaml::Value::String("config/**".to_string()), + serde_yaml::Value::String("scripts/*.sh".to_string()), + serde_yaml::Value::String("README.md".to_string()), + ]; + config_map.insert( + serde_yaml::Value::String("package_files".to_string()), + serde_yaml::Value::Sequence(package_files), + ); + + // Also add overlay - should be ignored when package_files is set + config_map.insert( + serde_yaml::Value::String("overlay".to_string()), + serde_yaml::Value::String("my-overlay".to_string()), + ); + 
+ let files = cmd.get_package_files(&ext_config); + assert_eq!( + files, + vec![ + "avocado.yaml".to_string(), + "config/**".to_string(), + "scripts/*.sh".to_string(), + "README.md".to_string(), + ] + ); + } + + #[test] + fn test_get_package_files_empty_list_uses_default() { + let cmd = ExtPackageCommand::new( + "test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Config with empty package_files list - should fall back to default + let mut ext_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let config_map = ext_config.as_mapping_mut().unwrap(); + config_map.insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("package_files".to_string()), + serde_yaml::Value::Sequence(vec![]), + ); + + let files = cmd.get_package_files(&ext_config); + assert_eq!(files, vec!["avocado.yaml".to_string()]); + } } From a23f8e6196002d4477348d8b8dd3fe8bb852e21e Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Mon, 5 Jan 2026 21:45:42 -0500 Subject: [PATCH 08/23] update sdk dnf to enable ext repos --- src/commands/sdk/dnf.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/commands/sdk/dnf.rs b/src/commands/sdk/dnf.rs index 49b84a4..5b01196 100644 --- a/src/commands/sdk/dnf.rs +++ b/src/commands/sdk/dnf.rs @@ -84,13 +84,15 @@ impl SdkDnfCommand { let container_helper = SdkContainer::new(); // Build DNF command + // Use $DNF_SDK_COMBINED_REPO_CONF to include both SDK repos and target-specific repos + // (including the extension repo: ${AVOCADO_TARGET}-target-ext) let dnf_args_str = if let Some(args) = &self.dnf_args { format!(" {} ", args.join(" ")) } else { String::new() }; let command = format!( - "RPM_CONFIGDIR=$AVOCADO_SDK_PREFIX/usr/lib/rpm $DNF_SDK_HOST $DNF_SDK_HOST_OPTS $DNF_SDK_REPO_CONF --disablerepo=${{AVOCADO_TARGET}}-target-ext {} {}", + 
"RPM_CONFIGDIR=$AVOCADO_SDK_PREFIX/usr/lib/rpm $DNF_SDK_HOST $DNF_SDK_HOST_OPTS $DNF_SDK_COMBINED_REPO_CONF {} {}", dnf_args_str, self.command.join(" ") ); From 32903aa5c3b460ffc70bae35707857a1f0eb5fd7 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Tue, 6 Jan 2026 09:26:52 -0500 Subject: [PATCH 09/23] clean up package output --- src/commands/ext/package.rs | 348 ------------------------------------ 1 file changed, 348 deletions(-) diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 0d9054a..960a182 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -3,7 +3,6 @@ use anyhow::{Context, Result}; -use std::collections::HashMap; use std::fs; use std::path::PathBuf; @@ -335,14 +334,6 @@ impl ExtPackageCommand { format!("System extension package for {name}") } - /// Generate architecture from target by replacing dashes with underscores - /// Generate a target-specific architecture name (for binaries) - /// Note: Extension source packages should use "noarch" instead - #[allow(dead_code)] - fn generate_arch_from_target(&self, target: &str) -> String { - format!("avocado_{}", target.replace('-', "_")) - } - /// Validate semantic versioning format (X.Y.Z where X, Y, Z are non-negative integers) fn validate_semver(version: &str) -> Result<()> { let parts: Vec<&str> = version.split('.').collect(); @@ -730,235 +721,6 @@ rm -rf "$TMPDIR" let container_id = String::from_utf8_lossy(&output.stdout).trim().to_string(); Ok(container_id) } - - /// Get SDK dependencies for the current extension - #[allow(dead_code)] - fn get_extension_sdk_dependencies( - &self, - config: &Config, - config_content: &str, - target: &str, - ) -> Result> { - let extension_sdk_deps = config - .get_extension_sdk_dependencies_with_config_path_and_target( - config_content, - Some(&self.config_path), - Some(target), - )?; - - // Return the SDK dependencies for this specific extension, or empty if none - Ok(extension_sdk_deps - .get(&self.extension) - 
.cloned() - .unwrap_or_default()) - } - - /// Create the SDK RPM package inside the container at $AVOCADO_PREFIX/output/extensions - #[allow(dead_code)] - async fn create_sdk_rpm_package_in_container( - &self, - metadata: &RpmMetadata, - config: &Config, - sdk_dependencies: &HashMap, - target: &str, - ) -> Result { - let container_image = config - .get_sdk_image() - .ok_or_else(|| anyhow::anyhow!("No SDK container image specified in configuration."))?; - - let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); - - // Get the volume state - let cwd = std::env::current_dir().context("Failed to get current directory")?; - let volume_manager = - crate::utils::volume::VolumeManager::new("docker".to_string(), self.verbose); - let volume_state = volume_manager.get_or_create_volume(&cwd).await?; - - // Create SDK RPM metadata with nativesdk- prefix and all_avocadosdk architecture - let sdk_metadata = RpmMetadata { - name: format!("nativesdk-{}", metadata.name), - version: metadata.version.clone(), - release: metadata.release.clone(), - summary: format!("{} SDK dependencies", metadata.summary), - description: format!("SDK dependencies for {}", metadata.description), - license: metadata.license.clone(), - arch: "all_avocadosdk".to_string(), - vendor: metadata.vendor.clone(), - group: metadata.group.clone(), - url: metadata.url.clone(), - }; - - // Create the RPM filename - let rpm_filename = format!( - "{}-{}-{}.{}.rpm", - sdk_metadata.name, sdk_metadata.version, sdk_metadata.release, sdk_metadata.arch - ); - - // Build dependency list for RPM spec - let mut requires_list = Vec::new(); - for (dep_name, dep_value) in sdk_dependencies { - let version_spec = match dep_value { - serde_yaml::Value::String(version) if version == "*" => String::new(), - serde_yaml::Value::String(version) => format!(" = {version}"), - _ => String::new(), - }; - requires_list.push(format!("{dep_name}{version_spec}")); - } - let requires_section = if 
requires_list.is_empty() { - String::new() - } else { - format!("Requires: {}", requires_list.join(", ")) - }; - - // Create SDK RPM using rpmbuild in container - let rpm_build_script = format!( - r#" -# Ensure output directory exists -mkdir -p $AVOCADO_PREFIX/output/extensions - -# Create temporary directory for RPM build -TMPDIR=$(mktemp -d) -cd "$TMPDIR" - -# Create directory structure for rpmbuild -mkdir -p BUILD RPMS SOURCES SPECS SRPMS - -# Create spec file for SDK package (no files, only dependencies) -cat > SPECS/sdk-package.spec << 'SPEC_EOF' -%define _buildhost reproducible - -Name: {} -Version: {} -Release: {} -Summary: {} -License: {} -Vendor: {} -Group: {}{} -{} - -%description -{} - -%files -# No files - this is a dependency-only package - -%prep -# No prep needed - -%build -# No build needed - -%install -# No install needed - dependency-only package - -%clean -# Skip clean section - not needed for our use case - -%changelog -SPEC_EOF - -# Build the RPM with custom architecture target and define the arch macro -rpmbuild --define "_topdir $TMPDIR" --define "_arch {}" --target {} -bb SPECS/sdk-package.spec - -# Move RPM to output directory -mv RPMS/{}/*.rpm $AVOCADO_PREFIX/output/extensions/{} || {{ - mv RPMS/*/*.rpm $AVOCADO_PREFIX/output/extensions/{} 2>/dev/null || {{ - echo "Failed to find built SDK RPM" - exit 1 - }} -}} - -echo "SDK RPM created successfully: $AVOCADO_PREFIX/output/extensions/{}" - -# Cleanup -rm -rf "$TMPDIR" -"#, - sdk_metadata.name, - sdk_metadata.version, - sdk_metadata.release, - sdk_metadata.summary, - sdk_metadata.license, - sdk_metadata.vendor, - sdk_metadata.group, - if let Some(url) = &sdk_metadata.url { - format!("\nURL: {url}") - } else { - String::new() - }, - requires_section, - sdk_metadata.description, - sdk_metadata.arch, - sdk_metadata.arch, - sdk_metadata.arch, - rpm_filename, - rpm_filename, - rpm_filename, - ); - - // Run the RPM build in the container - let container_helper = SdkContainer::new(); - let 
run_config = crate::utils::container::RunConfig { - container_image: container_image.to_string(), - target: target.to_string(), - command: rpm_build_script, - verbose: self.verbose, - source_environment: true, - interactive: false, - repo_url: config.get_sdk_repo_url(), - repo_release: config.get_sdk_repo_release(), - container_args: merged_container_args, - dnf_args: self.dnf_args.clone(), - sdk_arch: self.sdk_arch.clone(), - ..Default::default() - }; - - if self.verbose { - print_info( - "Creating SDK RPM package in container...", - OutputLevel::Normal, - ); - } - - let success = container_helper.run_in_container(run_config).await?; - if !success { - return Err(anyhow::anyhow!( - "Failed to create SDK RPM package in container" - )); - } - - // RPM is now created in the container at $AVOCADO_PREFIX/output/extensions/{rpm_filename} - let container_rpm_path = format!("/opt/_avocado/{target}/output/extensions/{rpm_filename}"); - - // If --out is specified, copy the RPM to the host - if let Some(output_dir) = &self.output_dir { - self.copy_rpm_to_host( - &volume_state.volume_name, - &container_rpm_path, - output_dir, - &rpm_filename, - container_image, - ) - .await?; - - // Return the host path (canonicalized for clean display) - let host_output_path = if output_dir.starts_with('/') { - // Absolute path - PathBuf::from(output_dir).join(&rpm_filename) - } else { - // Relative path from current directory - std::env::current_dir()? - .join(output_dir) - .join(&rpm_filename) - }; - - // Canonicalize the path to resolve . and .. 
components for clean display - let canonical_path = host_output_path.canonicalize().unwrap_or(host_output_path); - Ok(canonical_path) - } else { - // Return the container path for informational purposes - Ok(PathBuf::from(container_rpm_path)) - } - } } /// RPM metadata structure @@ -1032,36 +794,6 @@ mod tests { ); } - #[test] - fn test_generate_arch_from_target() { - let cmd = ExtPackageCommand::new( - "test.yaml".to_string(), - "test-ext".to_string(), - Some("x86_64-unknown-linux-gnu".to_string()), - None, - false, - None, - None, - ); - - assert_eq!( - cmd.generate_arch_from_target("x86_64-unknown-linux-gnu"), - "avocado_x86_64_unknown_linux_gnu" - ); - assert_eq!( - cmd.generate_arch_from_target("aarch64-unknown-linux-gnu"), - "avocado_aarch64_unknown_linux_gnu" - ); - assert_eq!( - cmd.generate_arch_from_target("riscv64-unknown-linux-gnu"), - "avocado_riscv64_unknown_linux_gnu" - ); - assert_eq!( - cmd.generate_arch_from_target("i686-unknown-linux-gnu"), - "avocado_i686_unknown_linux_gnu" - ); - } - #[test] fn test_extract_rpm_metadata_minimal() { let cmd = ExtPackageCommand::new( @@ -1224,86 +956,6 @@ mod tests { } } - #[test] - fn test_get_extension_sdk_dependencies_empty() { - use crate::utils::config::Config; - - let cmd = ExtPackageCommand::new( - "test.yaml".to_string(), - "test-ext".to_string(), - Some("x86_64-unknown-linux-gnu".to_string()), - None, - false, - None, - None, - ); - - // Create a minimal config without SDK dependencies - let config_content = r#" -ext: - test-ext: - version: "1.0.0" -"#; - - let config = serde_yaml::from_str::(config_content).unwrap(); - let sdk_deps = cmd - .get_extension_sdk_dependencies(&config, config_content, "x86_64-unknown-linux-gnu") - .unwrap(); - - assert!(sdk_deps.is_empty()); - } - - #[test] - fn test_get_extension_sdk_dependencies_with_deps() { - use crate::utils::config::Config; - - let cmd = ExtPackageCommand::new( - "test.yaml".to_string(), - "test-ext".to_string(), - 
Some("x86_64-unknown-linux-gnu".to_string()), - None, - false, - None, - None, - ); - - // Create a config with SDK dependencies - let config_content = r#" -ext: - test-ext: - version: "1.0.0" - sdk: - dependencies: - nativesdk-avocado-hitl: "*" - nativesdk-openssh-ssh: "*" - nativesdk-rsync: "1.2.3" -"#; - - let config = serde_yaml::from_str::(config_content).unwrap(); - let sdk_deps = cmd - .get_extension_sdk_dependencies(&config, config_content, "x86_64-unknown-linux-gnu") - .unwrap(); - - assert_eq!(sdk_deps.len(), 3); - assert!(sdk_deps.contains_key("nativesdk-avocado-hitl")); - assert!(sdk_deps.contains_key("nativesdk-openssh-ssh")); - assert!(sdk_deps.contains_key("nativesdk-rsync")); - - // Check version values - assert_eq!( - sdk_deps["nativesdk-avocado-hitl"], - serde_yaml::Value::String("*".to_string()) - ); - assert_eq!( - sdk_deps["nativesdk-openssh-ssh"], - serde_yaml::Value::String("*".to_string()) - ); - assert_eq!( - sdk_deps["nativesdk-rsync"], - serde_yaml::Value::String("1.2.3".to_string()) - ); - } - // ======================================================================== // Note: Stamp Dependency Tests Removed // ======================================================================== From 544a476e479687b458d8856b5c18b7446cb7d501 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Tue, 6 Jan 2026 09:37:18 -0500 Subject: [PATCH 10/23] add target override overlays to packages --- src/commands/ext/package.rs | 179 ++++++++++++++++++++++++++++++++---- 1 file changed, 160 insertions(+), 19 deletions(-) diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 960a182..19ac775 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -139,11 +139,15 @@ impl ExtPackageCommand { anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) })?; + // Also get the raw (unmerged) extension config to find all target-specific overlays + let raw_ext_config = 
self.get_raw_extension_config(&ext_config_path)?;
+
         // Extract RPM metadata with defaults
         let rpm_metadata = self.extract_rpm_metadata(&ext_config, &target)?;
 
         // Determine which files to package
-        let package_files = self.get_package_files(&ext_config);
+        // Pass both merged config (for package_files) and raw config (for all target overlays)
+        let package_files = self.get_package_files(&ext_config, raw_ext_config.as_ref());
 
         if self.verbose {
             print_info(
@@ -185,13 +189,60 @@ impl ExtPackageCommand {
         Ok(())
     }
 
+    /// Get the raw (unmerged) extension configuration from the config file.
+    ///
+    /// This is used to find all target-specific overlays that should be included
+    /// in the package (since the package is noarch and needs all target overlays).
+    fn get_raw_extension_config(&self, ext_config_path: &str) -> Result<Option<serde_yaml::Value>> {
+        let content = fs::read_to_string(ext_config_path)
+            .with_context(|| format!("Failed to read config file: {ext_config_path}"))?;
+
+        let parsed: serde_yaml::Value = serde_yaml::from_str(&content)
+            .with_context(|| format!("Failed to parse config file: {ext_config_path}"))?;
+
+        // Get the ext section
+        let ext_section = parsed.get("ext");
+        if ext_section.is_none() {
+            return Ok(None);
+        }
+
+        // Get this specific extension's config
+        Ok(ext_section
+            .and_then(|ext| ext.get(&self.extension))
+            .cloned())
+    }
+
+    /// Extract overlay directory from an overlay configuration value.
+    fn extract_overlay_dir(overlay_value: &serde_yaml::Value) -> Option<String> {
+        if let Some(overlay_dir) = overlay_value.as_str() {
+            // Simple string format: overlay = "directory"
+            Some(overlay_dir.to_string())
+        } else if let Some(overlay_table) = overlay_value.as_mapping() {
+            // Table format: overlay = { dir = "directory", ... }
+            overlay_table
+                .get("dir")
+                .and_then(|d| d.as_str())
+                .map(|s| s.to_string())
+        } else {
+            None
+        }
+    }
+
     /// Determine which files to package based on the extension configuration.
     ///
     /// If `package_files` is specified in the extension config, use those patterns.
     /// Otherwise, default to:
     /// - The avocado config file (avocado.yaml, avocado.yml, or avocado.toml)
-    /// - The overlay directory if defined
-    fn get_package_files(&self, ext_config: &serde_yaml::Value) -> Vec<String> {
+    /// - All overlay directories (base level and target-specific)
+    ///
+    /// # Arguments
+    /// * `ext_config` - The merged extension config (for package_files check)
+    /// * `raw_ext_config` - The raw unmerged extension config (to find all target-specific overlays)
+    fn get_package_files(
+        &self,
+        ext_config: &serde_yaml::Value,
+        raw_ext_config: Option<&serde_yaml::Value>,
+    ) -> Vec<String> {
        // Check if package_files is explicitly defined
        if let Some(package_files) = ext_config.get("package_files") {
            if let Some(files_array) = package_files.as_sequence() {
@@ -205,18 +256,39 @@
             }
         }
 
-        // Default behavior: avocado.yaml + overlay directory if defined
+        // Default behavior: avocado.yaml + all overlay directories
         let mut default_files = vec!["avocado.yaml".to_string()];
-
-        // Check for overlay configuration
-        if let Some(overlay) = ext_config.get("overlay") {
-            if let Some(overlay_dir) = overlay.as_str() {
-                // Simple string format: overlay = "directory"
-                default_files.push(overlay_dir.to_string());
-            } else if let Some(overlay_table) = overlay.as_mapping() {
-                // Table format: overlay = { dir = "directory", ...
} - if let Some(dir) = overlay_table.get("dir").and_then(|d| d.as_str()) { - default_files.push(dir.to_string()); + let mut seen_overlays = std::collections::HashSet::new(); + + // If we have the raw extension config, scan for all overlays + if let Some(raw_config) = raw_ext_config { + if let Some(mapping) = raw_config.as_mapping() { + for (key, value) in mapping { + // Check if this is the base-level overlay + if key.as_str() == Some("overlay") { + if let Some(overlay_dir) = Self::extract_overlay_dir(value) { + if seen_overlays.insert(overlay_dir.clone()) { + default_files.push(overlay_dir); + } + } + } + // Check if this is a target-specific section with an overlay + else if let Some(target_config) = value.as_mapping() { + if let Some(overlay_value) = target_config.get("overlay") { + if let Some(overlay_dir) = Self::extract_overlay_dir(overlay_value) { + if seen_overlays.insert(overlay_dir.clone()) { + default_files.push(overlay_dir); + } + } + } + } + } + } + } else { + // Fallback: just check the merged config for overlay (current target only) + if let Some(overlay) = ext_config.get("overlay") { + if let Some(overlay_dir) = Self::extract_overlay_dir(overlay) { + default_files.push(overlay_dir); } } } @@ -1011,7 +1083,7 @@ mod tests { serde_yaml::Value::String("1.0.0".to_string()), ); - let files = cmd.get_package_files(&ext_config); + let files = cmd.get_package_files(&ext_config, None); assert_eq!(files, vec!["avocado.yaml".to_string()]); } @@ -1039,7 +1111,8 @@ mod tests { serde_yaml::Value::String("my-overlay".to_string()), ); - let files = cmd.get_package_files(&ext_config); + // Use the same config as raw config to test overlay extraction + let files = cmd.get_package_files(&ext_config, Some(&ext_config)); assert_eq!( files, vec!["avocado.yaml".to_string(), "my-overlay".to_string()] @@ -1080,7 +1153,8 @@ mod tests { serde_yaml::Value::Mapping(overlay_table), ); - let files = cmd.get_package_files(&ext_config); + // Use the same config as raw config to test 
overlay extraction + let files = cmd.get_package_files(&ext_config, Some(&ext_config)); assert_eq!( files, vec!["avocado.yaml".to_string(), "overlays/prod".to_string()] @@ -1124,7 +1198,7 @@ mod tests { serde_yaml::Value::String("my-overlay".to_string()), ); - let files = cmd.get_package_files(&ext_config); + let files = cmd.get_package_files(&ext_config, Some(&ext_config)); assert_eq!( files, vec![ @@ -1160,7 +1234,74 @@ mod tests { serde_yaml::Value::Sequence(vec![]), ); - let files = cmd.get_package_files(&ext_config); + let files = cmd.get_package_files(&ext_config, None); assert_eq!(files, vec!["avocado.yaml".to_string()]); } + + #[test] + fn test_get_package_files_with_target_specific_overlays() { + let cmd = ExtPackageCommand::new( + "test.yaml".to_string(), + "test-ext".to_string(), + None, + None, + false, + None, + None, + ); + + // Create a raw config that simulates target-specific overlays + // like: ext.test-ext.reterminal.overlay and ext.test-ext.reterminal-dm.overlay + let mut raw_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let config_map = raw_config.as_mapping_mut().unwrap(); + + config_map.insert( + serde_yaml::Value::String("version".to_string()), + serde_yaml::Value::String("1.0.0".to_string()), + ); + + // Target: reterminal with overlay + let mut reterminal_config = serde_yaml::Mapping::new(); + reterminal_config.insert( + serde_yaml::Value::String("overlay".to_string()), + serde_yaml::Value::String("overlays/reterminal".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("reterminal".to_string()), + serde_yaml::Value::Mapping(reterminal_config), + ); + + // Target: reterminal-dm with overlay + let mut reterminal_dm_config = serde_yaml::Mapping::new(); + reterminal_dm_config.insert( + serde_yaml::Value::String("overlay".to_string()), + serde_yaml::Value::String("overlays/reterminal-dm".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("reterminal-dm".to_string()), + 
serde_yaml::Value::Mapping(reterminal_dm_config), + ); + + // Target: icam-540 without overlay (should not add anything) + let mut icam_config = serde_yaml::Mapping::new(); + icam_config.insert( + serde_yaml::Value::String("some_other_setting".to_string()), + serde_yaml::Value::String("value".to_string()), + ); + config_map.insert( + serde_yaml::Value::String("icam-540".to_string()), + serde_yaml::Value::Mapping(icam_config), + ); + + // Merged config (for a specific target, but package_files not set) + let merged_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + + let files = cmd.get_package_files(&merged_config, Some(&raw_config)); + + // Should include avocado.yaml and both target-specific overlays + assert!(files.contains(&"avocado.yaml".to_string())); + assert!(files.contains(&"overlays/reterminal".to_string())); + assert!(files.contains(&"overlays/reterminal-dm".to_string())); + assert_eq!(files.len(), 3); + } } From 07a141503d0774fc06565fbb42b6e80472f8bc15 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 09:23:23 -0500 Subject: [PATCH 11/23] fix race conditions in tests --- src/commands/ext/fetch.rs | 3 +-- src/commands/ext/package.rs | 4 ++-- src/commands/unlock.rs | 10 ++++++++++ src/utils/config.rs | 5 +---- src/utils/container.rs | 5 ++--- 5 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs index 54dbcfc..ff5fdd6 100644 --- a/src/commands/ext/fetch.rs +++ b/src/commands/ext/fetch.rs @@ -108,8 +108,7 @@ impl ExtFetchCommand { if extensions_to_fetch.is_empty() { if let Some(ref ext_name) = self.extension { return Err(anyhow::anyhow!( - "Extension '{}' not found in configuration or is not a remote extension", - ext_name + "Extension '{ext_name}' not found in configuration or is not a remote extension" )); } return Ok(()); diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 19ac775..9adb171 100644 --- a/src/commands/ext/package.rs 
+++ b/src/commands/ext/package.rs @@ -158,7 +158,7 @@ impl ExtPackageCommand { OutputLevel::Normal, ); print_info( - &format!("Package files: {:?}", package_files), + &format!("Package files: {package_files:?}"), OutputLevel::Normal, ); } @@ -480,7 +480,7 @@ impl ExtPackageCommand { let container_src_dir = if ext_src_dir.starts_with('/') { ext_src_dir.clone() } else { - format!("/opt/src/{}", ext_src_dir) + format!("/opt/src/{ext_src_dir}") }; // Create the RPM filename diff --git a/src/commands/unlock.rs b/src/commands/unlock.rs index 7ee099a..62f4597 100644 --- a/src/commands/unlock.rs +++ b/src/commands/unlock.rs @@ -168,6 +168,8 @@ impl UnlockCommand { mod tests { use super::*; use crate::utils::lockfile::SysrootType; + use serial_test::serial; + use std::env; use std::fs; use tempfile::TempDir; @@ -223,7 +225,9 @@ runtime: } #[test] + #[serial] fn test_unlock_all() { + env::remove_var("AVOCADO_TARGET"); let temp_dir = TempDir::new().unwrap(); let config_path = create_test_config(&temp_dir); create_test_lock_file(&temp_dir); @@ -244,7 +248,9 @@ runtime: } #[test] + #[serial] fn test_unlock_sdk() { + env::remove_var("AVOCADO_TARGET"); let temp_dir = TempDir::new().unwrap(); let config_path = create_test_config(&temp_dir); create_test_lock_file(&temp_dir); @@ -290,7 +296,9 @@ runtime: } #[test] + #[serial] fn test_unlock_extension() { + env::remove_var("AVOCADO_TARGET"); let temp_dir = TempDir::new().unwrap(); let config_path = create_test_config(&temp_dir); create_test_lock_file(&temp_dir); @@ -325,7 +333,9 @@ runtime: } #[test] + #[serial] fn test_unlock_runtime() { + env::remove_var("AVOCADO_TARGET"); let temp_dir = TempDir::new().unwrap(); let config_path = create_test_config(&temp_dir); create_test_lock_file(&temp_dir); diff --git a/src/utils/config.rs b/src/utils/config.rs index ccb5f4f..05f0f45 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -1844,10 +1844,7 @@ impl Config { // Deserialize the source block into ExtensionSource let source: 
ExtensionSource = serde_yaml::from_value(source_value.clone()) .with_context(|| { - format!( - "Failed to parse source configuration for extension '{}'", - ext_name - ) + format!("Failed to parse source configuration for extension '{ext_name}'") })?; Ok(Some(source)) } diff --git a/src/utils/container.rs b/src/utils/container.rs index 5adcdaf..33045ff 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -67,8 +67,7 @@ pub fn sdk_arch_to_platform(sdk_arch: &str) -> Result { "aarch64" | "arm64" => Ok("linux/arm64".to_string()), "x86-64" | "x86_64" | "amd64" => Ok("linux/amd64".to_string()), _ => Err(anyhow::anyhow!( - "Unsupported SDK architecture: '{}'. Supported values: aarch64, x86-64", - sdk_arch + "Unsupported SDK architecture: '{sdk_arch}'. Supported values: aarch64, x86-64" )), } } @@ -84,7 +83,7 @@ pub fn get_host_platform() -> String { // Fallback for other architectures "arm" => "linux/arm/v7".to_string(), "riscv64" => "linux/riscv64".to_string(), - _ => format!("linux/{}", arch), + _ => format!("linux/{arch}"), } } From fee6dd66b078fcc4192ea41aa262f3dd72e385ad Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 12:37:32 -0500 Subject: [PATCH 12/23] merge external extension configs for all container calls --- src/commands/ext/build.rs | 11 +- src/commands/ext/dnf.rs | 12 +- src/commands/ext/install.rs | 50 +- src/commands/install.rs | 15 +- src/commands/provision.rs | 8 +- src/commands/runtime/build.rs | 15 +- src/commands/runtime/dnf.rs | 16 +- src/commands/runtime/install.rs | 9 +- src/commands/runtime/provision.rs | 23 +- src/commands/sdk/compile.rs | 7 +- src/commands/sdk/dnf.rs | 9 +- src/utils/config.rs | 771 +++++++++++++++++++++++++++--- src/utils/ext_fetch.rs | 4 +- 13 files changed, 797 insertions(+), 153 deletions(-) diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index ac9a4b6..715e23b 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -83,10 +83,11 @@ impl ExtBuildCommand 
{ } pub async fn execute(&self) -> Result<()> { - // Load configuration and parse raw TOML - let config = Config::load(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let _parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs with compile sections) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let _parsed = &composed.merged_value; // Merge container args from config and CLI (similar to SDK commands) let processed_container_args = @@ -94,7 +95,7 @@ impl ExtBuildCommand { // Get repo_url and repo_release from config let repo_url = config.get_sdk_repo_url(); let repo_release = config.get_sdk_repo_release(); - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // Get SDK configuration from interpolated config let container_image = config diff --git a/src/commands/ext/dnf.rs b/src/commands/ext/dnf.rs index 9c2238f..446babf 100644 --- a/src/commands/ext/dnf.rs +++ b/src/commands/ext/dnf.rs @@ -1,7 +1,7 @@ // Allow deprecated variants for backward compatibility during migration #![allow(deprecated)] -use anyhow::Result; +use anyhow::{Context, Result}; use crate::utils::config::{Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; @@ -48,12 +48,14 @@ impl ExtDnfCommand { } pub async fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .context("Failed to load composed config")?; + let config = &composed.config; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); - 
let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + let parsed = &composed.merged_value; - let target = self.resolve_target_architecture(&config)?; + let target = self.resolve_target_architecture(config)?; let extension_location = self.find_extension_in_dependency_tree(&config, &target)?; let container_image = self.get_container_image(&config)?; diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index f6385c9..284fc02 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -290,35 +290,12 @@ impl ExtInstallCommand { ); } - // Get the config path where this extension is actually defined - let ext_config_path = match ext_location { - ExtensionLocation::Local { config_path, .. } => config_path.clone(), - ExtensionLocation::External { config_path, .. } => { - // Resolve relative path against main config directory - let main_config_dir = std::path::Path::new(&self.config_path) - .parent() - .unwrap_or(std::path::Path::new(".")); - main_config_dir - .join(config_path) - .to_string_lossy() - .to_string() - } - ExtensionLocation::Remote { name, .. 
} => { - // Remote extensions are installed to $AVOCADO_PREFIX/includes// - let ext_install_path = - config.get_extension_install_path(&self.config_path, name, target); - ext_install_path - .join("avocado.yaml") - .to_string_lossy() - .to_string() - } - }; - if !self .install_single_extension( config, + parsed, ext_name, - &ext_config_path, + ext_location, container_helper, container_image, target, @@ -383,8 +360,9 @@ impl ExtInstallCommand { async fn install_single_extension( &self, config: &Config, + parsed: &serde_yaml::Value, extension: &str, - ext_config_path: &str, + ext_location: &ExtensionLocation, container_helper: &SdkContainer, container_image: &str, target: &str, @@ -453,9 +431,23 @@ impl ExtInstallCommand { } } - // Get merged extension configuration from the correct config file - // This properly handles both local and external extensions - let ext_config = config.get_merged_ext_config(extension, target, ext_config_path)?; + // Get extension configuration from the composed/merged config + // For remote extensions, this comes from the merged remote extension config + // For local extensions, this comes from the main config's ext section + let ext_config = match ext_location { + ExtensionLocation::Remote { .. } | ExtensionLocation::Local { .. } => { + // Use the already-merged config from `parsed` which contains remote extension configs + parsed + .get("ext") + .and_then(|ext| ext.get(extension)) + .cloned() + } + #[allow(deprecated)] + ExtensionLocation::External { config_path, .. } => { + // For deprecated external configs, read from the file + config.get_merged_ext_config(extension, target, config_path)? 
+ } + }; // Install dependencies if they exist let dependencies = ext_config.as_ref().and_then(|ec| ec.get("dependencies")); diff --git a/src/commands/install.rs b/src/commands/install.rs index 8ce3f9f..8d1e7d9 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -116,7 +116,8 @@ impl InstallCommand { .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; let config = &composed.config; - let parsed = &composed.merged_value; + // parsed from initial load is not used after sdk install reloads config + let _parsed = &composed.merged_value; print_info( "Starting comprehensive install process...", @@ -153,6 +154,18 @@ impl InstallCommand { .await .with_context(|| "Failed to install SDK dependencies")?; + // Reload composed config after SDK install to pick up newly fetched remote extensions + // SDK install includes ext fetch which downloads remote extensions to $AVOCADO_PREFIX/includes/ + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| { + format!( + "Failed to reload composed config from {} after SDK install", + self.config_path + ) + })?; + let config = &composed.config; + let parsed = &composed.merged_value; + // 2. 
Install extension dependencies print_info( "Step 2/3: Installing extension dependencies", diff --git a/src/commands/provision.rs b/src/commands/provision.rs index d986cc5..3c7c4b2 100644 --- a/src/commands/provision.rs +++ b/src/commands/provision.rs @@ -50,8 +50,12 @@ impl ProvisionCommand { /// Execute the provision command by calling runtime provision pub async fn execute(&self) -> Result<()> { - // Load config to access provision profiles - let config = crate::utils::config::Config::load(&self.config.config_path)?; + // Load composed config to access provision profiles (including from remote extensions) + let composed = crate::utils::config::Config::load_composed( + &self.config.config_path, + self.config.target.as_deref(), + )?; + let config = &composed.config; // Get state file path from provision profile if available let state_file = self diff --git a/src/commands/runtime/build.rs b/src/commands/runtime/build.rs index 9494c37..44fe435 100644 --- a/src/commands/runtime/build.rs +++ b/src/commands/runtime/build.rs @@ -1,5 +1,5 @@ use crate::utils::{ - config::load_config, + config::Config, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, runs_on::RunsOnContext, @@ -68,10 +68,11 @@ impl RuntimeBuildCommand { } pub async fn execute(&self) -> Result<()> { - // Load configuration and parse raw TOML - let config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let parsed = &composed.merged_value; // Merge container args from config and CLI with environment variable expansion let merged_container_args = 
config.merge_sdk_container_args(self.container_args.as_ref()); @@ -366,7 +367,9 @@ impl RuntimeBuildCommand { resolved_extensions: &[String], ) -> Result { // Get merged runtime configuration including target-specific dependencies - let config = crate::utils::config::Config::load(&self.config_path)?; + // Use load_composed to include remote extension configs + let composed = Config::load_composed(&self.config_path, Some(target_arch))?; + let config = &composed.config; let merged_runtime = config .get_merged_runtime_config(&self.runtime_name, target_arch, &self.config_path)? .with_context(|| { diff --git a/src/commands/runtime/dnf.rs b/src/commands/runtime/dnf.rs index a9d670a..dce981d 100644 --- a/src/commands/runtime/dnf.rs +++ b/src/commands/runtime/dnf.rs @@ -1,4 +1,4 @@ -use anyhow::Result; +use anyhow::{Context, Result}; use crate::utils::config::Config; use crate::utils::container::{RunConfig, SdkContainer}; @@ -45,14 +45,16 @@ impl RuntimeDnfCommand { } pub async fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .context("Failed to load composed config")?; + let config = &composed.config; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + let parsed = &composed.merged_value; - self.validate_runtime_exists(&parsed)?; - let container_image = self.get_container_image(&config)?; - let target = self.resolve_target_architecture(&config)?; + self.validate_runtime_exists(parsed)?; + let container_image = self.get_container_image(config)?; + let target = self.resolve_target_architecture(config)?; // Get repo_url and repo_release from config let repo_url = config.get_sdk_repo_url(); diff --git 
a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index 40d1cca..51eed64 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -70,10 +70,11 @@ impl RuntimeInstallCommand { } pub async fn execute(&self) -> Result<()> { - // Load the configuration and parse raw TOML - let config = Config::load(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let parsed = &composed.merged_value; // Merge container args from config and CLI (similar to SDK commands) let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); diff --git a/src/commands/runtime/provision.rs b/src/commands/runtime/provision.rs index d219460..4f9844f 100644 --- a/src/commands/runtime/provision.rs +++ b/src/commands/runtime/provision.rs @@ -1,7 +1,7 @@ #[cfg(unix)] use crate::utils::signing_service::{generate_helper_script, SigningService, SigningServiceConfig}; use crate::utils::{ - config::load_config, + config::Config, container::{RunConfig, SdkContainer}, output::{print_info, print_success, OutputLevel}, remote::{RemoteHost, SshClient}, @@ -57,10 +57,11 @@ impl RuntimeProvisionCommand { } pub async fn execute(&mut self) -> Result<()> { - // Load configuration - let config = load_config(&self.config.config_path)?; - let content = std::fs::read_to_string(&self.config.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs with provision profiles) + let composed = + Config::load_composed(&self.config.config_path, self.config.target.as_deref())?; + let config = 
&composed.config; + let parsed = &composed.merged_value; // Get SDK configuration from interpolated config let container_image = config @@ -587,8 +588,10 @@ avocado-provision-{} {} ); } - // Load configuration to get container image - let config = load_config(&self.config.config_path)?; + // Load composed configuration to get container image + let composed = + Config::load_composed(&self.config.config_path, self.config.target.as_deref())?; + let config = &composed.config; let container_image = config .get_sdk_image() .context("No SDK container image specified in configuration")?; @@ -677,8 +680,10 @@ avocado-provision-{} {} ); } - // Load configuration to get container image - let config = load_config(&self.config.config_path)?; + // Load composed configuration to get container image + let composed = + Config::load_composed(&self.config.config_path, self.config.target.as_deref())?; + let config = &composed.config; let container_image = config .get_sdk_image() .context("No SDK container image specified in configuration")?; diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index 9893bb0..8ec8720 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -73,15 +73,16 @@ impl SdkCompileCommand { /// Execute the sdk compile command pub async fn execute(&self) -> Result<()> { - // Load the configuration + // Load composed configuration (includes remote extension compile sections) if self.verbose { print_info( &format!("Loading SDK compile config from: {}", self.config_path), OutputLevel::Normal, ); } - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; // Validate stamps before proceeding (unless --no-stamps) // SDK compile requires SDK to be installed 
diff --git a/src/commands/sdk/dnf.rs b/src/commands/sdk/dnf.rs
index 5b01196..947c3e2 100644
--- a/src/commands/sdk/dnf.rs
+++ b/src/commands/sdk/dnf.rs
@@ -62,9 +62,10 @@
             ));
         }
 
-        // Load the configuration
-        let config = Config::load(&self.config_path)
-            .with_context(|| format!("Failed to load config from {}", self.config_path))?;
+        // Load composed configuration (includes remote extension configs)
+        let composed = Config::load_composed(&self.config_path, self.target.as_deref())
+            .with_context(|| format!("Failed to load composed config from {}", self.config_path))?;
+        let config = &composed.config;
 
         // Get the SDK image from configuration
         let container_image = config.get_sdk_image().ok_or_else(|| {
@@ -79,7 +80,7 @@
         let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref());
 
         // Resolve target with proper precedence
-        let target = resolve_target_required(self.target.as_deref(), &config)?;
+        let target = resolve_target_required(self.target.as_deref(), config)?;
 
         let container_helper = SdkContainer::new();
 
diff --git a/src/utils/config.rs b/src/utils/config.rs
index 05f0f45..1d2f541 100644
--- a/src/utils/config.rs
+++ b/src/utils/config.rs
@@ -137,6 +137,12 @@ pub enum ExtensionSource {
         /// Optional custom repository name
         #[serde(skip_serializing_if = "Option::is_none")]
         repo_name: Option<String>,
+        /// Optional list of config sections to include from the remote extension.
+        /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*").
+        /// The extension's own `ext.<name>` section is always included.
+        /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies.
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        include: Option<Vec<String>>,
     },
     /// Extension from a git repository
     Git {
@@ -148,14 +154,70 @@
         /// Optional sparse checkout paths
         #[serde(skip_serializing_if = "Option::is_none")]
         sparse_checkout: Option<Vec<String>>,
+        /// Optional list of config sections to include from the remote extension.
+        /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*").
+        /// The extension's own `ext.<name>` section is always included.
+        /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies.
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        include: Option<Vec<String>>,
     },
     /// Extension from a local filesystem path
     Path {
         /// Path to the extension directory (relative to config or absolute)
         path: String,
+        /// Optional list of config sections to include from the remote extension.
+        /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*").
+        /// The extension's own `ext.<name>` section is always included.
+        /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies.
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        include: Option<Vec<String>>,
     },
 }
 
+impl ExtensionSource {
+    /// Get the include patterns for this extension source.
+    /// Returns an empty slice if no include patterns are specified.
+    pub fn get_include_patterns(&self) -> &[String] {
+        match self {
+            ExtensionSource::Repo { include, .. } => {
+                include.as_ref().map(|v| v.as_slice()).unwrap_or(&[])
+            }
+            ExtensionSource::Git { include, .. } => {
+                include.as_ref().map(|v| v.as_slice()).unwrap_or(&[])
+            }
+            ExtensionSource::Path { include, .. } => {
+                include.as_ref().map(|v| v.as_slice()).unwrap_or(&[])
+            }
+        }
+    }
+
+    /// Check if a config path matches any of the include patterns.
+ /// + /// Supports: + /// - Exact matches: "provision.tegraflash" matches "provision.tegraflash" + /// - Wildcard suffix: "provision.*" matches "provision.tegraflash", "provision.usb", etc. + /// + /// Returns true if the path matches at least one include pattern. + pub fn matches_include_pattern(config_path: &str, patterns: &[String]) -> bool { + for pattern in patterns { + if pattern.ends_with(".*") { + // Wildcard pattern: check if config_path starts with the prefix + let prefix = &pattern[..pattern.len() - 2]; // Remove ".*" + if config_path.starts_with(prefix) + && (config_path.len() == prefix.len() + || config_path.chars().nth(prefix.len()) == Some('.')) + { + return true; + } + } else if config_path == pattern { + // Exact match + return true; + } + } + false + } +} + /// Represents an extension dependency for a runtime with type information #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum RuntimeExtDep { @@ -644,8 +706,24 @@ impl Config { &external_content, )?; + // For external configs (deprecated `config: path` syntax), use permissive include patterns + // to maintain backward compatibility - merge all sections + let legacy_include_patterns = vec![ + "provision.*".to_string(), + "sdk.dependencies.*".to_string(), + "sdk.compile.*".to_string(), + ]; + let auto_include_compile = + Self::find_compile_dependencies_in_ext(&external_config, ext_name); + // Merge external config into main config - Self::merge_external_config(&mut main_config, &external_config, ext_name); + Self::merge_external_config( + &mut main_config, + &external_config, + ext_name, + &legacy_include_patterns, + &auto_include_compile, + ); // Record this extension's source (the external config path) let resolved_path_str = resolved_path.to_string_lossy().to_string(); @@ -731,40 +809,63 @@ impl Config { return Ok(extension_sources); } - // Get extensions directory from volume or fallback path - let extensions_dir = - temp_config.get_extensions_dir(&config_path.to_string_lossy(), 
&resolved_target); - - // For each remote extension, check if it's installed and merge its config - for (ext_name, _source) in remote_extensions { - let ext_install_path = extensions_dir.join(&ext_name); - - // Try to find the extension's config file - let ext_config_path = if ext_install_path.join("avocado.yaml").exists() { - ext_install_path.join("avocado.yaml") - } else if ext_install_path.join("avocado.yml").exists() { - ext_install_path.join("avocado.yml") - } else if ext_install_path.join("avocado.toml").exists() { - ext_install_path.join("avocado.toml") - } else { - // Extension not installed yet, skip - continue; + // Get src_dir for loading volume state + let config_path_str = config_path.to_string_lossy(); + let src_dir = temp_config + .get_resolved_src_dir(config_path_str.as_ref()) + .unwrap_or_else(|| config_path.parent().unwrap_or(Path::new(".")).to_path_buf()); + + // Try to load volume state for container-based config reading + let volume_state = crate::utils::volume::VolumeState::load_from_dir(&src_dir) + .ok() + .flatten(); + + // For each remote extension, try to read its config via container + for (ext_name, source) in remote_extensions { + // Try to read extension config via container command + let ext_content = match &volume_state { + Some(vs) => { + match Self::read_extension_config_via_container(vs, &resolved_target, &ext_name) + { + Ok(content) => content, + Err(_) => { + // Extension not installed yet or config not found, skip + continue; + } + } + } + None => { + // No volume state - try fallback to local path (for development) + let fallback_dir = src_dir + .join(".avocado") + .join(&resolved_target) + .join("includes") + .join(&ext_name); + let config_path_local = fallback_dir.join("avocado.yaml"); + if config_path_local.exists() { + match fs::read_to_string(&config_path_local) { + Ok(content) => content, + Err(_) => continue, + } + } else { + continue; + } + } }; - // Load the remote extension's config - let ext_content = 
fs::read_to_string(&ext_config_path).with_context(|| { - format!( - "Failed to read remote extension config: {}", - ext_config_path.display() - ) - })?; - let ext_config = Self::parse_config_value( - ext_config_path.to_str().unwrap_or(&ext_name), - &ext_content, - )?; + // Use a .yaml extension so parse_config_value knows to parse as YAML + let ext_config_path = format!("{ext_name}/avocado.yaml"); + let ext_config = match Self::parse_config_value(&ext_config_path, &ext_content) { + Ok(cfg) => cfg, + Err(_) => { + // Failed to parse config, skip this extension + continue; + } + }; - // Record this extension's source config path - let ext_config_path_str = ext_config_path.to_string_lossy().to_string(); + // Record this extension's source (container path for reference) + let ext_config_path_str = + format!("/opt/_avocado/{resolved_target}/includes/{ext_name}/avocado.yaml"); extension_sources.insert(ext_name.clone(), ext_config_path_str.clone()); // Also record any extensions defined within this remote extension's config @@ -777,13 +878,85 @@ impl Config { } } - // Merge the remote extension config - Self::merge_external_config(main_config, &ext_config, &ext_name); + // Get include patterns from the extension source + let include_patterns = source.get_include_patterns(); + + // Find compile dependencies to auto-include from the extension's own section + let auto_include_compile = + Self::find_compile_dependencies_in_ext(&ext_config, &ext_name); + + // Merge the remote extension config with include patterns + Self::merge_external_config( + main_config, + &ext_config, + &ext_name, + include_patterns, + &auto_include_compile, + ); } Ok(extension_sources) } + /// Read a remote extension's config file by running a container command. + /// + /// This runs a lightweight container to cat the extension's avocado.yaml from + /// the Docker volume, avoiding permission issues with direct host access. 
+    fn read_extension_config_via_container(
+        volume_state: &crate::utils::volume::VolumeState,
+        target: &str,
+        ext_name: &str,
+    ) -> Result<String> {
+        // The extension config path inside the container
+        let container_config_path =
+            format!("/opt/_avocado/{target}/includes/{ext_name}/avocado.yaml");
+
+        // Run a minimal container to cat the config file
+        // We use busybox as a lightweight image, but fall back to alpine if needed
+        let images_to_try = [
+            "busybox:latest",
+            "alpine:latest",
+            "docker.io/library/busybox:latest",
+        ];
+
+        for image in &images_to_try {
+            let output = std::process::Command::new(&volume_state.container_tool)
+                .args([
+                    "run",
+                    "--rm",
+                    "-v",
+                    &format!("{}:/opt/_avocado:ro", volume_state.volume_name),
+                    image,
+                    "cat",
+                    &container_config_path,
+                ])
+                .output();
+
+            match output {
+                Ok(out) if out.status.success() => {
+                    let content = String::from_utf8_lossy(&out.stdout).to_string();
+                    if content.is_empty() {
+                        anyhow::bail!("Extension config file is empty");
+                    }
+                    return Ok(content);
+                }
+                Ok(out) => {
+                    let stderr = String::from_utf8_lossy(&out.stderr);
+                    // If file not found, bail immediately (no point trying other images)
+                    if stderr.contains("No such file") || stderr.contains("not found") {
+                        anyhow::bail!("Extension config not found: {container_config_path}");
+                    }
+                    // Otherwise, continue to try next image
+                }
+                Err(_) => {
+                    // Continue to try next image
+                }
+            }
+        }
+
+        anyhow::bail!("Failed to read extension config via container for '{ext_name}'")
+    }
+
     /// Discover all external config references in runtime and ext dependencies.
     ///
     /// Scans these locations:
@@ -912,21 +1085,34 @@ impl Config {
     /// Merge an external config into the main config.
/// - /// Merges: - /// - `ext.*` sections (external extensions added to main ext section) - /// - `sdk.dependencies` (merged, main takes precedence on conflicts) - /// - `sdk.compile` (merged, main takes precedence on conflicts) + /// Always merges: + /// - `ext.` section (the extension's own section) + /// + /// Conditionally merges (based on include_patterns): + /// - `provision.` sections (if pattern matches) + /// - `sdk.dependencies.` (if pattern matches) + /// - `sdk.compile.
` (if pattern matches) /// - /// Does NOT merge: - /// - `distro` (main config only) - /// - `default_target` (main config only) - /// - `supported_targets` (main config only) + /// Does NOT merge (main config only): + /// - `distro` + /// - `default_target` + /// - `supported_targets` + /// - `sdk.image`, `sdk.container_args`, etc. (base SDK settings) + /// + /// # Arguments + /// * `main_config` - The main config to merge into + /// * `external_config` - The external config to merge from + /// * `ext_name` - The name of the extension (its `ext.` is always merged) + /// * `include_patterns` - Patterns for additional sections to include (e.g., "provision.*") + /// * `auto_include_compile` - List of sdk.compile section names to auto-include (from compile deps) fn merge_external_config( main_config: &mut serde_yaml::Value, external_config: &serde_yaml::Value, - _ext_name: &str, + ext_name: &str, + include_patterns: &[String], + auto_include_compile: &[String], ) { - // Merge ext sections + // Always merge the extension's own ext. section if let Some(external_ext) = external_config.get("ext").and_then(|e| e.as_mapping()) { let main_ext = main_config .as_mapping_mut() @@ -942,60 +1128,163 @@ impl Config { .and_then(|e| e.as_mapping_mut()); if let Some(main_ext_map) = main_ext { - for (ext_key, ext_value) in external_ext { - // Only add if not already present in main config - if !main_ext_map.contains_key(ext_key) { - main_ext_map.insert(ext_key.clone(), ext_value.clone()); + // Deep-merge the extension's own section (ext.) + // This handles the case where main config has a stub with just `source:` + // and the remote extension has the full definition with `dependencies:` etc. 
+ let ext_key = serde_yaml::Value::String(ext_name.to_string()); + if let Some(ext_value) = external_ext.get(&ext_key) { + if let Some(existing_ext) = main_ext_map.get_mut(&ext_key) { + // Deep-merge: add fields from remote that don't exist in main + // Main config values take precedence on conflicts + Self::deep_merge_ext_section(existing_ext, ext_value); + } else { + // Extension not in main config, just add it + main_ext_map.insert(ext_key, ext_value.clone()); + } + } + } + } + + // Merge provision sections based on include patterns + if let Some(external_provision) = external_config + .get("provision") + .and_then(|p| p.as_mapping()) + { + for (profile_key, profile_value) in external_provision { + if let Some(profile_name) = profile_key.as_str() { + let config_path = format!("provision.{profile_name}"); + if ExtensionSource::matches_include_pattern(&config_path, include_patterns) { + Self::ensure_provision_section(main_config); + if let Some(main_provision) = main_config + .get_mut("provision") + .and_then(|p| p.as_mapping_mut()) + { + // Only add if not already present (main takes precedence) + if !main_provision.contains_key(profile_key) { + main_provision.insert(profile_key.clone(), profile_value.clone()); + } + } } } } } - // Merge sdk.dependencies + // Merge sdk.dependencies based on include patterns if let Some(external_sdk_deps) = external_config .get("sdk") .and_then(|s| s.get("dependencies")) .and_then(|d| d.as_mapping()) { - Self::ensure_sdk_dependencies_section(main_config); - - if let Some(main_sdk_deps) = main_config - .get_mut("sdk") - .and_then(|s| s.get_mut("dependencies")) - .and_then(|d| d.as_mapping_mut()) - { - for (dep_key, dep_value) in external_sdk_deps { - // Only add if not already present (main takes precedence) - if !main_sdk_deps.contains_key(dep_key) { - main_sdk_deps.insert(dep_key.clone(), dep_value.clone()); + for (dep_key, dep_value) in external_sdk_deps { + if let Some(dep_name) = dep_key.as_str() { + let config_path = 
format!("sdk.dependencies.{dep_name}"); + if ExtensionSource::matches_include_pattern(&config_path, include_patterns) { + Self::ensure_sdk_dependencies_section(main_config); + if let Some(main_sdk_deps) = main_config + .get_mut("sdk") + .and_then(|s| s.get_mut("dependencies")) + .and_then(|d| d.as_mapping_mut()) + { + // Only add if not already present (main takes precedence) + if !main_sdk_deps.contains_key(dep_key) { + main_sdk_deps.insert(dep_key.clone(), dep_value.clone()); + } + } } } } } - // Merge sdk.compile + // Merge sdk.compile based on include patterns OR auto_include_compile list if let Some(external_sdk_compile) = external_config .get("sdk") .and_then(|s| s.get("compile")) .and_then(|c| c.as_mapping()) { - Self::ensure_sdk_compile_section(main_config); - - if let Some(main_sdk_compile) = main_config - .get_mut("sdk") - .and_then(|s| s.get_mut("compile")) - .and_then(|c| c.as_mapping_mut()) - { - for (compile_key, compile_value) in external_sdk_compile { - // Only add if not already present (main takes precedence) - if !main_sdk_compile.contains_key(compile_key) { - main_sdk_compile.insert(compile_key.clone(), compile_value.clone()); + for (compile_key, compile_value) in external_sdk_compile { + if let Some(compile_name) = compile_key.as_str() { + let config_path = format!("sdk.compile.{compile_name}"); + let should_include = + ExtensionSource::matches_include_pattern(&config_path, include_patterns) + || auto_include_compile.contains(&compile_name.to_string()); + + if should_include { + Self::ensure_sdk_compile_section(main_config); + if let Some(main_sdk_compile) = main_config + .get_mut("sdk") + .and_then(|s| s.get_mut("compile")) + .and_then(|c| c.as_mapping_mut()) + { + // Only add if not already present (main takes precedence) + if !main_sdk_compile.contains_key(compile_key) { + main_sdk_compile.insert(compile_key.clone(), compile_value.clone()); + } + } } } } } } + /// Ensure the provision section exists in the config. 
+    fn ensure_provision_section(config: &mut serde_yaml::Value) {
+        if let Some(main_map) = config.as_mapping_mut() {
+            if !main_map.contains_key(serde_yaml::Value::String("provision".to_string())) {
+                main_map.insert(
+                    serde_yaml::Value::String("provision".to_string()),
+                    serde_yaml::Value::Mapping(serde_yaml::Mapping::new()),
+                );
+            }
+        }
+    }
+
+    /// Deep-merge an extension section from external config into main config.
+    ///
+    /// This handles the case where main config has a stub definition (with just `source:`)
+    /// and the remote extension has the full definition (with `dependencies:`, `version:`, etc.).
+    ///
+    /// Main config values take precedence on conflicts.
+    fn deep_merge_ext_section(main_ext: &mut serde_yaml::Value, external_ext: &serde_yaml::Value) {
+        // Only merge if both are mappings
+        if let (Some(main_map), Some(external_map)) =
+            (main_ext.as_mapping_mut(), external_ext.as_mapping())
+        {
+            for (key, external_value) in external_map {
+                if !main_map.contains_key(key) {
+                    // Key doesn't exist in main, add it from external
+                    main_map.insert(key.clone(), external_value.clone());
+                }
+                // If key exists in main, keep main's value (main takes precedence)
+            }
+        }
+    }
+
+    /// Find compile dependencies in an extension's dependencies section.
+    ///
+    /// Scans `ext.<name>.dependencies` for entries with a `compile` key
+    /// and returns the list of compile section names that should be auto-included.
+ fn find_compile_dependencies_in_ext( + ext_config: &serde_yaml::Value, + ext_name: &str, + ) -> Vec { + let mut compile_deps = Vec::new(); + + if let Some(ext_section) = ext_config + .get("ext") + .and_then(|e| e.get(ext_name)) + .and_then(|e| e.get("dependencies")) + .and_then(|d| d.as_mapping()) + { + for (_dep_name, dep_spec) in ext_section { + if let Some(compile_name) = dep_spec.get("compile").and_then(|c| c.as_str()) { + compile_deps.push(compile_name.to_string()); + } + } + } + + compile_deps + } + /// Ensure the sdk.dependencies section exists in the config. fn ensure_sdk_dependencies_section(config: &mut serde_yaml::Value) { if let Some(main_map) = config.as_mapping_mut() { @@ -2015,10 +2304,20 @@ impl Config { let content = std::fs::read_to_string(config_path)?; let parsed = Self::parse_config_value(config_path, &content)?; - // First check if it's a local extension + // First check if it's defined in the ext section if let Some(ext_section) = parsed.get("ext") { if let Some(ext_map) = ext_section.as_mapping() { - if ext_map.contains_key(serde_yaml::Value::String(extension_name.to_string())) { + let ext_key = serde_yaml::Value::String(extension_name.to_string()); + if let Some(ext_config) = ext_map.get(&ext_key) { + // Check if this is a remote extension (has source: field) + if let Some(source) = Self::parse_extension_source(extension_name, ext_config)? + { + return Ok(Some(ExtensionLocation::Remote { + name: extension_name.to_string(), + source, + })); + } + // Otherwise it's a local extension return Ok(Some(ExtensionLocation::Local { name: extension_name.to_string(), config_path: config_path.to_string(), @@ -6258,7 +6557,8 @@ ext: let external_config: serde_yaml::Value = serde_yaml::from_str(external_config_content).unwrap(); - Config::merge_external_config(&mut main_config, &external_config, "external-ext"); + // Use empty include patterns - ext. 
is always merged + Config::merge_external_config(&mut main_config, &external_config, "external-ext", &[], &[]); // Check that both extensions are present let ext_section = main_config.get("ext").unwrap().as_mapping().unwrap(); @@ -6285,7 +6585,15 @@ sdk: let external_config: serde_yaml::Value = serde_yaml::from_str(external_config_content).unwrap(); - Config::merge_external_config(&mut main_config, &external_config, "test-ext"); + // Include sdk.dependencies.* to merge SDK dependencies + let include_patterns = vec!["sdk.dependencies.*".to_string()]; + Config::merge_external_config( + &mut main_config, + &external_config, + "test-ext", + &include_patterns, + &[], + ); let sdk_deps = main_config .get("sdk") @@ -6332,7 +6640,15 @@ distro: let external_config: serde_yaml::Value = serde_yaml::from_str(external_config_content).unwrap(); - Config::merge_external_config(&mut main_config, &external_config, "test-ext"); + // Distro is never merged regardless of include patterns + let include_patterns = vec!["distro.*".to_string()]; // Even with this, distro won't be merged + Config::merge_external_config( + &mut main_config, + &external_config, + "test-ext", + &include_patterns, + &[], + ); // Distro should remain unchanged from main config let distro = main_config.get("distro").unwrap(); @@ -6428,4 +6744,305 @@ sdk: assert!(sdk_deps.contains_key(serde_yaml::Value::String("main-sdk-dep".to_string()))); assert!(sdk_deps.contains_key(serde_yaml::Value::String("external-sdk-dep".to_string()))); } + + #[test] + fn test_extension_source_get_include_patterns() { + // Test Repo variant with include patterns + let source = ExtensionSource::Repo { + version: "*".to_string(), + package: None, + repo_name: None, + include: Some(vec![ + "provision.tegraflash".to_string(), + "sdk.compile.*".to_string(), + ]), + }; + let patterns = source.get_include_patterns(); + assert_eq!(patterns.len(), 2); + assert_eq!(patterns[0], "provision.tegraflash"); + assert_eq!(patterns[1], "sdk.compile.*"); 
+ + // Test Repo variant without include patterns + let source_no_include = ExtensionSource::Repo { + version: "*".to_string(), + package: None, + repo_name: None, + include: None, + }; + assert!(source_no_include.get_include_patterns().is_empty()); + + // Test Git variant with include patterns + let git_source = ExtensionSource::Git { + url: "https://example.com/repo.git".to_string(), + git_ref: Some("main".to_string()), + sparse_checkout: None, + include: Some(vec!["provision.*".to_string()]), + }; + assert_eq!(git_source.get_include_patterns().len(), 1); + + // Test Path variant with include patterns + let path_source = ExtensionSource::Path { + path: "./external".to_string(), + include: Some(vec!["sdk.dependencies.*".to_string()]), + }; + assert_eq!(path_source.get_include_patterns().len(), 1); + } + + #[test] + fn test_matches_include_pattern_exact() { + let patterns = vec![ + "provision.tegraflash".to_string(), + "sdk.compile.nvidia-l4t".to_string(), + ]; + + // Exact matches should return true + assert!(ExtensionSource::matches_include_pattern( + "provision.tegraflash", + &patterns + )); + assert!(ExtensionSource::matches_include_pattern( + "sdk.compile.nvidia-l4t", + &patterns + )); + + // Non-matches should return false + assert!(!ExtensionSource::matches_include_pattern( + "provision.usb", + &patterns + )); + assert!(!ExtensionSource::matches_include_pattern( + "sdk.compile.other", + &patterns + )); + assert!(!ExtensionSource::matches_include_pattern( + "provision", + &patterns + )); + } + + #[test] + fn test_matches_include_pattern_wildcard() { + let patterns = vec!["provision.*".to_string(), "sdk.compile.*".to_string()]; + + // Wildcard matches should work + assert!(ExtensionSource::matches_include_pattern( + "provision.tegraflash", + &patterns + )); + assert!(ExtensionSource::matches_include_pattern( + "provision.usb", + &patterns + )); + assert!(ExtensionSource::matches_include_pattern( + "sdk.compile.nvidia-l4t", + &patterns + )); + 
assert!(ExtensionSource::matches_include_pattern( + "sdk.compile.custom-lib", + &patterns + )); + + // Non-matches should return false + assert!(!ExtensionSource::matches_include_pattern( + "sdk.dependencies.package1", + &patterns + )); + assert!(!ExtensionSource::matches_include_pattern( + "runtime.prod", + &patterns + )); + + // Partial prefix matches without proper dot separator should not match + assert!(!ExtensionSource::matches_include_pattern( + "provisionExtra", + &patterns + )); + } + + #[test] + fn test_matches_include_pattern_empty() { + let empty_patterns: Vec = vec![]; + + // Empty patterns should never match + assert!(!ExtensionSource::matches_include_pattern( + "provision.tegraflash", + &empty_patterns + )); + assert!(!ExtensionSource::matches_include_pattern( + "anything", + &empty_patterns + )); + } + + #[test] + fn test_merge_external_config_with_include_patterns() { + let main_config_content = r#" +ext: + local-ext: + types: + - sysext +provision: + existing-profile: + script: provision.sh +"#; + let external_config_content = r#" +ext: + remote-ext: + types: + - sysext + dependencies: + some-dep: "*" +provision: + tegraflash: + script: flash.sh + usb: + script: usb-provision.sh +sdk: + dependencies: + external-dep: "*" + compile: + nvidia-l4t: + compile: build.sh +"#; + + let mut main_config: serde_yaml::Value = serde_yaml::from_str(main_config_content).unwrap(); + let external_config: serde_yaml::Value = + serde_yaml::from_str(external_config_content).unwrap(); + + // Only include provision.tegraflash (not provision.usb) + let include_patterns = vec!["provision.tegraflash".to_string()]; + Config::merge_external_config( + &mut main_config, + &external_config, + "remote-ext", + &include_patterns, + &[], + ); + + // Check that ext.remote-ext was merged (always happens) + let ext_section = main_config.get("ext").unwrap().as_mapping().unwrap(); + assert!(ext_section.contains_key(serde_yaml::Value::String("remote-ext".to_string()))); + + // Check that 
provision.tegraflash was included + let provision = main_config.get("provision").unwrap().as_mapping().unwrap(); + assert!(provision.contains_key(serde_yaml::Value::String("tegraflash".to_string()))); + assert!(provision.contains_key(serde_yaml::Value::String("existing-profile".to_string()))); + + // Check that provision.usb was NOT included (not in patterns) + assert!(!provision.contains_key(serde_yaml::Value::String("usb".to_string()))); + + // Check that sdk.dependencies was NOT merged (not in patterns) + assert!(main_config.get("sdk").is_none()); + } + + #[test] + fn test_merge_external_config_auto_include_compile() { + let main_config_content = r#" +ext: + local-ext: + types: + - sysext +"#; + let external_config_content = r#" +ext: + remote-ext: + types: + - sysext + dependencies: + nvidia-l4t: + compile: nvidia-l4t +sdk: + compile: + nvidia-l4t: + compile: build-nvidia.sh + other-lib: + compile: build-other.sh +"#; + + let mut main_config: serde_yaml::Value = serde_yaml::from_str(main_config_content).unwrap(); + let external_config: serde_yaml::Value = + serde_yaml::from_str(external_config_content).unwrap(); + + // Use auto_include_compile to include nvidia-l4t + let auto_include = vec!["nvidia-l4t".to_string()]; + Config::merge_external_config( + &mut main_config, + &external_config, + "remote-ext", + &[], // No explicit include patterns + &auto_include, // Auto-include nvidia-l4t compile section + ); + + // Check that sdk.compile.nvidia-l4t was included + let sdk_compile = main_config + .get("sdk") + .unwrap() + .get("compile") + .unwrap() + .as_mapping() + .unwrap(); + assert!(sdk_compile.contains_key(serde_yaml::Value::String("nvidia-l4t".to_string()))); + + // Check that sdk.compile.other-lib was NOT included + assert!(!sdk_compile.contains_key(serde_yaml::Value::String("other-lib".to_string()))); + } + + #[test] + fn test_find_compile_dependencies_in_ext() { + let ext_config_content = r#" +ext: + my-extension: + dependencies: + nvidia-l4t: + compile: 
nvidia-l4t + some-package: + version: "1.0" + custom-lib: + compile: custom-compile-section +"#; + let ext_config: serde_yaml::Value = serde_yaml::from_str(ext_config_content).unwrap(); + + let compile_deps = Config::find_compile_dependencies_in_ext(&ext_config, "my-extension"); + + assert_eq!(compile_deps.len(), 2); + assert!(compile_deps.contains(&"nvidia-l4t".to_string())); + assert!(compile_deps.contains(&"custom-compile-section".to_string())); + } + + #[test] + fn test_extension_source_include_serialization() { + let source = ExtensionSource::Repo { + version: "*".to_string(), + package: None, + repo_name: None, + include: Some(vec![ + "provision.tegraflash".to_string(), + "sdk.compile.*".to_string(), + ]), + }; + + let serialized = serde_yaml::to_string(&source).unwrap(); + assert!(serialized.contains("include:")); + assert!(serialized.contains("provision.tegraflash")); + assert!(serialized.contains("sdk.compile.*")); + + // Test deserialization + let yaml_content = r#" +type: repo +version: "*" +include: + - provision.tegraflash + - sdk.compile.* +"#; + let deserialized: ExtensionSource = serde_yaml::from_str(yaml_content).unwrap(); + match deserialized { + ExtensionSource::Repo { include, .. } => { + assert!(include.is_some()); + let patterns = include.unwrap(); + assert_eq!(patterns.len(), 2); + assert_eq!(patterns[0], "provision.tegraflash"); + } + _ => panic!("Expected Repo variant"), + } + } } diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs index 68a3818..682213a 100644 --- a/src/utils/ext_fetch.rs +++ b/src/utils/ext_fetch.rs @@ -92,6 +92,7 @@ impl ExtensionFetcher { version, package, repo_name, + .. // include field not needed for fetching } => { self.fetch_from_repo( ext_name, @@ -106,6 +107,7 @@ impl ExtensionFetcher { url, git_ref, sparse_checkout, + .. 
// include field not needed for fetching } => { self.fetch_from_git( ext_name, @@ -116,7 +118,7 @@ impl ExtensionFetcher { ) .await?; } - ExtensionSource::Path { path } => { + ExtensionSource::Path { path, .. } => { self.fetch_from_path(ext_name, path, &ext_install_path) .await?; } From 733671854d38edaf6ff9bcad8cff86daccbd5da8 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 14:08:20 -0500 Subject: [PATCH 13/23] update ext image to support external extension configs --- src/commands/build.rs | 151 ++++++++++++- src/commands/ext/build.rs | 174 +++++++++----- src/commands/ext/image.rs | 135 +++++++++-- src/commands/ext/package.rs | 62 ++++- src/utils/config.rs | 438 ++++++++++++++++++++++++++++++++++-- 5 files changed, 853 insertions(+), 107 deletions(-) diff --git a/src/commands/build.rs b/src/commands/build.rs index 12a4151..04655ec 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -8,19 +8,21 @@ use crate::commands::{ runtime::RuntimeBuildCommand, }; use crate::utils::{ - config::Config, + config::{Config, ExtensionSource}, output::{print_info, print_success, OutputLevel}, }; -/// Represents an extension dependency that can be either local, external, or version-based +/// Represents an extension dependency that can be either local, external, remote, or version-based #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ExtensionDependency { /// Extension defined in the main config file Local(String), - /// Extension defined in an external config file + /// Extension defined in an external config file (deprecated) External { name: String, config_path: String }, /// Extension resolved via DNF with a version specification Versioned { name: String, version: String }, + /// Remote extension with source field (repo, git, or path) + Remote { name: String, source: ExtensionSource }, } /// Implementation of the 'build' command that runs all build subcommands. 
@@ -208,6 +210,29 @@ impl BuildCommand { } // Versioned extensions are installed via DNF and don't need building } + ExtensionDependency::Remote { name, source: _ } => { + if self.verbose { + print_info( + &format!("Building remote extension '{name}'"), + OutputLevel::Normal, + ); + } + + // Build remote extension - ExtBuildCommand will load config from container + let ext_build_cmd = ExtBuildCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_build_cmd.execute().await.with_context(|| { + format!("Failed to build remote extension '{name}'") + })?; + } } } } else { @@ -267,6 +292,29 @@ impl BuildCommand { format!("Failed to create image for versioned extension '{name}' version '{version}'") })?; } + ExtensionDependency::Remote { name, source: _ } => { + if self.verbose { + print_info( + &format!("Creating image for remote extension '{name}'"), + OutputLevel::Normal, + ); + } + + // Create image for remote extension - ExtImageCommand will load config from container + let ext_image_cmd = ExtImageCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_image_cmd.execute().await.with_context(|| { + format!("Failed to create image for remote extension '{name}'") + })?; + } } } } else { @@ -436,9 +484,24 @@ impl BuildCommand { &mut visited, )?; } else { - // Local extension - required_extensions - .insert(ExtensionDependency::Local(ext_name.to_string())); + // Check if this extension has a source: field (remote extension) + let ext_source = parsed + .get("ext") + .and_then(|e| e.get(ext_name)) + .and_then(|ext| ext.get("source")) + .and_then(|s| ExtensionSource::from_yaml(s)); + + if 
let Some(source) = ext_source { + // Remote extension with source field + required_extensions.insert(ExtensionDependency::Remote { + name: ext_name.to_string(), + source, + }); + } else { + // Local extension + required_extensions + .insert(ExtensionDependency::Local(ext_name.to_string())); + } } } } @@ -452,11 +515,13 @@ impl BuildCommand { ExtensionDependency::Local(name) => name, ExtensionDependency::External { name, .. } => name, ExtensionDependency::Versioned { name, .. } => name, + ExtensionDependency::Remote { name, .. } => name, }; let name_b = match b { ExtensionDependency::Local(name) => name, ExtensionDependency::External { name, .. } => name, ExtensionDependency::Versioned { name, .. } => name, + ExtensionDependency::Remote { name, .. } => name, }; name_a.cmp(name_b) }); @@ -475,6 +540,7 @@ impl BuildCommand { ExtensionDependency::External { name, config_path } => (name, config_path), ExtensionDependency::Local(_) => return Ok(()), // Local extensions don't have nested external deps ExtensionDependency::Versioned { .. } => return Ok(()), // Versioned extensions don't have nested deps + ExtensionDependency::Remote { .. } => return Ok(()), // Remote extensions are handled separately }; // Cycle detection: check if we've already processed this extension @@ -964,6 +1030,22 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION "Cannot build individual versioned extension '{name}' version '{version}'. Versioned extensions are installed via DNF." 
)); } + ExtensionDependency::Remote { name, source: _ } => { + let ext_build_cmd = ExtBuildCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_build_cmd + .execute() + .await + .with_context(|| format!("Failed to build remote extension '{name}'"))?; + } } // Step 2: Create extension image @@ -1007,6 +1089,21 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION "Cannot create image for individual versioned extension '{name}' version '{version}'. Versioned extensions are installed via DNF." )); } + ExtensionDependency::Remote { name, source: _ } => { + let ext_image_cmd = ExtImageCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_image_cmd.execute().await.with_context(|| { + format!("Failed to create image for remote extension '{name}'") + })?; + } } print_success( @@ -1161,6 +1258,44 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION format!("Failed to create image for versioned extension '{name}' version '{version}'") })?; } + ExtensionDependency::Remote { name, source: _ } => { + if self.verbose { + print_info( + &format!("Building remote extension '{name}'"), + OutputLevel::Normal, + ); + } + + // Build remote extension + let ext_build_cmd = ExtBuildCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_build_cmd.execute().await.with_context(|| { + format!("Failed to build remote extension '{name}'") + })?; + + // 
Create extension image + let ext_image_cmd = ExtImageCommand::new( + name.clone(), + self.config_path.clone(), + self.verbose, + self.target.clone(), + self.container_args.clone(), + self.dnf_args.clone(), + ) + .with_no_stamps(self.no_stamps) + .with_runs_on(self.runs_on.clone(), self.nfs_port); + ext_image_cmd.execute().await.with_context(|| { + format!("Failed to create image for remote extension '{name}'") + })?; + } } } } else { @@ -1261,11 +1396,13 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ExtensionDependency::Local(name) => name, ExtensionDependency::External { name, .. } => name, ExtensionDependency::Versioned { name, .. } => name, + ExtensionDependency::Remote { name, .. } => name, }; let name_b = match b { ExtensionDependency::Local(name) => name, ExtensionDependency::External { name, .. } => name, ExtensionDependency::Versioned { name, .. } => name, + ExtensionDependency::Remote { name, .. } => name, }; name_a.cmp(name_b) }); @@ -1359,6 +1496,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ExtensionDependency::Local(name) => name, ExtensionDependency::External { name, .. } => name, ExtensionDependency::Versioned { name, .. } => name, + ExtensionDependency::Remote { name, .. } => name, }; if found_name == extension_name { @@ -1390,6 +1528,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ); } ExtensionDependency::Versioned { .. } => return Ok(()), // Versioned extensions don't have nested deps + ExtensionDependency::Remote { .. 
} => return Ok(()), // Remote extensions are handled separately }; // Cycle detection: check if we've already processed this extension diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index 715e23b..d317efe 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -87,7 +87,7 @@ impl ExtBuildCommand { let composed = Config::load_composed(&self.config_path, self.target.as_deref()) .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; let config = &composed.config; - let _parsed = &composed.merged_value; + let parsed = &composed.merged_value; // Merge container args from config and CLI (similar to SDK commands) let processed_container_args = @@ -192,13 +192,41 @@ impl ExtBuildCommand { } } - // Get merged extension configuration with target-specific overrides - // Use the config path where the extension is actually defined for proper interpolation - let ext_config = config - .get_merged_ext_config(&self.extension, &target, &ext_config_path)? - .ok_or_else(|| { - anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) - })?; + // Get extension configuration from the composed/merged config + // For remote extensions, this comes from the merged remote extension config (already read via container) + // For local extensions, this uses get_merged_ext_config which reads from the file + let ext_config = match &extension_location { + ExtensionLocation::Remote { .. 
} => { + // Use the already-merged config from `parsed` which contains remote extension configs + // Then apply target-specific overrides manually + let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + if let Some(ext_val) = ext_section { + let base_ext = ext_val.clone(); + // Check for target-specific override within this extension + let target_override = ext_val.get(&target).cloned(); + if let Some(override_val) = target_override { + // Merge target override into base, filtering out other target sections + Some(config.merge_target_override(base_ext, override_val, &target)) + } else { + Some(base_ext) + } + } else { + None + } + } + ExtensionLocation::Local { config_path, .. } => { + // For local extensions, read from the file with proper target merging + config.get_merged_ext_config(&self.extension, &target, config_path)? + } + #[allow(deprecated)] + ExtensionLocation::External { config_path, .. } => { + // For deprecated external configs, read from the file + config.get_merged_ext_config(&self.extension, &target, config_path)? + } + } + .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + })?; // Handle compile dependencies with install scripts before building the extension // Pass the ext_config_path so SDK compile sections are loaded from the correct config @@ -371,6 +399,18 @@ impl ExtBuildCommand { // Initialize SDK container helper let container_helper = SdkContainer::from_config(&self.config_path, &config)?; + // Determine the extension source path for overlay resolution + // For remote extensions, files are in $AVOCADO_PREFIX/includes// + // For local extensions, files are in /opt/src (the mounted src_dir) + let ext_src_path = match &extension_location { + ExtensionLocation::Remote { name, .. } => { + format!("$AVOCADO_PREFIX/includes/{name}") + } + ExtensionLocation::Local { .. } | ExtensionLocation::External { .. 
} => { + "/opt/src".to_string() + } + }; + // Build extensions based on configuration let mut overall_success = true; @@ -398,6 +438,7 @@ impl ExtBuildCommand { users_config, groups_config, reload_service_manager, + &ext_src_path, ) .await? } @@ -418,6 +459,7 @@ impl ExtBuildCommand { users_config, groups_config, reload_service_manager, + &ext_src_path, ) .await? } @@ -507,6 +549,7 @@ impl ExtBuildCommand { users_config: Option<&serde_yaml::Mapping>, groups_config: Option<&serde_yaml::Mapping>, reload_service_manager: bool, + ext_src_path: &str, ) -> Result { // Create the build script for sysext extension let build_script = self.create_sysext_build_script( @@ -519,6 +562,7 @@ impl ExtBuildCommand { users_config, groups_config, reload_service_manager, + ext_src_path, ); // Execute the build script in the SDK container @@ -573,6 +617,7 @@ impl ExtBuildCommand { users_config: Option<&serde_yaml::Mapping>, groups_config: Option<&serde_yaml::Mapping>, reload_service_manager: bool, + ext_src_path: &str, ) -> Result { // Create the build script for confext extension let build_script = self.create_confext_build_script( @@ -585,6 +630,7 @@ impl ExtBuildCommand { users_config, groups_config, reload_service_manager, + ext_src_path, ); // Execute the build script in the SDK container @@ -633,58 +679,52 @@ impl ExtBuildCommand { users_config: Option<&serde_yaml::Mapping>, groups_config: Option<&serde_yaml::Mapping>, reload_service_manager: bool, + ext_src_path: &str, ) -> String { let overlay_section = if let Some(overlay_config) = overlay_config { match overlay_config.mode { OverlayMode::Merge => format!( r#" # Merge overlay directory into extension sysroot -if [ -d "/opt/src/{}" ]; then - echo "Merging overlay directory '{}' into extension sysroot with root:root ownership" +if [ -d "{src_path}/{overlay_dir}" ]; then + echo "Merging overlay directory '{overlay_dir}' into extension sysroot with root:root ownership" # Use rsync to merge directories and set ownership during copy 
- rsync -a --chown=root:root /opt/src/{}/ "$AVOCADO_EXT_SYSROOTS/{}/" + rsync -a --chown=root:root {src_path}/{overlay_dir}/ "$AVOCADO_EXT_SYSROOTS/{ext_name}/" else - echo "Error: Overlay directory '{}' not found in source" + echo "Error: Overlay directory '{overlay_dir}' not found in source" exit 1 fi "#, - overlay_config.dir, - overlay_config.dir, - overlay_config.dir, - self.extension, - overlay_config.dir + src_path = ext_src_path, + overlay_dir = overlay_config.dir, + ext_name = self.extension, ), OverlayMode::Opaque => format!( r#" # Copy overlay directory to extension sysroot (opaque mode) -if [ -d "/opt/src/{}" ]; then - echo "Copying overlay directory '{}' to extension sysroot (opaque mode)" +if [ -d "{src_path}/{overlay_dir}" ]; then + echo "Copying overlay directory '{overlay_dir}' to extension sysroot (opaque mode)" # Use cp -a to replace directory contents completely while preserving permissions - cp -a /opt/src/{}/* "$AVOCADO_EXT_SYSROOTS/{}/" + cp -a {src_path}/{overlay_dir}/* "$AVOCADO_EXT_SYSROOTS/{ext_name}/" # Fix ownership to root:root for copied overlay files only (permissions are preserved) echo "Setting ownership to root:root for overlay files" - find "/opt/src/{}" -mindepth 1 | while IFS= read -r srcpath; do - relpath="$(echo "$srcpath" | sed "s|^/opt/src/{}||" | sed "s|^/||")" + find "{src_path}/{overlay_dir}" -mindepth 1 | while IFS= read -r srcpath; do + relpath="$(echo "$srcpath" | sed "s|^{src_path}/{overlay_dir}||" | sed "s|^/||")" if [ -n "$relpath" ]; then - destpath="$AVOCADO_EXT_SYSROOTS/{}/$relpath" + destpath="$AVOCADO_EXT_SYSROOTS/{ext_name}/$relpath" if [ -e "$destpath" ]; then chown root:root "$destpath" 2>/dev/null || true fi fi done else - echo "Error: Overlay directory '{}' not found in source" + echo "Error: Overlay directory '{overlay_dir}' not found in source" exit 1 fi "#, - overlay_config.dir, - overlay_config.dir, - overlay_config.dir, - self.extension, - overlay_config.dir, - overlay_config.dir, - self.extension, - 
overlay_config.dir + src_path = ext_src_path, + overlay_dir = overlay_config.dir, + ext_name = self.extension, ), } } else { @@ -806,58 +846,52 @@ fi users_config: Option<&serde_yaml::Mapping>, groups_config: Option<&serde_yaml::Mapping>, reload_service_manager: bool, + ext_src_path: &str, ) -> String { let overlay_section = if let Some(overlay_config) = overlay_config { match overlay_config.mode { OverlayMode::Merge => format!( r#" # Merge overlay directory into extension sysroot -if [ -d "/opt/src/{}" ]; then - echo "Merging overlay directory '{}' into extension sysroot with root:root ownership" +if [ -d "{src_path}/{overlay_dir}" ]; then + echo "Merging overlay directory '{overlay_dir}' into extension sysroot with root:root ownership" # Use rsync to merge directories and set ownership during copy - rsync -a --chown=root:root /opt/src/{}/ "$AVOCADO_EXT_SYSROOTS/{}/" + rsync -a --chown=root:root {src_path}/{overlay_dir}/ "$AVOCADO_EXT_SYSROOTS/{ext_name}/" else - echo "Error: Overlay directory '{}' not found in source" + echo "Error: Overlay directory '{overlay_dir}' not found in source" exit 1 fi "#, - overlay_config.dir, - overlay_config.dir, - overlay_config.dir, - self.extension, - overlay_config.dir + src_path = ext_src_path, + overlay_dir = overlay_config.dir, + ext_name = self.extension, ), OverlayMode::Opaque => format!( r#" # Copy overlay directory to extension sysroot (opaque mode) -if [ -d "/opt/src/{}" ]; then - echo "Copying overlay directory '{}' to extension sysroot (opaque mode)" +if [ -d "{src_path}/{overlay_dir}" ]; then + echo "Copying overlay directory '{overlay_dir}' to extension sysroot (opaque mode)" # Use cp -a to replace directory contents completely while preserving permissions - cp -a /opt/src/{}/* "$AVOCADO_EXT_SYSROOTS/{}/" + cp -a {src_path}/{overlay_dir}/* "$AVOCADO_EXT_SYSROOTS/{ext_name}/" # Fix ownership to root:root for copied overlay files only (permissions are preserved) echo "Setting ownership to root:root for overlay files" - 
find "/opt/src/{}" -mindepth 1 | while IFS= read -r srcpath; do - relpath="$(echo "$srcpath" | sed "s|^/opt/src/{}||" | sed "s|^/||")" + find "{src_path}/{overlay_dir}" -mindepth 1 | while IFS= read -r srcpath; do + relpath="$(echo "$srcpath" | sed "s|^{src_path}/{overlay_dir}||" | sed "s|^/||")" if [ -n "$relpath" ]; then - destpath="$AVOCADO_EXT_SYSROOTS/{}/$relpath" + destpath="$AVOCADO_EXT_SYSROOTS/{ext_name}/$relpath" if [ -e "$destpath" ]; then chown root:root "$destpath" 2>/dev/null || true fi fi done else - echo "Error: Overlay directory '{}' not found in source" + echo "Error: Overlay directory '{overlay_dir}' not found in source" exit 1 fi "#, - overlay_config.dir, - overlay_config.dir, - overlay_config.dir, - self.extension, - overlay_config.dir, - overlay_config.dir, - self.extension, - overlay_config.dir + src_path = ext_src_path, + overlay_dir = overlay_config.dir, + ext_name = self.extension, ), } } else { @@ -1672,6 +1706,7 @@ mod tests { None, None, false, + "/opt/src", ); // Print the actual script for debugging @@ -1730,6 +1765,7 @@ mod tests { None, None, false, + "/opt/src", ); assert!(script @@ -1785,6 +1821,7 @@ mod tests { None, None, false, + "/opt/src", ); assert!(script.contains("echo \"SYSEXT_SCOPE=system portable\" >> \"$release_file\"")); @@ -1814,6 +1851,7 @@ mod tests { None, None, false, + "/opt/src", ); assert!(script.contains("echo \"CONFEXT_SCOPE=system portable\" >> \"$release_file\"")); @@ -1844,6 +1882,7 @@ mod tests { None, None, false, + "/opt/src", ); // Check that service enabling commands are present using [Install] section parser @@ -1896,6 +1935,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify the find command looks for common kernel module extensions @@ -1934,6 +1974,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify overlay merging commands are present @@ -1974,6 +2015,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify overlay merging commands are present @@ -2014,6 +2056,7 @@ mod 
tests { None, None, false, + "/opt/src", ); // Verify overlay opaque mode commands are present @@ -2056,6 +2099,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify overlay opaque mode commands are present @@ -2094,6 +2138,7 @@ mod tests { None, None, false, + "/opt/src", ); let script_confext = cmd.create_confext_build_script( "1.0", @@ -2105,6 +2150,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify no overlay merging commands are present @@ -2140,6 +2186,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify separate AVOCADO_ON_MERGE entries are added for kernel modules and modprobe commands @@ -2184,6 +2231,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify sysusers.d detection logic is present @@ -2222,6 +2270,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify sysusers.d detection logic is present for confext @@ -2257,6 +2306,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify ld.so.conf.d detection logic is present for confext @@ -2295,6 +2345,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify custom on_merge commands are added as separate entries @@ -2335,6 +2386,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify custom on_unmerge commands are added as separate entries @@ -2375,6 +2427,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify custom on_unmerge commands are added as separate entries @@ -2410,6 +2463,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify custom on_merge commands are added as separate entries @@ -2444,6 +2498,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify both kernel modules and sysusers.d are handled correctly with separate lines @@ -2477,6 +2532,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify that without modprobe modules, only depmod is added when kernel modules are found @@ -2510,6 +2566,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify separate AVOCADO_ON_MERGE entries are added 
@@ -2563,6 +2620,7 @@ mod tests { None, None, false, + "/opt/src", ); // Verify that each command gets its own separate AVOCADO_ON_MERGE entry @@ -2767,6 +2825,7 @@ mod tests { Some(&users_config), None, false, + "/opt/src", ); // Verify the complete build script includes users functionality @@ -2822,6 +2881,7 @@ mod tests { Some(&users_config), None, false, + "/opt/src", ); // Verify the complete build script includes users functionality @@ -3306,6 +3366,7 @@ mod tests { None, None, true, + "/opt/src", ); // Verify that reload_service_manager = true sets EXTENSION_RELOAD_MANAGER=1 @@ -3335,6 +3396,7 @@ mod tests { None, None, true, + "/opt/src", ); // Verify that reload_service_manager = true sets EXTENSION_RELOAD_MANAGER=1 diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index 0b3ebb5..4ee5876 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -69,10 +69,11 @@ impl ExtImageCommand { } pub async fn execute(&self) -> Result<()> { - // Load configuration and parse raw TOML - let config = Config::load(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let parsed = &composed.merged_value; // Merge container args from config and CLI let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); @@ -136,15 +137,49 @@ impl ExtImageCommand { } } - // Find extension using comprehensive lookup - let extension_location = config - .find_extension_in_dependency_tree(&self.config_path, &self.extension, &target)? 
- .ok_or_else(|| { - anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) - })?; + // Determine extension location by checking the composed (interpolated) config + // This is more reliable than find_extension_in_dependency_tree which reads the raw file + // and may not find templated extension names like "avocado-bsp-{{ avocado.target }}" + let extension_location = { + // First check if extension exists in the composed config's ext section + let ext_in_composed = parsed + .get("ext") + .and_then(|e| e.get(&self.extension)); + + if let Some(ext_config) = ext_in_composed { + // Check if it has a source: field (indicating remote extension) + if ext_config.get("source").is_some() { + // Parse the source to get ExtensionSource + let source = Config::parse_extension_source(&self.extension, ext_config)? + .ok_or_else(|| { + anyhow::anyhow!( + "Extension '{}' has source field but failed to parse it", + self.extension + ) + })?; + ExtensionLocation::Remote { + name: self.extension.clone(), + source, + } + } else { + // Local extension defined in main config + ExtensionLocation::Local { + name: self.extension.clone(), + config_path: self.config_path.clone(), + } + } + } else { + // Fall back to comprehensive lookup for external extensions + config + .find_extension_in_dependency_tree(&self.config_path, &self.extension, &target)? + .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + })? + } + }; // Get the config path where this extension is actually defined - let ext_config_path = match &extension_location { + let _ext_config_path = match &extension_location { ExtensionLocation::Local { config_path, .. } => config_path.clone(), ExtensionLocation::External { config_path, .. } => config_path.clone(), ExtensionLocation::Remote { name, .. 
} => { @@ -181,13 +216,77 @@ impl ExtImageCommand { } } - // Get merged extension configuration with target-specific overrides - // Use the config path where the extension is actually defined for proper interpolation - let ext_config = config - .get_merged_ext_config(&self.extension, &target, &ext_config_path)? - .ok_or_else(|| { - anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) - })?; + // Get extension configuration from the composed/merged config + // For remote extensions, this comes from the merged remote extension config (already read via container) + // For local extensions, this uses get_merged_ext_config which reads from the file + let ext_config = match &extension_location { + ExtensionLocation::Remote { .. } => { + // Use the already-merged config from `parsed` which contains remote extension configs + // Then apply target-specific overrides manually + let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + + if self.verbose { + if let Some(all_ext) = parsed.get("ext") { + if let Some(ext_map) = all_ext.as_mapping() { + let ext_names: Vec<_> = ext_map + .keys() + .filter_map(|k| k.as_str()) + .collect(); + eprintln!( + "[DEBUG] Available extensions in composed config: {:?}", + ext_names + ); + } + } + eprintln!( + "[DEBUG] Looking for extension '{}' in composed config, found: {}", + self.extension, + ext_section.is_some() + ); + if let Some(ext_val) = &ext_section { + eprintln!( + "[DEBUG] Extension '{}' config:\n{}", + self.extension, + serde_yaml::to_string(ext_val).unwrap_or_default() + ); + } + } + + if let Some(ext_val) = ext_section { + let base_ext = ext_val.clone(); + // Check for target-specific override within this extension + let target_override = ext_val.get(&target).cloned(); + if let Some(override_val) = target_override { + // Merge target override into base, filtering out other target sections + Some(config.merge_target_override(base_ext, override_val, &target)) + } else { + 
Some(base_ext) + } + } else { + None + } + } + ExtensionLocation::Local { config_path, .. } => { + // For local extensions, read from the file with proper target merging + config.get_merged_ext_config(&self.extension, &target, config_path)? + } + #[allow(deprecated)] + ExtensionLocation::External { config_path, .. } => { + // For deprecated external configs, read from the file + config.get_merged_ext_config(&self.extension, &target, config_path)? + } + } + .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + })?; + + if self.verbose { + eprintln!( + "[DEBUG] Final ext_config for '{}':\n{}", + self.extension, + serde_yaml::to_string(&ext_config).unwrap_or_default() + ); + } // Get extension version let ext_version = ext_config diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index 9adb171..d86a64f 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -66,11 +66,14 @@ impl ExtPackageCommand { } pub async fn execute(&self) -> Result<()> { - // Load configuration - let config = Config::load(&self.config_path)?; + // Load composed configuration (includes remote extension configs) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let parsed = &composed.merged_value; // Resolve target - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // With the new src_dir packaging approach, we no longer require // ext_install and ext_build stamps. 
We're packaging the source directory, @@ -131,16 +134,53 @@ impl ExtPackageCommand { } } - // Get merged extension configuration with target-specific overrides and interpolation - // Use the config path where the extension is actually defined for proper interpolation - let ext_config = config - .get_merged_ext_config(&self.extension, &target, &ext_config_path)? - .ok_or_else(|| { - anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) - })?; + // Get extension configuration from the composed/merged config + // For remote extensions, this comes from the merged remote extension config (already read via container) + // For local extensions, this uses get_merged_ext_config which reads from the file + let ext_config = match &extension_location { + ExtensionLocation::Remote { .. } => { + // Use the already-merged config from `parsed` which contains remote extension configs + // Then apply target-specific overrides manually + let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + if let Some(ext_val) = ext_section { + let base_ext = ext_val.clone(); + // Check for target-specific override within this extension + let target_override = ext_val.get(&target).cloned(); + if let Some(override_val) = target_override { + // Merge target override into base, filtering out other target sections + Some(config.merge_target_override(base_ext, override_val, &target)) + } else { + Some(base_ext) + } + } else { + None + } + } + ExtensionLocation::Local { config_path, .. } => { + // For local extensions, read from the file with proper target merging + config.get_merged_ext_config(&self.extension, &target, config_path)? + } + #[allow(deprecated)] + ExtensionLocation::External { config_path, .. } => { + // For deprecated external configs, read from the file + config.get_merged_ext_config(&self.extension, &target, config_path)? 
+ } + } + .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + })?; // Also get the raw (unmerged) extension config to find all target-specific overlays - let raw_ext_config = self.get_raw_extension_config(&ext_config_path)?; + // For remote extensions, use the parsed config; for local, read from file + let raw_ext_config = match &extension_location { + ExtensionLocation::Remote { .. } => { + parsed + .get("ext") + .and_then(|ext| ext.get(&self.extension)) + .cloned() + } + _ => self.get_raw_extension_config(&ext_config_path)?, + }; // Extract RPM metadata with defaults let rpm_metadata = self.extract_rpm_metadata(&ext_config, &target)?; diff --git a/src/utils/config.rs b/src/utils/config.rs index 1d2f541..eb03922 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -216,6 +216,89 @@ impl ExtensionSource { } false } + + /// Parse ExtensionSource from a YAML value + pub fn from_yaml(value: &serde_yaml::Value) -> Option { + let map = value.as_mapping()?; + + // Check for "repo" source type + if let Some(repo_val) = map.get(&serde_yaml::Value::String("repo".to_string())) { + let version = repo_val.as_str().unwrap_or("*").to_string(); + let package = map + .get(&serde_yaml::Value::String("package".to_string())) + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let repo_name = map + .get(&serde_yaml::Value::String("repo_name".to_string())) + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let include = map + .get(&serde_yaml::Value::String("include".to_string())) + .and_then(|v| v.as_sequence()) + .map(|seq| { + seq.iter() + .filter_map(|v| v.as_str()) + .map(|s| s.to_string()) + .collect() + }); + return Some(ExtensionSource::Repo { + version, + package, + repo_name, + include, + }); + } + + // Check for "git" source type + if let Some(git_val) = map.get(&serde_yaml::Value::String("git".to_string())) { + let url = git_val.as_str()?.to_string(); + let git_ref = map + 
.get(&serde_yaml::Value::String("ref".to_string())) + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let sparse_checkout = map + .get(&serde_yaml::Value::String("sparse_checkout".to_string())) + .and_then(|v| v.as_sequence()) + .map(|seq| { + seq.iter() + .filter_map(|v| v.as_str()) + .map(|s| s.to_string()) + .collect() + }); + let include = map + .get(&serde_yaml::Value::String("include".to_string())) + .and_then(|v| v.as_sequence()) + .map(|seq| { + seq.iter() + .filter_map(|v| v.as_str()) + .map(|s| s.to_string()) + .collect() + }); + return Some(ExtensionSource::Git { + url, + git_ref, + sparse_checkout, + include, + }); + } + + // Check for "path" source type + if let Some(path_val) = map.get(&serde_yaml::Value::String("path".to_string())) { + let path = path_val.as_str()?.to_string(); + let include = map + .get(&serde_yaml::Value::String("include".to_string())) + .and_then(|v| v.as_sequence()) + .map(|seq| { + seq.iter() + .filter_map(|v| v.as_str()) + .map(|s| s.to_string()) + .collect() + }); + return Some(ExtensionSource::Path { path, include }); + } + + None + } } /// Represents an extension dependency for a runtime with type information @@ -820,34 +903,132 @@ impl Config { .ok() .flatten(); - // For each remote extension, try to read its config via container + // Check for verbose/debug mode via environment variable + let verbose = std::env::var("AVOCADO_DEBUG").is_ok() + || std::env::var("AVOCADO_VERBOSE").is_ok(); + + // Always output merge info to stderr for debugging (TODO: remove after fixing) + eprintln!( + "[DEBUG-MERGE] merge_installed_remote_extensions: found {} remote extensions: {:?}", + remote_extensions.len(), + remote_extensions.iter().map(|(n, _)| n).collect::>() + ); + eprintln!( + "[DEBUG-MERGE] resolved_target: {}, volume_state: {:?}", + resolved_target, + volume_state.as_ref().map(|v| &v.volume_name) + ); + + if verbose { + eprintln!( + "[DEBUG] merge_installed_remote_extensions: found {} remote extensions: {:?}", + 
remote_extensions.len(), + remote_extensions.iter().map(|(n, _)| n).collect::>() + ); + } + + // For each remote extension, try to read its config for (ext_name, source) in remote_extensions { - // Try to read extension config via container command - let ext_content = match &volume_state { - Some(vs) => { + // Try multiple methods to read the extension config: + // 1. Direct container path (when running inside a container) + // 2. Via container command (when running on host) + // 3. Local fallback path (for development) + + let ext_content = { + // Method 1: Check if we're inside a container and can read directly + // The standard container path is /opt/_avocado//includes//avocado.yaml + let container_direct_path = format!( + "/opt/_avocado/{}/includes/{}/avocado.yaml", + resolved_target, ext_name + ); + let container_path = Path::new(&container_direct_path); + + // Always output for debugging (TODO: remove after fixing) + eprintln!( + "[DEBUG-MERGE] Checking for '{}' config at: {} (exists: {})", + ext_name, container_direct_path, container_path.exists() + ); + + if verbose { + eprintln!( + "[DEBUG] Checking for remote extension '{}' config at: {}", + ext_name, container_direct_path + ); + eprintln!( + "[DEBUG] Path exists: {}", + container_path.exists() + ); + } + + if container_path.exists() { + // We're inside a container, read directly + match fs::read_to_string(container_path) { + Ok(content) => { + if verbose { + eprintln!( + "[DEBUG] Read {} bytes from container path", + content.len() + ); + } + content + } + Err(e) => { + if verbose { + eprintln!("[DEBUG] Failed to read: {}", e); + } + continue; + } + } + } else if let Some(vs) = &volume_state { + // Method 2: Use container command to read from Docker volume + if verbose { + eprintln!( + "[DEBUG] Trying via container command (volume: {})", + vs.volume_name + ); + } match Self::read_extension_config_via_container(vs, &resolved_target, &ext_name) { - Ok(content) => content, - Err(_) => { + Ok(content) => { + 
if verbose { + eprintln!( + "[DEBUG] Read {} bytes via container", + content.len() + ); + } + content + } + Err(e) => { + if verbose { + eprintln!("[DEBUG] Container read failed: {}", e); + } // Extension not installed yet or config not found, skip continue; } } - } - None => { - // No volume state - try fallback to local path (for development) + } else { + // Method 3: Fallback to local path (for development) let fallback_dir = src_dir .join(".avocado") .join(&resolved_target) .join("includes") .join(&ext_name); let config_path_local = fallback_dir.join("avocado.yaml"); + if verbose { + eprintln!( + "[DEBUG] Trying fallback path: {}", + config_path_local.display() + ); + } if config_path_local.exists() { match fs::read_to_string(&config_path_local) { Ok(content) => content, Err(_) => continue, } } else { + if verbose { + eprintln!("[DEBUG] No config found for '{}', skipping", ext_name); + } continue; } } @@ -856,8 +1037,42 @@ impl Config { // Use a .yaml extension so parse_config_value knows to parse as YAML let ext_config_path = format!("{ext_name}/avocado.yaml"); let ext_config = match Self::parse_config_value(&ext_config_path, &ext_content) { - Ok(cfg) => cfg, - Err(_) => { + Ok(cfg) => { + if verbose { + eprintln!("[DEBUG] Successfully parsed config for '{}'", ext_name); + // Show what extensions are defined in this remote config + if let Some(ext_section) = cfg.get("ext").and_then(|e| e.as_mapping()) { + let ext_names: Vec<_> = ext_section + .keys() + .filter_map(|k| k.as_str()) + .collect(); + eprintln!( + "[DEBUG] Remote config defines extensions: {:?}", + ext_names + ); + // Show the extension section that matches our name + if let Some(our_ext) = ext_section.get(serde_yaml::Value::String(ext_name.clone())) { + eprintln!( + "[DEBUG] Extension '{}' in remote config:\n{}", + ext_name, + serde_yaml::to_string(our_ext).unwrap_or_default() + ); + } else { + eprintln!( + "[DEBUG] Extension '{}' NOT found in remote config's ext section", + ext_name + ); + } + } 
else { + eprintln!("[DEBUG] Remote config has no 'ext' section"); + } + } + cfg + } + Err(e) => { + if verbose { + eprintln!("[DEBUG] Failed to parse config for '{}': {}", ext_name, e); + } // Failed to parse config, skip this extension continue; } @@ -885,6 +1100,19 @@ impl Config { let auto_include_compile = Self::find_compile_dependencies_in_ext(&ext_config, &ext_name); + // Always output for debugging (TODO: remove after fixing) + eprintln!( + "[DEBUG-MERGE] Merging '{}' into main config", + ext_name + ); + + if verbose { + eprintln!( + "[DEBUG] Merging '{}' with include_patterns: {:?}, auto_include_compile: {:?}", + ext_name, include_patterns, auto_include_compile + ); + } + // Merge the remote extension config with include patterns Self::merge_external_config( main_config, @@ -893,6 +1121,26 @@ impl Config { include_patterns, &auto_include_compile, ); + + // Always output for debugging (TODO: remove after fixing) + if let Some(main_ext) = main_config.get("ext").and_then(|e| e.get(&ext_name)) { + eprintln!( + "[DEBUG-MERGE] After merge, ext.{}:\n{}", + ext_name, + serde_yaml::to_string(main_ext).unwrap_or_default() + ); + } + + if verbose { + // Show what the main config's ext section looks like after merge + if let Some(main_ext) = main_config.get("ext").and_then(|e| e.get(&ext_name)) { + eprintln!( + "[DEBUG] After merge, main config ext.{}:\n{}", + ext_name, + serde_yaml::to_string(main_ext).unwrap_or_default() + ); + } + } } Ok(extension_sources) @@ -1132,11 +1380,47 @@ impl Config { // This handles the case where main config has a stub with just `source:` // and the remote extension has the full definition with `dependencies:` etc. 
let ext_key = serde_yaml::Value::String(ext_name.to_string()); - if let Some(ext_value) = external_ext.get(&ext_key) { - if let Some(existing_ext) = main_ext_map.get_mut(&ext_key) { - // Deep-merge: add fields from remote that don't exist in main - // Main config values take precedence on conflicts - Self::deep_merge_ext_section(existing_ext, ext_value); + + // Try to find the extension's config in the remote extension's config. + // We use multiple strategies: + // 1. Exact match (e.g., "avocado-bsp-raspberrypi4") + // 2. Base name match (e.g., "avocado-bsp" when looking for "avocado-bsp-raspberrypi4") + // 3. Single extension in remote config (if there's only one, use it) + let ext_value = external_ext + .get(&ext_key) + .or_else(|| { + // Try base name: strip common target suffixes from ext_name + let base_names = Self::get_base_extension_names(ext_name); + for base in &base_names { + let base_key = serde_yaml::Value::String(base.clone()); + if let Some(val) = external_ext.get(&base_key) { + return Some(val); + } + } + None + }) + .or_else(|| { + // If remote config has exactly one extension defined, use it + if external_ext.len() == 1 { + external_ext.values().next() + } else { + None + } + }); + + if let Some(ext_value) = ext_value { + // Try to find the existing key in main config that matches ext_name. + // This handles template keys like "avocado-bsp-{{ avocado.target }}" + // that interpolate to "avocado-bsp-raspberrypi4". 
+ let existing_key = Self::find_matching_ext_key(main_ext_map, ext_name); + + if let Some(existing_key) = existing_key { + // Found an existing key (possibly a template) - merge into it + if let Some(existing_ext) = main_ext_map.get_mut(&existing_key) { + // Deep-merge: add fields from remote that don't exist in main + // Main config values take precedence on conflicts + Self::deep_merge_ext_section(existing_ext, ext_value); + } } else { // Extension not in main config, just add it main_ext_map.insert(ext_key, ext_value.clone()); @@ -1226,6 +1510,69 @@ impl Config { } } + /// Find a matching extension key in the main config's ext section. + /// + /// This handles the case where the main config has template keys like + /// "avocado-bsp-{{ avocado.target }}" that should match the interpolated + /// name "avocado-bsp-raspberrypi4". + /// + /// Returns the original key (possibly a template) if found. + fn find_matching_ext_key( + ext_map: &serde_yaml::Mapping, + interpolated_name: &str, + ) -> Option { + // First try exact match + let exact_key = serde_yaml::Value::String(interpolated_name.to_string()); + if ext_map.contains_key(&exact_key) { + return Some(exact_key); + } + + // Look for template keys that would match after interpolation + // Common template patterns: {{ avocado.target }}, {{ config.* }} + for key in ext_map.keys() { + if let Some(key_str) = key.as_str() { + // Check if this is a template key + if key_str.contains("{{") && key_str.contains("}}") { + // Try to match by replacing common template patterns + // with regex-like patterns + + // Handle {{ avocado.target }} - this is the most common case + // The key might be "avocado-bsp-{{ avocado.target }}" + // and we're looking for "avocado-bsp-raspberrypi4" + if key_str.contains("{{ avocado.target }}") + || key_str.contains("{{avocado.target}}") + { + // Extract the prefix and suffix around the template + let parts: Vec<&str> = if key_str.contains("{{ avocado.target }}") { + key_str.split("{{ 
avocado.target }}").collect() + } else { + key_str.split("{{avocado.target}}").collect() + }; + + if parts.len() == 2 { + let prefix = parts[0]; + let suffix = parts[1]; + + // Check if the interpolated name matches the pattern + if interpolated_name.starts_with(prefix) + && interpolated_name.ends_with(suffix) + { + // Verify the middle part (the target) is reasonable + let middle_len = + interpolated_name.len() - prefix.len() - suffix.len(); + if middle_len > 0 { + return Some(key.clone()); + } + } + } + } + } + } + } + + None + } + /// Ensure the provision section exists in the config. fn ensure_provision_section(config: &mut serde_yaml::Value) { if let Some(main_map) = config.as_mapping_mut() { @@ -1238,6 +1585,50 @@ impl Config { } } + /// Get possible base extension names by stripping common target suffixes. + /// + /// For an extension like "avocado-bsp-raspberrypi4", this returns: + /// - "avocado-bsp" (common pattern for BSP packages) + /// - Names with common target suffixes stripped + fn get_base_extension_names(ext_name: &str) -> Vec { + let mut names = Vec::new(); + + // Common target suffixes to try stripping + let target_suffixes = [ + "-raspberrypi4", + "-raspberrypi5", + "-rpi4", + "-rpi5", + "-jetson-orin-nano", + "-jetson-orin-nx", + "-jetson", + "-x86_64", + "-aarch64", + ]; + + for suffix in &target_suffixes { + if ext_name.ends_with(suffix) { + let base = &ext_name[..ext_name.len() - suffix.len()]; + if !base.is_empty() && !names.contains(&base.to_string()) { + names.push(base.to_string()); + } + } + } + + // Also try splitting on last dash as a generic approach + // e.g., "my-ext-target" -> "my-ext" + if let Some(last_dash) = ext_name.rfind('-') { + if last_dash > 0 { + let base = &ext_name[..last_dash]; + if !names.contains(&base.to_string()) { + names.push(base.to_string()); + } + } + } + + names + } + /// Deep-merge an extension section from external config into main config. 
/// /// This handles the case where main config has a stub definition (with just `source:`) @@ -1398,6 +1789,21 @@ impl Config { (_, target_value) => target_value, } } + /// Merge a target-specific override into a base config value + /// This filters out other target sections from the base and merges the override + pub fn merge_target_override( + &self, + base: serde_yaml::Value, + target_override: serde_yaml::Value, + _current_target: &str, + ) -> serde_yaml::Value { + // Filter out target-specific subsections from base before merging + let supported_targets = self.get_supported_targets().unwrap_or_default(); + let filtered_base = self.filter_target_subsections(base, &supported_targets); + // Merge the target override into the filtered base + self.merge_values(filtered_base, target_override) + } + /// Get merged runtime configuration for a specific runtime and target #[allow(dead_code)] // Future API for command integration pub fn get_merged_runtime_config( From 433ccca8cac1b0f3424193d7fdf0e1bfc9dbc7c5 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 14:31:41 -0500 Subject: [PATCH 14/23] Clean up debug from logs --- src/commands/build.rs | 206 ++-------- src/commands/ext/build.rs | 10 +- src/commands/ext/dnf.rs | 6 +- src/commands/ext/image.rs | 28 +- src/commands/ext/package.rs | 12 +- src/commands/runtime/build.rs | 250 +++--------- src/commands/runtime/deps.rs | 72 ++-- src/commands/runtime/dnf.rs | 2 +- src/commands/runtime/install.rs | 34 +- src/commands/runtime/provision.rs | 12 +- src/commands/sdk/compile.rs | 10 +- src/utils/config.rs | 623 ++++++------------------------ src/utils/container.rs | 3 +- src/utils/stamps.rs | 179 +++------ 14 files changed, 320 insertions(+), 1127 deletions(-) diff --git a/src/commands/build.rs b/src/commands/build.rs index 04655ec..2b8bef1 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -22,7 +22,10 @@ pub enum ExtensionDependency { /// Extension resolved via DNF with a version 
specification Versioned { name: String, version: String }, /// Remote extension with source field (repo, git, or path) - Remote { name: String, source: ExtensionSource }, + Remote { + name: String, + source: ExtensionSource, + }, } /// Implementation of the 'build' command that runs all build subcommands. @@ -436,72 +439,56 @@ impl BuildCommand { runtimes: &[String], target: &str, ) -> Result> { + use crate::utils::interpolation::interpolate_name; + let mut required_extensions = HashSet::new(); - let mut visited = HashSet::new(); // For cycle detection + let _visited = HashSet::::new(); // For cycle detection // If no runtimes are found for this target, don't build any extensions if runtimes.is_empty() { return Ok(vec![]); } - let _runtime_section = parsed.get("runtime").and_then(|r| r.as_mapping()).unwrap(); + // Build a map of interpolated ext names to their source config + // This is needed because ext section keys may contain templates like {{ avocado.target }} + let mut ext_sources: std::collections::HashMap> = + std::collections::HashMap::new(); + if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + for (ext_key, ext_config) in ext_section { + if let Some(raw_name) = ext_key.as_str() { + // Interpolate the extension name with the target + let interpolated_name = interpolate_name(raw_name, target); + // Use parse_extension_source which properly deserializes the source field + let source = Config::parse_extension_source(&interpolated_name, ext_config) + .ok() + .flatten(); + ext_sources.insert(interpolated_name, source); + } + } + } for runtime_name in runtimes { // Get merged runtime config for this target let merged_runtime = config.get_merged_runtime_config(runtime_name, target, &self.config_path)?; if let Some(merged_value) = merged_runtime { - if let Some(dependencies) = merged_value - .get("dependencies") - .and_then(|d| d.as_mapping()) + // Read extensions from the new `extensions` array format + if let Some(extensions) = + 
merged_value.get("extensions").and_then(|e| e.as_sequence()) { - for (_dep_name, dep_spec) in dependencies { - // Check for extension dependency - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is a versioned extension (has vsn field) - if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { - let ext_dep = ExtensionDependency::Versioned { + for ext in extensions { + if let Some(ext_name) = ext.as_str() { + // Check if this extension has a source: field (remote extension) + if let Some(Some(source)) = ext_sources.get(ext_name) { + // Remote extension with source field + required_extensions.insert(ExtensionDependency::Remote { name: ext_name.to_string(), - version: version.to_string(), - }; - required_extensions.insert(ext_dep); - } - // Check if this is an external extension (has config field) - else if let Some(external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - let ext_dep = ExtensionDependency::External { - name: ext_name.to_string(), - config_path: external_config.to_string(), - }; - required_extensions.insert(ext_dep.clone()); - - // Recursively find nested external extension dependencies - self.find_nested_external_extensions( - config, - &ext_dep, - &mut required_extensions, - &mut visited, - )?; + source: source.clone(), + }); } else { - // Check if this extension has a source: field (remote extension) - let ext_source = parsed - .get("ext") - .and_then(|e| e.get(ext_name)) - .and_then(|ext| ext.get("source")) - .and_then(|s| ExtensionSource::from_yaml(s)); - - if let Some(source) = ext_source { - // Remote extension with source field - required_extensions.insert(ExtensionDependency::Remote { - name: ext_name.to_string(), - source, - }); - } else { - // Local extension - required_extensions - .insert(ExtensionDependency::Local(ext_name.to_string())); - } + // Local extension (defined in ext section without source, or not in ext section) + required_extensions + 
.insert(ExtensionDependency::Local(ext_name.to_string())); } } } @@ -528,123 +515,6 @@ impl BuildCommand { Ok(extensions) } - /// Recursively find nested external extension dependencies - fn find_nested_external_extensions( - &self, - config: &Config, - ext_dep: &ExtensionDependency, - required_extensions: &mut HashSet, - visited: &mut HashSet, - ) -> Result<()> { - let (ext_name, ext_config_path) = match ext_dep { - ExtensionDependency::External { name, config_path } => (name, config_path), - ExtensionDependency::Local(_) => return Ok(()), // Local extensions don't have nested external deps - ExtensionDependency::Versioned { .. } => return Ok(()), // Versioned extensions don't have nested deps - ExtensionDependency::Remote { .. } => return Ok(()), // Remote extensions are handled separately - }; - - // Cycle detection: check if we've already processed this extension - let ext_key = format!("{ext_name}:{ext_config_path}"); - if visited.contains(&ext_key) { - if self.verbose { - print_info( - &format!("Skipping already processed extension '{ext_name}' to avoid cycles"), - OutputLevel::Normal, - ); - } - return Ok(()); - } - visited.insert(ext_key); - - // Load the external extension configuration - let resolved_external_config_path = - config.resolve_path_relative_to_src_dir(&self.config_path, ext_config_path); - let external_extensions = - config.load_external_extensions(&self.config_path, ext_config_path)?; - - let extension_config = external_extensions.get(ext_name).ok_or_else(|| { - anyhow::anyhow!( - "Extension '{ext_name}' not found in external config file '{ext_config_path}'" - ) - })?; - - // Load the nested config file to get its src_dir setting - let nested_config_content = std::fs::read_to_string(&resolved_external_config_path) - .with_context(|| { - format!( - "Failed to read nested config file: {}", - resolved_external_config_path.display() - ) - })?; - let nested_config: serde_yaml::Value = serde_yaml::from_str(&nested_config_content) - 
.with_context(|| { - format!( - "Failed to parse nested config file: {}", - resolved_external_config_path.display() - ) - })?; - - // Create a temporary Config object for the nested config to handle its src_dir - let nested_config_obj = serde_yaml::from_value::(nested_config.clone())?; - - // Check if this external extension has dependencies - if let Some(dependencies) = extension_config - .get("dependencies") - .and_then(|d| d.as_mapping()) - { - for (_dep_name, dep_spec) in dependencies { - // Check for nested extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is a nested external extension (has config field) - if let Some(nested_external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - // Resolve the nested config path relative to the nested config's src_dir - let nested_config_path = nested_config_obj - .resolve_path_relative_to_src_dir( - &resolved_external_config_path, - nested_external_config, - ); - - let nested_ext_dep = ExtensionDependency::External { - name: nested_ext_name.to_string(), - config_path: nested_config_path.to_string_lossy().to_string(), - }; - - // Add the nested extension to required extensions - required_extensions.insert(nested_ext_dep.clone()); - - if self.verbose { - print_info( - &format!("Found nested external extension '{nested_ext_name}' required by '{ext_name}' at '{}'", nested_config_path.display()), - OutputLevel::Normal, - ); - } - - // Recursively process the nested extension - self.find_nested_external_extensions( - config, - &nested_ext_dep, - required_extensions, - visited, - )?; - } else { - // This is a local extension dependency within the external config - // We don't need to process it further as it will be handled during build - if self.verbose { - print_info( - &format!("Found local extension dependency '{nested_ext_name}' in external extension '{ext_name}'"), - OutputLevel::Normal, - ); - } - } - } - } - } - - Ok(()) - } - /// 
Build an external extension using its own config file async fn build_external_extension( &self, diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index d317efe..f859339 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -105,7 +105,7 @@ impl ExtBuildCommand { // Validate stamps before proceeding (unless --no-stamps) if !self.no_stamps { let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Resolve required stamps for extension build let required = resolve_required_stamps( @@ -230,7 +230,7 @@ impl ExtBuildCommand { // Handle compile dependencies with install scripts before building the extension // Pass the ext_config_path so SDK compile sections are loaded from the correct config - self.handle_compile_dependencies(&config, &ext_config, &target, &ext_config_path) + self.handle_compile_dependencies(config, &ext_config, &target, &ext_config_path) .await?; // Get extension types from the types array (defaults to ["sysext", "confext"]) @@ -394,10 +394,10 @@ impl ExtBuildCommand { .ok_or_else(|| anyhow::anyhow!("No SDK container image specified in configuration."))?; // Resolve target with proper precedence - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Initialize SDK container helper - let container_helper = SdkContainer::from_config(&self.config_path, &config)?; + let container_helper = SdkContainer::from_config(&self.config_path, config)?; // Determine the extension source path for overlay resolution // For remote extensions, files are in $AVOCADO_PREFIX/includes// @@ -517,7 +517,7 @@ impl ExtBuildCommand { }; let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); 
container_helper.run_in_container(run_config).await?; if self.verbose { diff --git a/src/commands/ext/dnf.rs b/src/commands/ext/dnf.rs index 446babf..ff0b7ae 100644 --- a/src/commands/ext/dnf.rs +++ b/src/commands/ext/dnf.rs @@ -56,15 +56,15 @@ impl ExtDnfCommand { let parsed = &composed.merged_value; let target = self.resolve_target_architecture(config)?; - let extension_location = self.find_extension_in_dependency_tree(&config, &target)?; - let container_image = self.get_container_image(&config)?; + let extension_location = self.find_extension_in_dependency_tree(config, &target)?; + let container_image = self.get_container_image(config)?; // Get repo_url and repo_release from config let repo_url = config.get_sdk_repo_url(); let repo_release = config.get_sdk_repo_release(); self.execute_dnf_command( - &parsed, + parsed, &container_image, &target, repo_url.as_ref(), diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index 4ee5876..231ee02 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -81,7 +81,7 @@ impl ExtImageCommand { // Get repo_url and repo_release from config let repo_url = config.get_sdk_repo_url(); let repo_release = config.get_sdk_repo_release(); - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // Get SDK configuration from interpolated config (needed for stamp validation) let container_image = config @@ -91,7 +91,7 @@ impl ExtImageCommand { // Validate stamps before proceeding (unless --no-stamps) if !self.no_stamps { let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Resolve required stamps for extension image let required = resolve_required_stamps( @@ -142,9 +142,7 @@ impl ExtImageCommand { // and may not find templated extension names like "avocado-bsp-{{ avocado.target }}" let 
extension_location = { // First check if extension exists in the composed config's ext section - let ext_in_composed = parsed - .get("ext") - .and_then(|e| e.get(&self.extension)); + let ext_in_composed = parsed.get("ext").and_then(|e| e.get(&self.extension)); if let Some(ext_config) = ext_in_composed { // Check if it has a source: field (indicating remote extension) @@ -173,7 +171,10 @@ impl ExtImageCommand { config .find_extension_in_dependency_tree(&self.config_path, &self.extension, &target)? .ok_or_else(|| { - anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + anyhow::anyhow!( + "Extension '{}' not found in configuration.", + self.extension + ) })? } }; @@ -228,13 +229,10 @@ impl ExtImageCommand { if self.verbose { if let Some(all_ext) = parsed.get("ext") { if let Some(ext_map) = all_ext.as_mapping() { - let ext_names: Vec<_> = ext_map - .keys() - .filter_map(|k| k.as_str()) - .collect(); + let ext_names: Vec<_> = + ext_map.keys().filter_map(|k| k.as_str()).collect(); eprintln!( - "[DEBUG] Available extensions in composed config: {:?}", - ext_names + "[DEBUG] Available extensions in composed config: {ext_names:?}" ); } } @@ -328,7 +326,7 @@ impl ExtImageCommand { .and_then(|runtime_config| runtime_config.get("target")) .and_then(|target| target.as_str()) .map(|s| s.to_string()); - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Initialize SDK container helper let container_helper = SdkContainer::new(); @@ -370,7 +368,7 @@ impl ExtImageCommand { // Write extension image stamp (unless --no-stamps) if !self.no_stamps { - let inputs = compute_ext_input_hash(&parsed, &self.extension)?; + let inputs = compute_ext_input_hash(parsed, &self.extension)?; let outputs = StampOutputs::default(); let stamp = Stamp::ext_image(&self.extension, &target, inputs, outputs); let stamp_script = generate_write_stamp_script(&stamp)?; @@ -391,7 
+389,7 @@ impl ExtImageCommand { }; let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); container_helper.run_in_container(run_config).await?; if self.verbose { diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index d86a64f..b01269b 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -173,12 +173,10 @@ impl ExtPackageCommand { // Also get the raw (unmerged) extension config to find all target-specific overlays // For remote extensions, use the parsed config; for local, read from file let raw_ext_config = match &extension_location { - ExtensionLocation::Remote { .. } => { - parsed - .get("ext") - .and_then(|ext| ext.get(&self.extension)) - .cloned() - } + ExtensionLocation::Remote { .. } => parsed + .get("ext") + .and_then(|ext| ext.get(&self.extension)) + .cloned(), _ => self.get_raw_extension_config(&ext_config_path)?, }; @@ -208,7 +206,7 @@ impl ExtPackageCommand { let output_path = self .create_rpm_package_in_container( &rpm_metadata, - &config, + config, &target, &ext_config_path, &package_files, diff --git a/src/commands/runtime/build.rs b/src/commands/runtime/build.rs index 44fe435..fed254a 100644 --- a/src/commands/runtime/build.rs +++ b/src/commands/runtime/build.rs @@ -103,11 +103,11 @@ impl RuntimeBuildCommand { .map(|s| s.to_string()); // Resolve target architecture - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Initialize SDK container helper let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Create shared RunsOnContext if running on remote host let mut runs_on_context: Option = if let Some(ref runs_on) = self.runs_on { @@ -123,8 +123,8 
@@ impl RuntimeBuildCommand { // Execute the build and ensure cleanup let result = self .execute_build_internal( - &config, - &parsed, + config, + parsed, container_image, &target_arch, &merged_container_args, @@ -379,38 +379,25 @@ impl RuntimeBuildCommand { ) })?; - let binding = serde_yaml::Mapping::new(); - let runtime_deps = merged_runtime - .get("dependencies") - .and_then(|v| v.as_mapping()) - .unwrap_or(&binding); - - // Extract extension names and any type overrides from runtime dependencies + // Extract extension names from the `extensions` array let mut required_extensions = HashSet::new(); - let mut extension_type_overrides: HashMap> = HashMap::new(); - - // First, collect direct runtime dependencies - for (_dep_name, dep_spec) in runtime_deps { - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - required_extensions.insert(ext_name.to_string()); - - // Check if the runtime dependency specifies custom types - if let Some(types) = dep_spec.get("types").and_then(|v| v.as_sequence()) { - let type_strings: Vec = types - .iter() - .filter_map(|v| v.as_str()) - .map(|s| s.to_string()) - .collect(); - if !type_strings.is_empty() { - extension_type_overrides.insert(ext_name.to_string(), type_strings); - } + let _extension_type_overrides: HashMap> = HashMap::new(); + + // Collect extensions from the new `extensions` array format + if let Some(extensions) = merged_runtime + .get("extensions") + .and_then(|e| e.as_sequence()) + { + for ext in extensions { + if let Some(ext_name) = ext.as_str() { + required_extensions.insert(ext_name.to_string()); } } } // Recursively discover all extension dependencies (including nested external extensions) let all_required_extensions = - self.find_all_extension_dependencies(&config, &required_extensions, target_arch)?; + self.find_all_extension_dependencies(config, &required_extensions, target_arch)?; // Build a map from extension name to versioned name from resolved_extensions // Format of 
resolved_extensions items: "ext_name-version" (e.g., "my-ext-1.0.0") @@ -648,11 +635,11 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME /// Recursively collect all dependencies for a single extension fn collect_extension_dependencies( &self, - config: &crate::utils::config::Config, + _config: &crate::utils::config::Config, ext_name: &str, all_extensions: &mut HashSet, visited: &mut HashSet, - target_arch: &str, + _target_arch: &str, ) -> Result<()> { // Avoid infinite loops if visited.contains(ext_name) { @@ -667,119 +654,26 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME let content = std::fs::read_to_string(&self.config_path)?; let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; - // Check if this is a local extension + // Check if this is a local extension defined in the ext section + // Extension source configuration (repo, git, path) is now in the ext section if let Some(ext_config) = parsed .get("ext") .and_then(|e| e.as_mapping()) .and_then(|table| table.get(ext_name)) { - // This is a local extension - check its dependencies - if let Some(dependencies) = ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { - for (_dep_name, dep_spec) in dependencies { - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is an external extension dependency - if let Some(external_config_path) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - // This is an external extension - load its config and process recursively - let external_extensions = config.load_external_extensions( - &self.config_path, - external_config_path, - )?; - - // Add the external extension itself - self.collect_extension_dependencies( - config, - nested_ext_name, - all_extensions, - visited, - target_arch, - )?; - - // Process its dependencies from the external config - if let Some(ext_config) = external_extensions.get(nested_ext_name) { - if let Some(nested_deps) = - ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { - for 
(_nested_dep_name, nested_dep_spec) in nested_deps { - if let Some(nested_nested_ext_name) = - nested_dep_spec.get("ext").and_then(|v| v.as_str()) - { - self.collect_extension_dependencies( - config, - nested_nested_ext_name, - all_extensions, - visited, - target_arch, - )?; - } - } - } - } - } else { - // This is a local extension dependency - self.collect_extension_dependencies( - config, - nested_ext_name, - all_extensions, - visited, - target_arch, - )?; - } - } - } - } - } else { - // This might be an external extension - we need to find it in the runtime dependencies - // to get its config path, then process its dependencies - let merged_runtime = config - .get_merged_runtime_config(&self.runtime_name, target_arch, &self.config_path)? - .with_context(|| { - format!( - "Runtime '{}' not found or has no configuration for target '{}'", - self.runtime_name, target_arch - ) - })?; - - if let Some(runtime_deps) = merged_runtime - .get("dependencies") - .and_then(|v| v.as_mapping()) + // This is a local extension - check if it has an extensions array for nested deps + if let Some(nested_extensions) = + ext_config.get("extensions").and_then(|e| e.as_sequence()) { - for (_dep_name, dep_spec) in runtime_deps { - if let Some(dep_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - if dep_ext_name == ext_name { - if let Some(external_config_path) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - // Found the external extension - process its dependencies - let external_extensions = config.load_external_extensions( - &self.config_path, - external_config_path, - )?; - - if let Some(ext_config) = external_extensions.get(ext_name) { - if let Some(nested_deps) = - ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { - for (_nested_dep_name, nested_dep_spec) in nested_deps { - if let Some(nested_ext_name) = - nested_dep_spec.get("ext").and_then(|v| v.as_str()) - { - self.collect_extension_dependencies( - config, - nested_ext_name, - all_extensions, - 
visited, - target_arch, - )?; - } - } - } - } - } - break; - } + for nested_ext in nested_extensions { + if let Some(nested_ext_name) = nested_ext.as_str() { + self.collect_extension_dependencies( + _config, + nested_ext_name, + all_extensions, + visited, + _target_arch, + )?; } } } @@ -807,29 +701,29 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME let merged_runtime = config.get_merged_runtime_config(runtime_name, target_arch, config_path)?; - let runtime_dep_table = merged_runtime + let mut extensions = Vec::new(); + + // Read extensions from the new `extensions` array format + let ext_list = merged_runtime .as_ref() - .and_then(|value| value.get("dependencies").and_then(|d| d.as_mapping())) + .and_then(|value| value.get("extensions").and_then(|e| e.as_sequence())) .or_else(|| { parsed .get("runtime") .and_then(|r| r.get(runtime_name)) - .and_then(|runtime_value| runtime_value.get("dependencies")) - .and_then(|d| d.as_mapping()) + .and_then(|runtime_value| runtime_value.get("extensions")) + .and_then(|e| e.as_sequence()) }); - let mut extensions = Vec::new(); - - if let Some(deps) = runtime_dep_table { - for dep_spec in deps.values() { - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_seq) = ext_list { + for ext in ext_seq { + if let Some(ext_name) = ext.as_str() { let version = self .resolve_extension_version( parsed, config, config_path, ext_name, - dep_spec, container_image, target_arch, container_args.clone(), @@ -846,51 +740,22 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME Ok(extensions) } - /// Resolve the version for an extension dependency. + /// Resolve the version for an extension. /// /// Priority order: - /// 1. Explicit `vsn` field in the dependency spec (unless it's "*") - /// 2. Version from external config file (if `config` field is specified) - /// 3. Version from local `[ext]` section - /// 4. Query RPM database for installed version + /// 1. Version from local `[ext]` section + /// 2. 
Query RPM database for installed version (for repo-sourced extensions) #[allow(clippy::too_many_arguments)] async fn resolve_extension_version( &self, parsed: &serde_yaml::Value, - config: &crate::utils::config::Config, - config_path: &str, + _config: &crate::utils::config::Config, + _config_path: &str, ext_name: &str, - dep_spec: &serde_yaml::Value, container_image: &str, target_arch: &str, container_args: Option>, ) -> Result { - // If version is explicitly specified with vsn field, use it (unless it's a wildcard) - if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { - if version != "*" { - return Ok(version.to_string()); - } - // If vsn is "*", fall through to query RPM for the actual installed version - } - - // If external config is specified, try to get version from it - if let Some(external_config_path) = dep_spec.get("config").and_then(|v| v.as_str()) { - let external_extensions = - config.load_external_extensions(config_path, external_config_path)?; - if let Some(ext_config) = external_extensions.get(ext_name) { - if let Some(version) = ext_config.get("version").and_then(|v| v.as_str()) { - if version != "*" { - return Ok(version.to_string()); - } - // If version is "*", fall through to query RPM - } - } - // External config but no version found or version is "*" - query RPM database - return self - .query_rpm_version(ext_name, container_image, target_arch, container_args) - .await; - } - // Try to get version from local [ext] section if let Some(version) = parsed .get("ext") @@ -1068,9 +933,8 @@ sdk: runtime: test-runtime: target: "x86_64" - dependencies: - test-dep: - ext: test-ext + extensions: + - test-ext ext: test-ext: @@ -1102,7 +966,7 @@ ext: } #[test] - fn test_create_build_script_with_type_overrides() { + fn test_create_build_script_with_extension_types() { let temp_dir = TempDir::new().unwrap(); let config_content = r#" sdk: @@ -1111,11 +975,8 @@ sdk: runtime: test-runtime: target: "x86_64" - dependencies: - test-dep: - ext: 
test-ext - types: - - sysext + extensions: + - test-ext ext: test-ext: @@ -1150,7 +1011,7 @@ ext: } #[test] - fn test_create_build_script_no_type_override_uses_extension_defaults() { + fn test_create_build_script_uses_extension_defaults() { let temp_dir = TempDir::new().unwrap(); let config_content = r#" sdk: @@ -1159,9 +1020,8 @@ sdk: runtime: test-runtime: target: "x86_64" - dependencies: - test-dep: - ext: test-ext + extensions: + - test-ext ext: test-ext: diff --git a/src/commands/runtime/deps.rs b/src/commands/runtime/deps.rs index 8ac2295..9b465a3 100644 --- a/src/commands/runtime/deps.rs +++ b/src/commands/runtime/deps.rs @@ -65,18 +65,25 @@ impl RuntimeDepsCommand { .get(runtime_name) .with_context(|| format!("Runtime '{runtime_name}' not found"))?; - let runtime_deps = runtime_spec - .get("dependencies") - .and_then(|v| v.as_mapping()); - let mut dependencies = Vec::new(); - if let Some(deps_table) = runtime_deps { + // New way: Read extensions from the `extensions` array + if let Some(extensions) = runtime_spec.get("extensions").and_then(|e| e.as_sequence()) { + for ext in extensions { + if let Some(ext_name) = ext.as_str() { + dependencies.push(self.resolve_extension_dependency(parsed, ext_name)); + } + } + } + + // Read package dependencies from the `dependencies` section + if let Some(deps_table) = runtime_spec + .get("dependencies") + .and_then(|v| v.as_mapping()) + { for (dep_name_val, dep_spec) in deps_table { if let Some(dep_name) = dep_name_val.as_str() { - if let Some(dependency) = self.resolve_dependency(parsed, dep_name, dep_spec) { - dependencies.push(dependency); - } + dependencies.push(self.resolve_package_dependency(dep_name, dep_spec)); } } } @@ -85,32 +92,6 @@ impl RuntimeDepsCommand { Ok(dependencies) } - fn resolve_dependency( - &self, - parsed: &serde_yaml::Value, - dep_name: &str, - dep_spec: &serde_yaml::Value, - ) -> Option<(String, String, String)> { - // Try to resolve as extension reference first - if let Some(ext_name) = 
dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is a versioned extension (has vsn field) - if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { - return Some(("ext".to_string(), ext_name.to_string(), version.to_string())); - } - // Check if this is an external extension (has config field) - else if dep_spec.get("config").is_some() { - // For external extensions, we don't have a local version, so use "*" - return Some(("ext".to_string(), ext_name.to_string(), "*".to_string())); - } else { - // Local extension - resolve from local config - return Some(self.resolve_extension_dependency(parsed, ext_name)); - } - } - - // Otherwise treat as package dependency - Some(self.resolve_package_dependency(dep_name, dep_spec)) - } - fn resolve_extension_dependency( &self, parsed: &serde_yaml::Value, @@ -132,12 +113,18 @@ impl RuntimeDepsCommand { dep_name: &str, dep_spec: &serde_yaml::Value, ) -> (String, String, String) { - let version = dep_spec - .get("version") - .and_then(|v| v.as_str()) - .unwrap_or("*"); + // Version can be a string directly or in a mapping with 'version' key + let version = if let Some(v) = dep_spec.as_str() { + v.to_string() + } else { + dep_spec + .get("version") + .and_then(|v| v.as_str()) + .unwrap_or("*") + .to_string() + }; - ("pkg".to_string(), dep_name.to_string(), version.to_string()) + ("pkg".to_string(), dep_name.to_string(), version) } fn sort_dependencies(&self, dependencies: &mut [(String, String, String)]) { @@ -169,11 +156,10 @@ sdk: runtime: test-runtime: target: "x86_64" + extensions: + - my-extension dependencies: - gcc: - version: "11.0" - app-ext: - ext: my-extension + gcc: "11.0" ext: my-extension: diff --git a/src/commands/runtime/dnf.rs b/src/commands/runtime/dnf.rs index dce981d..d0c38c0 100644 --- a/src/commands/runtime/dnf.rs +++ b/src/commands/runtime/dnf.rs @@ -61,7 +61,7 @@ impl RuntimeDnfCommand { let repo_release = config.get_sdk_repo_release(); self.execute_dnf_command( - &parsed, + 
parsed, &container_image, &target, repo_url.as_ref(), diff --git a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index 51eed64..3daddb4 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -159,8 +159,8 @@ impl RuntimeInstallCommand { // Execute installation and ensure cleanup let result = self .execute_install_internal( - &parsed, - &config, + parsed, + config, &runtimes_to_install, &container_helper, container_image, @@ -397,7 +397,9 @@ impl RuntimeInstallCommand { let sysroot = SysrootType::Runtime(runtime.to_string()); if let Some(serde_yaml::Value::Mapping(deps_map)) = dependencies { - // Build list of packages to install (excluding extension references) + // Build list of packages to install + // Note: Extensions are now listed in the separate `extensions` array, + // so dependencies should only contain package references. let mut packages = Vec::new(); let mut package_names = Vec::new(); for (package_name_val, version_spec) in deps_map { @@ -407,32 +409,6 @@ impl RuntimeInstallCommand { None => continue, // Skip if package name is not a string }; - // Skip extension dependencies (identified by 'ext' key) - // Note: Extension dependencies are handled by the main install command, - // not by individual runtime install - if let serde_yaml::Value::Mapping(spec_map) = version_spec { - if spec_map.contains_key(serde_yaml::Value::String("ext".to_string())) { - if self.verbose { - let dep_type = if spec_map - .contains_key(serde_yaml::Value::String("vsn".to_string())) - { - "versioned extension" - } else if spec_map - .contains_key(serde_yaml::Value::String("config".to_string())) - { - "external extension" - } else { - "local extension" - }; - print_debug( - &format!("Skipping {dep_type} dependency '{package_name}' (handled by main install command)"), - OutputLevel::Normal, - ); - } - continue; - } - } - let config_version = if let Some(version) = version_spec.as_str() { version.to_string() } else if let 
serde_yaml::Value::Mapping(spec_map) = version_spec { diff --git a/src/commands/runtime/provision.rs b/src/commands/runtime/provision.rs index 4f9844f..37ea126 100644 --- a/src/commands/runtime/provision.rs +++ b/src/commands/runtime/provision.rs @@ -90,7 +90,7 @@ impl RuntimeProvisionCommand { .map(|s| s.to_string()); // Resolve target architecture - let target_arch = resolve_target_required(self.config.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.config.target.as_deref(), config)?; // Detect remote host architecture if using --runs-on // This is needed to check if the SDK is installed for the remote's architecture @@ -113,7 +113,7 @@ impl RuntimeProvisionCommand { // Validate stamps before proceeding (unless --no-stamps) if !self.config.no_stamps { - let container_helper = SdkContainer::from_config(&self.config.config_path, &config)? + let container_helper = SdkContainer::from_config(&self.config.config_path, config)? .verbose(self.config.verbose); // Provision requires runtime build stamp @@ -180,8 +180,8 @@ impl RuntimeProvisionCommand { // For package repository extensions, we query the RPM database to get actual installed versions let resolved_extensions = self .collect_runtime_extensions( - &parsed, - &config, + parsed, + config, &self.config.runtime_name, target_arch.as_str(), &self.config.config_path, @@ -303,7 +303,7 @@ impl RuntimeProvisionCommand { }; // Check if runtime has signing configured - let signing_config = self.setup_signing_service(&config).await?; + let signing_config = self.setup_signing_service(config).await?; // Initialize SDK container helper let container_helper = SdkContainer::new(); @@ -381,7 +381,7 @@ impl RuntimeProvisionCommand { // Write provision stamp (unless --no-stamps) if !self.config.no_stamps { - let container_helper = SdkContainer::from_config(&self.config.config_path, &config)? + let container_helper = SdkContainer::from_config(&self.config.config_path, config)? 
.verbose(self.config.verbose); let inputs = StampInputs::new("provision".to_string()); diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index 8ec8720..ef3c11f 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -90,9 +90,9 @@ impl SdkCompileCommand { let container_image = config .get_sdk_image() .context("No SDK container image specified in configuration")?; - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); let requirements = vec![StampRequirement::sdk_install()]; @@ -154,7 +154,7 @@ impl SdkCompileCommand { let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); // Get compile sections from config - let compile_sections = self.get_compile_sections_from_config(&config); + let compile_sections = self.get_compile_sections_from_config(config); if compile_sections.is_empty() { // If specific sections were requested but none found, this is an error @@ -221,7 +221,7 @@ impl SdkCompileCommand { let repo_release = config.get_sdk_repo_release(); // Resolve target with proper precedence - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; let mut overall_success = true; @@ -235,7 +235,7 @@ impl SdkCompileCommand { ); let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); let compile_command = format!( r#"if [ -f '{}' ]; then echo 'Running compile script: {}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{}'; else echo 'Compile script {} not found.' 
&& ls -la; exit 1; fi"#, diff --git a/src/utils/config.rs b/src/utils/config.rs index eb03922..228986b 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -108,6 +108,7 @@ mod container_args_deserializer { /// Represents the location of an extension #[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[allow(dead_code)] pub enum ExtensionLocation { /// Extension defined in the main config file Local { name: String, config_path: String }, @@ -116,7 +117,6 @@ pub enum ExtensionLocation { #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] External { name: String, config_path: String }, /// Remote extension fetched from a source (repo, git, or path) - #[allow(dead_code)] Remote { name: String, source: ExtensionSource, @@ -217,92 +217,11 @@ impl ExtensionSource { false } - /// Parse ExtensionSource from a YAML value - pub fn from_yaml(value: &serde_yaml::Value) -> Option { - let map = value.as_mapping()?; - - // Check for "repo" source type - if let Some(repo_val) = map.get(&serde_yaml::Value::String("repo".to_string())) { - let version = repo_val.as_str().unwrap_or("*").to_string(); - let package = map - .get(&serde_yaml::Value::String("package".to_string())) - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - let repo_name = map - .get(&serde_yaml::Value::String("repo_name".to_string())) - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - let include = map - .get(&serde_yaml::Value::String("include".to_string())) - .and_then(|v| v.as_sequence()) - .map(|seq| { - seq.iter() - .filter_map(|v| v.as_str()) - .map(|s| s.to_string()) - .collect() - }); - return Some(ExtensionSource::Repo { - version, - package, - repo_name, - include, - }); - } - - // Check for "git" source type - if let Some(git_val) = map.get(&serde_yaml::Value::String("git".to_string())) { - let url = git_val.as_str()?.to_string(); - let git_ref = map - .get(&serde_yaml::Value::String("ref".to_string())) - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - let 
sparse_checkout = map - .get(&serde_yaml::Value::String("sparse_checkout".to_string())) - .and_then(|v| v.as_sequence()) - .map(|seq| { - seq.iter() - .filter_map(|v| v.as_str()) - .map(|s| s.to_string()) - .collect() - }); - let include = map - .get(&serde_yaml::Value::String("include".to_string())) - .and_then(|v| v.as_sequence()) - .map(|seq| { - seq.iter() - .filter_map(|v| v.as_str()) - .map(|s| s.to_string()) - .collect() - }); - return Some(ExtensionSource::Git { - url, - git_ref, - sparse_checkout, - include, - }); - } - - // Check for "path" source type - if let Some(path_val) = map.get(&serde_yaml::Value::String("path".to_string())) { - let path = path_val.as_str()?.to_string(); - let include = map - .get(&serde_yaml::Value::String("include".to_string())) - .and_then(|v| v.as_sequence()) - .map(|seq| { - seq.iter() - .filter_map(|v| v.as_str()) - .map(|s| s.to_string()) - .collect() - }); - return Some(ExtensionSource::Path { path, include }); - } - - None - } } /// Represents an extension dependency for a runtime with type information #[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[allow(dead_code)] pub enum RuntimeExtDep { /// Extension defined in the config (local or fetched remote) Local(String), @@ -318,6 +237,7 @@ pub enum RuntimeExtDep { impl RuntimeExtDep { /// Get the extension name + #[allow(deprecated)] pub fn name(&self) -> &str { match self { RuntimeExtDep::Local(name) => name, @@ -327,64 +247,6 @@ impl RuntimeExtDep { } } -/// Result of parsing an extension reference from a dependency spec -#[derive(Debug, Clone)] -pub enum ExtRefParsed { - /// Extension reference found - Extension { - /// Extension name - name: String, - /// Optional external config path - config: Option, - /// Optional version (for versioned/deprecated syntax) - version: Option, - }, - /// Not an extension reference (e.g., package dependency) - NotExtension, -} - -/// Parse an extension reference from a dependency specification. 
-/// -/// Handles both shorthand and object forms: -/// - `key: ext` → Extension { name: key, config: None, version: None } -/// - `key: { ext: name }` → Extension { name, config: None, version: None } -/// - `key: { ext: name, config: path }` → Extension { name, config: Some(path), version: None } -/// - `key: { ext: name, vsn: ver }` → Extension { name, config: None, version: Some(ver) } (deprecated) -/// - `key: "version"` → NotExtension (package dependency) -pub fn parse_ext_ref(dep_name: &str, dep_spec: &serde_yaml::Value) -> ExtRefParsed { - // Shorthand: "my-ext: ext" means { ext: my-ext } - if let Some(value_str) = dep_spec.as_str() { - if value_str == "ext" { - return ExtRefParsed::Extension { - name: dep_name.to_string(), - config: None, - version: None, - }; - } - // Otherwise it's a package dependency with version string - return ExtRefParsed::NotExtension; - } - - // Object form: { ext: name, ... } - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - let config = dep_spec - .get("config") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - let version = dep_spec - .get("vsn") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - return ExtRefParsed::Extension { - name: ext_name.to_string(), - config, - version, - }; - } - - ExtRefParsed::NotExtension -} - /// A composed configuration that merges the main config with external extension configs. 
/// /// This struct provides a unified view where: @@ -904,20 +766,8 @@ impl Config { .flatten(); // Check for verbose/debug mode via environment variable - let verbose = std::env::var("AVOCADO_DEBUG").is_ok() - || std::env::var("AVOCADO_VERBOSE").is_ok(); - - // Always output merge info to stderr for debugging (TODO: remove after fixing) - eprintln!( - "[DEBUG-MERGE] merge_installed_remote_extensions: found {} remote extensions: {:?}", - remote_extensions.len(), - remote_extensions.iter().map(|(n, _)| n).collect::>() - ); - eprintln!( - "[DEBUG-MERGE] resolved_target: {}, volume_state: {:?}", - resolved_target, - volume_state.as_ref().map(|v| &v.volume_name) - ); + let verbose = + std::env::var("AVOCADO_DEBUG").is_ok() || std::env::var("AVOCADO_VERBOSE").is_ok(); if verbose { eprintln!( @@ -937,27 +787,15 @@ impl Config { let ext_content = { // Method 1: Check if we're inside a container and can read directly // The standard container path is /opt/_avocado//includes//avocado.yaml - let container_direct_path = format!( - "/opt/_avocado/{}/includes/{}/avocado.yaml", - resolved_target, ext_name - ); + let container_direct_path = + format!("/opt/_avocado/{resolved_target}/includes/{ext_name}/avocado.yaml"); let container_path = Path::new(&container_direct_path); - // Always output for debugging (TODO: remove after fixing) - eprintln!( - "[DEBUG-MERGE] Checking for '{}' config at: {} (exists: {})", - ext_name, container_direct_path, container_path.exists() - ); - if verbose { eprintln!( - "[DEBUG] Checking for remote extension '{}' config at: {}", - ext_name, container_direct_path - ); - eprintln!( - "[DEBUG] Path exists: {}", - container_path.exists() + "[DEBUG] Checking for remote extension '{ext_name}' config at: {container_direct_path}" ); + eprintln!("[DEBUG] Path exists: {}", container_path.exists()); } if container_path.exists() { @@ -974,7 +812,7 @@ impl Config { } Err(e) => { if verbose { - eprintln!("[DEBUG] Failed to read: {}", e); + eprintln!("[DEBUG] 
Failed to read: {e}"); } continue; } @@ -991,16 +829,13 @@ impl Config { { Ok(content) => { if verbose { - eprintln!( - "[DEBUG] Read {} bytes via container", - content.len() - ); + eprintln!("[DEBUG] Read {} bytes via container", content.len()); } content } Err(e) => { if verbose { - eprintln!("[DEBUG] Container read failed: {}", e); + eprintln!("[DEBUG] Container read failed: {e}"); } // Extension not installed yet or config not found, skip continue; @@ -1027,7 +862,7 @@ impl Config { } } else { if verbose { - eprintln!("[DEBUG] No config found for '{}', skipping", ext_name); + eprintln!("[DEBUG] No config found for '{ext_name}', skipping"); } continue; } @@ -1039,19 +874,16 @@ impl Config { let ext_config = match Self::parse_config_value(&ext_config_path, &ext_content) { Ok(cfg) => { if verbose { - eprintln!("[DEBUG] Successfully parsed config for '{}'", ext_name); + eprintln!("[DEBUG] Successfully parsed config for '{ext_name}'"); // Show what extensions are defined in this remote config if let Some(ext_section) = cfg.get("ext").and_then(|e| e.as_mapping()) { - let ext_names: Vec<_> = ext_section - .keys() - .filter_map(|k| k.as_str()) - .collect(); - eprintln!( - "[DEBUG] Remote config defines extensions: {:?}", - ext_names - ); + let ext_names: Vec<_> = + ext_section.keys().filter_map(|k| k.as_str()).collect(); + eprintln!("[DEBUG] Remote config defines extensions: {ext_names:?}"); // Show the extension section that matches our name - if let Some(our_ext) = ext_section.get(serde_yaml::Value::String(ext_name.clone())) { + if let Some(our_ext) = + ext_section.get(serde_yaml::Value::String(ext_name.clone())) + { eprintln!( "[DEBUG] Extension '{}' in remote config:\n{}", ext_name, @@ -1059,8 +891,7 @@ impl Config { ); } else { eprintln!( - "[DEBUG] Extension '{}' NOT found in remote config's ext section", - ext_name + "[DEBUG] Extension '{ext_name}' NOT found in remote config's ext section" ); } } else { @@ -1071,7 +902,7 @@ impl Config { } Err(e) => { if verbose 
{ - eprintln!("[DEBUG] Failed to parse config for '{}': {}", ext_name, e); + eprintln!("[DEBUG] Failed to parse config for '{ext_name}': {e}"); } // Failed to parse config, skip this extension continue; @@ -1100,16 +931,9 @@ impl Config { let auto_include_compile = Self::find_compile_dependencies_in_ext(&ext_config, &ext_name); - // Always output for debugging (TODO: remove after fixing) - eprintln!( - "[DEBUG-MERGE] Merging '{}' into main config", - ext_name - ); - if verbose { eprintln!( - "[DEBUG] Merging '{}' with include_patterns: {:?}, auto_include_compile: {:?}", - ext_name, include_patterns, auto_include_compile + "[DEBUG] Merging '{ext_name}' with include_patterns: {include_patterns:?}, auto_include_compile: {auto_include_compile:?}" ); } @@ -1122,15 +946,6 @@ impl Config { &auto_include_compile, ); - // Always output for debugging (TODO: remove after fixing) - if let Some(main_ext) = main_config.get("ext").and_then(|e| e.get(&ext_name)) { - eprintln!( - "[DEBUG-MERGE] After merge, ext.{}:\n{}", - ext_name, - serde_yaml::to_string(main_ext).unwrap_or_default() - ); - } - if verbose { // Show what the main config's ext section looks like after merge if let Some(main_ext) = main_config.get("ext").and_then(|e| e.get(&ext_name)) { @@ -1607,8 +1422,7 @@ impl Config { ]; for suffix in &target_suffixes { - if ext_name.ends_with(suffix) { - let base = &ext_name[..ext_name.len() - suffix.len()]; + if let Some(base) = ext_name.strip_suffix(suffix) { if !base.is_empty() && !names.contains(&base.to_string()) { names.push(base.to_string()); } @@ -1841,10 +1655,20 @@ impl Config { /// Get detailed extension dependencies for a runtime (with type information) /// - /// Returns a list of extension dependencies with their type: - /// - Local: extension defined in the main config file (needs install + build) - /// - External: extension from an external config file (needs install + build) - /// - Versioned: prebuilt extension from package repo (needs install only) + /// 
Returns a list of extension dependencies from the `extensions` array. + /// All extensions are returned as Local type - extension source configuration + /// (repo, git, path) is defined in the ext section, not in the runtime. + /// + /// New format (extensions array): + /// ```yaml + /// runtime: + /// dev: + /// extensions: + /// - avocado-ext-dev + /// - avocado-ext-sshd-dev + /// dependencies: + /// avocado-runtime: '0.1.0' + /// ``` pub fn get_runtime_extension_dependencies_detailed( &self, runtime_name: &str, @@ -1857,40 +1681,16 @@ impl Config { return Ok(vec![]); }; - let Some(dependencies) = runtime_config - .get("dependencies") - .and_then(|d| d.as_mapping()) - else { - return Ok(vec![]); - }; - let mut ext_deps = Vec::new(); - for (dep_name, dep_spec) in dependencies { - let dep_name_str = dep_name.as_str().unwrap_or(""); - - match parse_ext_ref(dep_name_str, dep_spec) { - ExtRefParsed::Extension { - name, - config, - version, - } => { - if let Some(ver) = version { - // Versioned extension (deprecated syntax) - ext_deps.push(RuntimeExtDep::Versioned { name, version: ver }); - } else if let Some(cfg_path) = config { - // External extension with config path - ext_deps.push(RuntimeExtDep::External { - name, - config_path: cfg_path, - }); - } else { - // Local extension - ext_deps.push(RuntimeExtDep::Local(name)); - } - } - ExtRefParsed::NotExtension => { - // Package dependency, skip + // New way: Read from the `extensions` array + if let Some(extensions) = runtime_config + .get("extensions") + .and_then(|e| e.as_sequence()) + { + for ext in extensions { + if let Some(ext_name) = ext.as_str() { + ext_deps.push(RuntimeExtDep::Local(ext_name.to_string())); } } } @@ -2707,36 +2507,41 @@ impl Config { extension_name: &str, target: &str, ) -> Result> { + use crate::utils::interpolation::interpolate_name; + let content = std::fs::read_to_string(config_path)?; let parsed = Self::parse_config_value(config_path, &content)?; // First check if it's defined in the ext 
section + // Need to iterate and interpolate keys since they may contain templates like {{ avocado.target }} if let Some(ext_section) = parsed.get("ext") { if let Some(ext_map) = ext_section.as_mapping() { - let ext_key = serde_yaml::Value::String(extension_name.to_string()); - if let Some(ext_config) = ext_map.get(&ext_key) { - // Check if this is a remote extension (has source: field) - if let Some(source) = Self::parse_extension_source(extension_name, ext_config)? - { - return Ok(Some(ExtensionLocation::Remote { - name: extension_name.to_string(), - source, - })); + for (ext_key, ext_config) in ext_map { + if let Some(raw_name) = ext_key.as_str() { + // Interpolate the extension name with the target + let interpolated_name = interpolate_name(raw_name, target); + if interpolated_name == extension_name { + // Check if this is a remote extension (has source: field) + if let Some(source) = + Self::parse_extension_source(extension_name, ext_config)? + { + return Ok(Some(ExtensionLocation::Remote { + name: extension_name.to_string(), + source, + })); + } + // Otherwise it's a local extension + return Ok(Some(ExtensionLocation::Local { + name: extension_name.to_string(), + config_path: config_path.to_string(), + })); + } } - // Otherwise it's a local extension - return Ok(Some(ExtensionLocation::Local { - name: extension_name.to_string(), - config_path: config_path.to_string(), - })); } } } - // If not local, search through the full dependency tree - let mut all_extensions = std::collections::HashSet::new(); - let mut visited = std::collections::HashSet::new(); - - // Get all extensions from runtime dependencies (this will recursively traverse) + // If not found in ext section, search through runtime extensions array let runtime_section = parsed.get("runtime").and_then(|r| r.as_mapping()); if let Some(runtime_section) = runtime_section { @@ -2746,46 +2551,48 @@ impl Config { let merged_runtime = self.get_merged_runtime_config(runtime_name, target, config_path)?; if let 
Some(merged_value) = merged_runtime { - if let Some(dependencies) = merged_value - .get("dependencies") - .and_then(|d| d.as_mapping()) + // Check the new `extensions` array format + if let Some(extensions) = + merged_value.get("extensions").and_then(|e| e.as_sequence()) { - for (_dep_name, dep_spec) in dependencies { - // Check for extension dependency - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) - { - // Check if this is an external extension (has config field) - if let Some(external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - let ext_location = ExtensionLocation::External { - name: ext_name.to_string(), - config_path: external_config.to_string(), - }; - all_extensions.insert(ext_location.clone()); - - // Recursively find nested external extension dependencies - self.find_all_nested_extensions_for_lookup( - config_path, - &ext_location, - &mut all_extensions, - &mut visited, - )?; - } else { - // Local extension - all_extensions.insert(ExtensionLocation::Local { - name: ext_name.to_string(), - config_path: config_path.to_string(), - }); - - // Also check local extension dependencies - self.find_local_extension_dependencies_for_lookup( - config_path, - &parsed, - ext_name, - &mut all_extensions, - &mut visited, - )?; + for ext in extensions { + if let Some(ext_name) = ext.as_str() { + if ext_name == extension_name { + // Found in extensions array - now find its definition in ext section + if let Some(ext_section) = parsed.get("ext") { + if let Some(ext_map) = ext_section.as_mapping() { + for (ext_key, ext_config) in ext_map { + if let Some(raw_name) = ext_key.as_str() { + let interpolated = + interpolate_name(raw_name, target); + if interpolated == extension_name { + if let Some(source) = + Self::parse_extension_source( + extension_name, + ext_config, + )? 
+ { + return Ok(Some( + ExtensionLocation::Remote { + name: extension_name + .to_string(), + source, + }, + )); + } + return Ok(Some( + ExtensionLocation::Local { + name: extension_name + .to_string(), + config_path: config_path + .to_string(), + }, + )); + } + } + } + } + } } } } @@ -2795,217 +2602,9 @@ impl Config { } } - // Now search for the target extension in all collected extensions - for ext_location in all_extensions { - let found_name = match &ext_location { - ExtensionLocation::Local { name, .. } => name, - ExtensionLocation::External { name, .. } => name, - ExtensionLocation::Remote { name, .. } => name, - }; - - if found_name == extension_name { - return Ok(Some(ext_location)); - } - } - Ok(None) } - /// Recursively find all nested extensions for lookup - fn find_all_nested_extensions_for_lookup( - &self, - base_config_path: &str, - ext_location: &ExtensionLocation, - all_extensions: &mut std::collections::HashSet, - visited: &mut std::collections::HashSet, - ) -> Result<()> { - let (ext_name, ext_config_path) = match ext_location { - ExtensionLocation::External { name, config_path } => (name, config_path), - ExtensionLocation::Local { name, config_path } => { - // For local extensions, we need to check their dependencies too - let content = std::fs::read_to_string(config_path)?; - let parsed = Self::parse_config_value(config_path, &content)?; - return self.find_local_extension_dependencies_for_lookup( - config_path, - &parsed, - name, - all_extensions, - visited, - ); - } - ExtensionLocation::Remote { .. 
} => { - // Remote extensions don't have nested dependencies to discover here - // Their configs are merged separately after fetching - return Ok(()); - } - }; - - // Cycle detection: check if we've already processed this extension - let ext_key = format!("{ext_name}:{ext_config_path}"); - if visited.contains(&ext_key) { - return Ok(()); - } - visited.insert(ext_key); - - // Load the external extension configuration - let resolved_external_config_path = - self.resolve_path_relative_to_src_dir(base_config_path, ext_config_path); - let external_extensions = - self.load_external_extensions(base_config_path, ext_config_path)?; - - let extension_config = external_extensions.get(ext_name).ok_or_else(|| { - anyhow::anyhow!( - "Extension '{ext_name}' not found in external config file '{ext_config_path}'" - ) - })?; - - // Load the nested config file to get its src_dir setting - let nested_config_content = std::fs::read_to_string(&resolved_external_config_path) - .with_context(|| { - format!( - "Failed to read nested config file: {}", - resolved_external_config_path.display() - ) - })?; - let nested_config = Self::parse_config_value( - resolved_external_config_path - .to_str() - .unwrap_or(ext_config_path), - &nested_config_content, - ) - .with_context(|| { - format!( - "Failed to parse nested config file: {}", - resolved_external_config_path.display() - ) - })?; - - // Create a temporary Config object for the nested config to handle its src_dir - let nested_config_obj = serde_yaml::from_value::(nested_config.clone())?; - - // Check if this external extension has dependencies - if let Some(dependencies) = extension_config - .get("dependencies") - .and_then(|d| d.as_mapping()) - { - for (_dep_name, dep_spec) in dependencies { - // Check for nested extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is a nested external extension (has config field) - if let Some(nested_external_config) = - 
dep_spec.get("config").and_then(|v| v.as_str()) - { - // Resolve the nested config path relative to the nested config's src_dir - let nested_config_path = nested_config_obj - .resolve_path_relative_to_src_dir( - &resolved_external_config_path, - nested_external_config, - ); - - let nested_ext_location = ExtensionLocation::External { - name: nested_ext_name.to_string(), - config_path: nested_config_path.to_string_lossy().to_string(), - }; - - // Add the nested extension to all extensions - all_extensions.insert(nested_ext_location.clone()); - - // Recursively process the nested extension - self.find_all_nested_extensions_for_lookup( - base_config_path, - &nested_ext_location, - all_extensions, - visited, - )?; - } else { - // This is a local extension dependency within the external config - all_extensions.insert(ExtensionLocation::Local { - name: nested_ext_name.to_string(), - config_path: resolved_external_config_path - .to_string_lossy() - .to_string(), - }); - - // Check dependencies of this local extension in the external config - self.find_local_extension_dependencies_for_lookup( - &resolved_external_config_path.to_string_lossy(), - &nested_config, - nested_ext_name, - all_extensions, - visited, - )?; - } - } - } - } - - Ok(()) - } - - /// Find dependencies of local extensions for lookup - fn find_local_extension_dependencies_for_lookup( - &self, - config_path: &str, - parsed_config: &serde_yaml::Value, - ext_name: &str, - all_extensions: &mut std::collections::HashSet, - visited: &mut std::collections::HashSet, - ) -> Result<()> { - // Cycle detection for local extensions - let ext_key = format!("local:{ext_name}:{config_path}"); - if visited.contains(&ext_key) { - return Ok(()); - } - visited.insert(ext_key); - - // Get the local extension configuration - if let Some(ext_config) = parsed_config.get("ext").and_then(|ext| ext.get(ext_name)) { - // Check if this local extension has dependencies - if let Some(dependencies) = 
ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { - for (_dep_name, dep_spec) in dependencies { - // Check for extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is an external extension (has config field) - if let Some(external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - let ext_location = ExtensionLocation::External { - name: nested_ext_name.to_string(), - config_path: external_config.to_string(), - }; - all_extensions.insert(ext_location.clone()); - - // Recursively find nested external extension dependencies - self.find_all_nested_extensions_for_lookup( - config_path, - &ext_location, - all_extensions, - visited, - )?; - } else { - // Local extension dependency - all_extensions.insert(ExtensionLocation::Local { - name: nested_ext_name.to_string(), - config_path: config_path.to_string(), - }); - - // Recursively check this local extension's dependencies - self.find_local_extension_dependencies_for_lookup( - config_path, - parsed_config, - nested_ext_name, - all_extensions, - visited, - )?; - } - } - } - } - } - - Ok(()) - } /// Expand environment variables in a string pub fn expand_env_vars(input: &str) -> String { diff --git a/src/utils/container.rs b/src/utils/container.rs index 33045ff..5aa2be4 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -2100,8 +2100,7 @@ mod tests { let arch_part = platform.strip_prefix("linux/").unwrap(); assert!( valid_archs.contains(&arch_part) || !arch_part.is_empty(), - "Unexpected platform: {}", - platform + "Unexpected platform: {platform}" ); } diff --git a/src/utils/stamps.rs b/src/utils/stamps.rs index b60167e..8041f80 100644 --- a/src/utils/stamps.rs +++ b/src/utils/stamps.rs @@ -1014,30 +1014,13 @@ pub fn resolve_required_stamps_for_runtime_build_with_arch( let mut reqs = vec![sdk_install, StampRequirement::runtime_install(runtime_name)]; + // All extensions now require install + build + image stamps + 
// Extension source configuration (repo, git, path) is defined in the ext section for ext_dep in ext_dependencies { let ext_name = ext_dep.name(); - - match ext_dep { - // Local extensions: require install + build + image stamps - RuntimeExtDep::Local(_) => { - reqs.push(StampRequirement::ext_install(ext_name)); - reqs.push(StampRequirement::ext_build(ext_name)); - reqs.push(StampRequirement::ext_image(ext_name)); - } - // External extensions: require install + build + image stamps - RuntimeExtDep::External { .. } => { - reqs.push(StampRequirement::ext_install(ext_name)); - reqs.push(StampRequirement::ext_build(ext_name)); - reqs.push(StampRequirement::ext_image(ext_name)); - } - // DEPRECATED: Versioned extensions with vsn: syntax - // This case should not be reached as vsn: syntax now errors early. - // Remote extensions are now handled through the ext section with source: field, - // and are treated as local extensions after being fetched. - RuntimeExtDep::Versioned { .. } => { - // Should not be reached - vsn: syntax errors during config parsing - } - } + reqs.push(StampRequirement::ext_install(ext_name)); + reqs.push(StampRequirement::ext_build(ext_name)); + reqs.push(StampRequirement::ext_image(ext_name)); } reqs @@ -1429,23 +1412,15 @@ mod tests { } #[test] - fn test_resolve_required_stamps_for_runtime_build_with_mixed_extensions() { + fn test_resolve_required_stamps_for_runtime_build_with_multiple_extensions() { use crate::utils::config::RuntimeExtDep; - // Test with mixed extension types: - // - local-ext: needs install + build + image stamps - // - external-ext: needs install + build + image stamps - // - versioned-ext: NO stamps (prebuilt package from repo) + // Test with multiple extensions: + // All extensions are now Local type - source config (repo, git, path) is in ext section let ext_deps = vec![ - RuntimeExtDep::Local("local-ext".to_string()), - RuntimeExtDep::External { - name: "external-ext".to_string(), - config_path: 
"../external/avocado.yaml".to_string(), - }, - RuntimeExtDep::Versioned { - name: "versioned-ext".to_string(), - version: "1.0.0".to_string(), - }, + RuntimeExtDep::Local("app".to_string()), + RuntimeExtDep::Local("config-dev".to_string()), + RuntimeExtDep::Local("avocado-ext-dev".to_string()), ]; let reqs = resolve_required_stamps_for_runtime_build("my-runtime", &ext_deps); @@ -1453,108 +1428,35 @@ mod tests { // Should have: // - SDK install (1) // - Runtime install (1) - // - local-ext install + build + image (3) - // - external-ext install + build + image (3) - // - versioned-ext: NOTHING (prebuilt package from repo) - // Total: 8 - assert_eq!(reqs.len(), 8); + // - app install + build + image (3) + // - config-dev install + build + image (3) + // - avocado-ext-dev install + build + image (3) + // Total: 11 + assert_eq!(reqs.len(), 11); // Verify SDK and runtime install are present assert!(reqs.contains(&StampRequirement::sdk_install())); assert!(reqs.contains(&StampRequirement::runtime_install("my-runtime"))); - // Verify local extension has install, build, and image - assert!(reqs.contains(&StampRequirement::ext_install("local-ext"))); - assert!(reqs.contains(&StampRequirement::ext_build("local-ext"))); - assert!(reqs.contains(&StampRequirement::ext_image("local-ext"))); - - // Verify external extension has install, build, and image - assert!(reqs.contains(&StampRequirement::ext_install("external-ext"))); - assert!(reqs.contains(&StampRequirement::ext_build("external-ext"))); - assert!(reqs.contains(&StampRequirement::ext_image("external-ext"))); - - // Verify versioned extension has NO stamps at all - // (they're prebuilt packages installed via DNF during runtime install) - assert!(!reqs.contains(&StampRequirement::ext_install("versioned-ext"))); - assert!(!reqs.contains(&StampRequirement::ext_build("versioned-ext"))); - assert!(!reqs.contains(&StampRequirement::ext_image("versioned-ext"))); - } - - #[test] - fn 
test_resolve_required_stamps_runtime_build_only_versioned_extensions() { - use crate::utils::config::RuntimeExtDep; - - // Runtime with ONLY versioned extensions (common for prebuilt extensions from package repo) - // Example: avocado-ext-dev, avocado-ext-sshd-dev - // Versioned extensions are prebuilt packages - NO stamps required - let ext_deps = vec![ - RuntimeExtDep::Versioned { - name: "avocado-ext-dev".to_string(), - version: "0.1.0".to_string(), - }, - RuntimeExtDep::Versioned { - name: "avocado-ext-sshd-dev".to_string(), - version: "0.1.0".to_string(), - }, - ]; - - let reqs = resolve_required_stamps_for_runtime_build("dev", &ext_deps); - - // Should ONLY have SDK install + runtime install (2 total) - // Versioned extensions don't add any stamp requirements - assert_eq!(reqs.len(), 2); - assert!(reqs.contains(&StampRequirement::sdk_install())); - assert!(reqs.contains(&StampRequirement::runtime_install("dev"))); - - // Verify NO extension stamps are required for versioned extensions - assert!(!reqs.contains(&StampRequirement::ext_install("avocado-ext-dev"))); - assert!(!reqs.contains(&StampRequirement::ext_build("avocado-ext-dev"))); - assert!(!reqs.contains(&StampRequirement::ext_image("avocado-ext-dev"))); - assert!(!reqs.contains(&StampRequirement::ext_install("avocado-ext-sshd-dev"))); - assert!(!reqs.contains(&StampRequirement::ext_build("avocado-ext-sshd-dev"))); - assert!(!reqs.contains(&StampRequirement::ext_image("avocado-ext-sshd-dev"))); - } - - #[test] - fn test_resolve_required_stamps_runtime_build_only_external_extensions() { - use crate::utils::config::RuntimeExtDep; - - // Runtime with ONLY external extensions (from external config files) - let ext_deps = vec![ - RuntimeExtDep::External { - name: "avocado-ext-peridio".to_string(), - config_path: "avocado-ext-peridio/avocado.yml".to_string(), - }, - RuntimeExtDep::External { - name: "custom-ext".to_string(), - config_path: "../custom/avocado.yaml".to_string(), - }, - ]; - - let reqs = 
resolve_required_stamps_for_runtime_build("my-runtime", &ext_deps); + // Verify all extensions have install, build, and image + assert!(reqs.contains(&StampRequirement::ext_install("app"))); + assert!(reqs.contains(&StampRequirement::ext_build("app"))); + assert!(reqs.contains(&StampRequirement::ext_image("app"))); - // Should have: - // - SDK install (1) - // - Runtime install (1) - // - avocado-ext-peridio install + build + image (3) - // - custom-ext install + build + image (3) - // Total: 8 - assert_eq!(reqs.len(), 8); + assert!(reqs.contains(&StampRequirement::ext_install("config-dev"))); + assert!(reqs.contains(&StampRequirement::ext_build("config-dev"))); + assert!(reqs.contains(&StampRequirement::ext_image("config-dev"))); - // Verify external extensions require install, build, and image - assert!(reqs.contains(&StampRequirement::ext_install("avocado-ext-peridio"))); - assert!(reqs.contains(&StampRequirement::ext_build("avocado-ext-peridio"))); - assert!(reqs.contains(&StampRequirement::ext_image("avocado-ext-peridio"))); - assert!(reqs.contains(&StampRequirement::ext_install("custom-ext"))); - assert!(reqs.contains(&StampRequirement::ext_build("custom-ext"))); - assert!(reqs.contains(&StampRequirement::ext_image("custom-ext"))); + assert!(reqs.contains(&StampRequirement::ext_install("avocado-ext-dev"))); + assert!(reqs.contains(&StampRequirement::ext_build("avocado-ext-dev"))); + assert!(reqs.contains(&StampRequirement::ext_image("avocado-ext-dev"))); } #[test] - fn test_resolve_required_stamps_runtime_build_only_local_extensions() { + fn test_resolve_required_stamps_runtime_build_local_extensions() { use crate::utils::config::RuntimeExtDep; - // Runtime with ONLY local extensions (defined in main config) + // Runtime with extensions (all are now Local type) let ext_deps = vec![ RuntimeExtDep::Local("app".to_string()), RuntimeExtDep::Local("config-dev".to_string()), @@ -1633,20 +1535,25 @@ mod tests { fn test_runtime_ext_dep_name() { use 
crate::utils::config::RuntimeExtDep; + // Test the Local variant (the primary way to specify extensions) let local = RuntimeExtDep::Local("my-local-ext".to_string()); assert_eq!(local.name(), "my-local-ext"); - let external = RuntimeExtDep::External { - name: "my-external-ext".to_string(), - config_path: "path/to/config.yaml".to_string(), - }; - assert_eq!(external.name(), "my-external-ext"); + // Test deprecated variants for backward compatibility + #[allow(deprecated)] + { + let external = RuntimeExtDep::External { + name: "my-external-ext".to_string(), + config_path: "path/to/config.yaml".to_string(), + }; + assert_eq!(external.name(), "my-external-ext"); - let versioned = RuntimeExtDep::Versioned { - name: "my-versioned-ext".to_string(), - version: "1.2.3".to_string(), - }; - assert_eq!(versioned.name(), "my-versioned-ext"); + let versioned = RuntimeExtDep::Versioned { + name: "my-versioned-ext".to_string(), + version: "1.2.3".to_string(), + }; + assert_eq!(versioned.name(), "my-versioned-ext"); + } } #[test] From 56a932da399dadbd21bf7f65206a315094928983 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 18:12:33 -0500 Subject: [PATCH 15/23] pluralize keys MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit modernizes the Avocado configuration format with clearer, more consistent key names and removes deprecated TOML support. 
Config Key Changes: - Rename top-level keys: * runtime → runtimes * ext → extensions * provision → provision_profiles - Rename dependency keys throughout: * dependencies → packages (in all contexts) - Rename extension source types: * ExtensionSource::Repo → ExtensionSource::Package * source.type: repo → source.type: package - Update CLI flags: * --provision-profile → --profile Runtime Extensions: - Extensions now listed directly in runtimes.*.extensions as array - Removed deprecated nested dependency syntax (vsn:, config:) - Fixed find_required_extensions() to use new structure TOML Support Removal: - Remove toml crate dependency from Cargo.toml - Remove deprecated parsing functions: * load_from_toml_str() * toml_to_yaml() * migrate_toml_to_yaml() - Remove TOML fallback logic from parse_config_value() - Simplify Config::load() to YAML-only - Remove .toml file checks from is_extension_installed() Code Cleanup: - Remove unused find_nested_external_extensions() method - Simplify RuntimeExtDep enum (keep only Local variant) - Add #[allow(dead_code)] to ExtensionDependency deprecated variants - Update all config templates (7 files) to use new keys - Convert 60+ tests from TOML to YAML format - Update 7 integration tests to use YAML syntax --- Cargo.lock | 55 - Cargo.toml | 1 - configs/advantech/icam-540.yaml | 39 +- configs/default.yaml | 46 +- configs/nvidia/jetson-orin-nano-devkit.yaml | 31 +- .../raspberry-pi/raspberrypi-4-model-b.yaml | 43 +- configs/raspberry-pi/raspberrypi-5.yaml | 43 +- configs/seeed/reterminal-dm.yaml | 43 +- configs/seeed/reterminal.yaml | 43 +- src/commands/build.rs | 43 +- src/commands/ext/build.rs | 18 +- src/commands/ext/checkout.rs | 2 +- src/commands/ext/deps.rs | 18 +- src/commands/ext/image.rs | 12 +- src/commands/ext/install.rs | 11 +- src/commands/ext/list.rs | 2 +- src/commands/ext/package.rs | 14 +- src/commands/fetch.rs | 22 +- src/commands/hitl/server.rs | 2 +- src/commands/init.rs | 59 +- src/commands/install.rs | 196 +- 
src/commands/runtime/build.rs | 26 +- src/commands/runtime/clean.rs | 2 +- src/commands/runtime/deploy.rs | 2 +- src/commands/runtime/deps.rs | 17 +- src/commands/runtime/dnf.rs | 2 +- src/commands/runtime/install.rs | 14 +- src/commands/runtime/list.rs | 4 +- src/commands/runtime/provision.rs | 80 +- src/commands/runtime/sign.rs | 38 +- src/commands/sdk/compile.rs | 25 +- src/commands/sdk/deps.rs | 25 +- src/commands/sdk/run.rs | 2 +- src/commands/sign.rs | 2 +- src/commands/unlock.rs | 4 +- src/main.rs | 4 +- src/utils/config.rs | 1692 ++++++++--------- src/utils/ext_fetch.rs | 6 +- src/utils/interpolation/mod.rs | 20 +- src/utils/stamps.rs | 24 +- src/utils/target.rs | 12 +- tests/fixtures/configs/complex.yaml | 4 +- tests/fixtures/configs/external-config.yaml | 4 +- tests/fixtures/configs/minimal.yaml | 4 +- tests/fixtures/configs/nested-config.yaml | 2 +- .../configs/with-both-extensions.yaml | 4 +- tests/fixtures/configs/with-confext.yaml | 4 +- .../configs/with-external-extensions.yaml | 4 +- .../fixtures/configs/with-interpolation.yaml | 10 +- .../configs/with-nested-target-config.yaml | 4 +- .../fixtures/configs/with-overlay-merge.yaml | 4 +- .../fixtures/configs/with-overlay-opaque.yaml | 4 +- tests/fixtures/configs/with-overlay.yaml | 6 +- tests/fixtures/configs/with-signing-keys.yaml | 2 +- tests/fixtures/configs/with-sysext.yaml | 4 +- tests/fixtures/configs/with-users.yaml | 4 +- tests/interpolation.rs | 18 +- tests/target_precedence.rs | 89 +- 58 files changed, 1247 insertions(+), 1668 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a3db91e..6d4c0c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -159,7 +159,6 @@ dependencies = [ "thiserror", "tokio", "tokio-test", - "toml", "uuid", "walkdir", ] @@ -1509,15 +1508,6 @@ dependencies = [ "serde_core", ] -[[package]] -name = "serde_spanned" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" -dependencies = [ - "serde_core", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1814,45 +1804,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "toml" -version = "0.9.9+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5238e643fc34a1d5d7e753e1532a91912d74b63b92b3ea51fde8d1b7bc79dd" -dependencies = [ - "indexmap", - "serde_core", - "serde_spanned", - "toml_datetime", - "toml_parser", - "toml_writer", - "winnow", -] - -[[package]] -name = "toml_datetime" -version = "0.7.4+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe3cea6b2aa3b910092f6abd4053ea464fab5f9c170ba5e9a6aead16ec4af2b6" -dependencies = [ - "serde_core", -] - -[[package]] -name = "toml_parser" -version = "1.0.5+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c03bee5ce3696f31250db0bbaff18bc43301ce0e8db2ed1f07cbb2acf89984c" -dependencies = [ - "winnow", -] - -[[package]] -name = "toml_writer" -version = "1.0.5+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9cd6190959dce0994aa8970cd32ab116d1851ead27e866039acaf2524ce44fa" - [[package]] name = "tower" version = "0.5.2" @@ -2367,12 +2318,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" - [[package]] name = "wit-bindgen" version = "0.46.0" diff --git a/Cargo.toml b/Cargo.toml index 4c05524..4b479f8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,6 @@ path = "src/lib.rs" [dependencies] serde = { version = "1.0", features = ["derive"] } -toml = "0.9" serde_yaml = "0.9" anyhow = "1.0" clap = 
{ version = "4.0", features = ["derive"] } diff --git a/configs/advantech/icam-540.yaml b/configs/advantech/icam-540.yaml index 02cbe0a..ec56752 100644 --- a/configs/advantech/icam-540.yaml +++ b/configs/advantech/icam-540.yaml @@ -5,32 +5,35 @@ src_dir: ../../ distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-ext-dev: - ext: avocado-ext-dev - vsn: "*" - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app icam-540: - dependencies: + packages: avocado-img-tegraflash: '*' -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: '*' + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: + packages: i2c-tools: '*' pylon: '*' pylon-dev: '*' @@ -58,11 +61,11 @@ ext: password: '' sdk: image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" icam-540: - dependencies: + packages: nativesdk-util-linux-mount: '*' container_args: - --network=host @@ -71,7 +74,7 @@ sdk: - -v - /sys:/sys:ro - --privileged -provision: +provision_profiles: tegraflash: container_args: - -v diff --git a/configs/default.yaml b/configs/default.yaml index 581c398..8775bad 100644 --- a/configs/default.yaml +++ b/configs/default.yaml @@ -16,31 +16,39 @@ distro: ## Runtimes ## -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-ext-sshd-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: "*" avocado-img-rootfs: "*" avocado-img-initramfs: "*" - avocado-ext-dev: - ext: avocado-ext-dev - vsn: "*" - avocado-ext-sshd-dev: - ext: 
avocado-ext-sshd-dev - vsn: "*" - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app ## ## Extensions ## -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: "*" + + avocado-ext-sshd-dev: + source: + type: package + version: "*" + + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: "*" + # Generated default app extension # Use or modify this to install dependencies and or include sdk compiled code app: @@ -50,7 +58,7 @@ ext: version: "0.1.0" # Install application dependencies - # dependencies: + # packages: #curl = "*" #iperf3 = "*" @@ -74,7 +82,7 @@ ext: sdk: image: "docker.io/avocadolinux/sdk:{{ config.distro.channel }}" - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" @@ -89,7 +97,7 @@ sdk: # When provisioning using usb or sd provisioning profiles, set extra sdk # container arguments to allow access to these devices -provision: +provision_profiles: usb: container_args: - -v diff --git a/configs/nvidia/jetson-orin-nano-devkit.yaml b/configs/nvidia/jetson-orin-nano-devkit.yaml index ed3b21d..012a3f1 100644 --- a/configs/nvidia/jetson-orin-nano-devkit.yaml +++ b/configs/nvidia/jetson-orin-nano-devkit.yaml @@ -4,29 +4,30 @@ supported_targets: distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app jetson-orin-nano-devkit: - dependencies: + packages: avocado-img-tegraflash: '*' -ext: +extensions: + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: {} + packages: {} config: types: - confext @@ -36,7 +37,7 @@ ext: password: '' sdk: 
image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" jetson-orin-nano-devkit: @@ -47,9 +48,9 @@ sdk: - -v - /sys:/sys:ro - --privileged - dependencies: + packages: nativesdk-util-linux-mount: '*' -provision: +provision_profiles: tegraflash: container_args: - -v diff --git a/configs/raspberry-pi/raspberrypi-4-model-b.yaml b/configs/raspberry-pi/raspberrypi-4-model-b.yaml index ab434f7..19ca35b 100644 --- a/configs/raspberry-pi/raspberrypi-4-model-b.yaml +++ b/configs/raspberry-pi/raspberrypi-4-model-b.yaml @@ -4,32 +4,37 @@ supported_targets: distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-ext-sshd-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-ext-dev: - ext: avocado-ext-dev - vsn: '*' - avocado-ext-sshd-dev: - ext: avocado-ext-sshd-dev - vsn: '*' - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: '*' + avocado-ext-sshd-dev: + source: + type: package + version: '*' + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: {} + packages: {} config: types: - confext @@ -39,13 +44,13 @@ ext: password: '' sdk: image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" raspberrypi4: container_args: - --network=host -provision: +provision_profiles: img: container_args: - -v diff --git a/configs/raspberry-pi/raspberrypi-5.yaml b/configs/raspberry-pi/raspberrypi-5.yaml index bbc7e48..a66c175 100644 --- 
a/configs/raspberry-pi/raspberrypi-5.yaml +++ b/configs/raspberry-pi/raspberrypi-5.yaml @@ -4,32 +4,37 @@ supported_targets: distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-ext-sshd-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-ext-dev: - ext: avocado-ext-dev - vsn: '*' - avocado-ext-sshd-dev: - ext: avocado-ext-sshd-dev - vsn: '*' - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: '*' + avocado-ext-sshd-dev: + source: + type: package + version: '*' + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: {} + packages: {} config: types: - confext @@ -39,13 +44,13 @@ ext: password: '' sdk: image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" raspberrypi5: container_args: - --network=host -provision: +provision_profiles: img: container_args: - -v diff --git a/configs/seeed/reterminal-dm.yaml b/configs/seeed/reterminal-dm.yaml index 78fb998..26c1de1 100644 --- a/configs/seeed/reterminal-dm.yaml +++ b/configs/seeed/reterminal-dm.yaml @@ -4,32 +4,37 @@ supported_targets: distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-ext-sshd-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-ext-dev: - ext: avocado-ext-dev - vsn: '*' - avocado-ext-sshd-dev: - ext: avocado-ext-sshd-dev - vsn: '*' - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - 
ext: config - app: - ext: app -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: '*' + avocado-ext-sshd-dev: + source: + type: package + version: '*' + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: {} + packages: {} config: types: - confext @@ -39,13 +44,13 @@ ext: password: '' sdk: image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" reterminal-dm: container_args: - --network=host -provision: +provision_profiles: usb: container_args: - -v diff --git a/configs/seeed/reterminal.yaml b/configs/seeed/reterminal.yaml index e0f374d..21166bc 100644 --- a/configs/seeed/reterminal.yaml +++ b/configs/seeed/reterminal.yaml @@ -4,32 +4,37 @@ supported_targets: distro: channel: apollo-edge version: 0.1.0 -runtime: +runtimes: dev: - dependencies: + extensions: + - avocado-ext-dev + - avocado-ext-sshd-dev + - avocado-bsp-{{ avocado.target }} + - config + - app + packages: avocado-img-bootfiles: '*' avocado-img-rootfs: '*' avocado-img-initramfs: '*' - avocado-ext-dev: - ext: avocado-ext-dev - vsn: '*' - avocado-ext-sshd-dev: - ext: avocado-ext-sshd-dev - vsn: '*' - avocado-bsp: - ext: avocado-bsp-{{ avocado.target }} - vsn: "*" - config: - ext: config - app: - ext: app -ext: +extensions: + avocado-ext-dev: + source: + type: package + version: '*' + avocado-ext-sshd-dev: + source: + type: package + version: '*' + avocado-bsp-{{ avocado.target }}: + source: + type: package + version: '*' app: types: - sysext - confext version: 0.1.0 - dependencies: {} + packages: {} config: types: - confext @@ -39,13 +44,13 @@ ext: password: '' sdk: image: docker.io/avocadolinux/sdk:{{ config.distro.channel }} - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" reterminal: container_args: - --network=host 
-provision: +provision_profiles: usb: container_args: - -v diff --git a/src/commands/build.rs b/src/commands/build.rs index 2b8bef1..17d9f86 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -370,7 +370,7 @@ impl BuildCommand { target: &str, ) -> Result> { let runtime_section = parsed - .get("runtime") + .get("runtimes") .and_then(|r| r.as_mapping()) .ok_or_else(|| anyhow::anyhow!("No runtime configuration found"))?; @@ -453,7 +453,7 @@ impl BuildCommand { // This is needed because ext section keys may contain templates like {{ avocado.target }} let mut ext_sources: std::collections::HashMap> = std::collections::HashMap::new(); - if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = parsed.get("extensions").and_then(|e| e.as_mapping()) { for (ext_key, ext_config) in ext_section { if let Some(raw_name) = ext_key.as_str() { // Interpolate the extension name with the target @@ -849,7 +849,9 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ); // Check if this is a local extension or needs to be found in external configs - let ext_config = parsed.get("ext").and_then(|ext| ext.get(extension_name)); + let ext_config = parsed + .get("extensions") + .and_then(|ext| ext.get(extension_name)); let extension_dep = if ext_config.is_some() { // Local extension @@ -998,7 +1000,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION // Verify the runtime exists and is configured for this target let runtime_section = parsed - .get("runtime") + .get("runtimes") .and_then(|r| r.as_mapping()) .ok_or_else(|| anyhow::anyhow!("No runtime configuration found"))?; @@ -1210,13 +1212,10 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION let mut visited = HashSet::new(); // Check runtime dependencies for extensions - if let Some(dependencies) = runtime_config - .get("dependencies") - .and_then(|d| d.as_mapping()) - { + if let 
Some(dependencies) = runtime_config.get("packages").and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { // Check for extension dependency - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) { // Check if this is a versioned extension (has vsn field) if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { let ext_dep = ExtensionDependency::Versioned { @@ -1293,7 +1292,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION // Get all extensions from runtime dependencies (this will recursively traverse) let runtime_section = parsed - .get("runtime") + .get("runtimes") .and_then(|r| r.as_mapping()) .ok_or_else(|| anyhow::anyhow!("No runtime configuration found"))?; @@ -1308,13 +1307,13 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION let merged_runtime = config.get_merged_runtime_config(runtime_name, target, &self.config_path)?; if let Some(merged_value) = merged_runtime { - if let Some(dependencies) = merged_value - .get("dependencies") - .and_then(|d| d.as_mapping()) + if let Some(dependencies) = + merged_value.get("packages").and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { // Check for extension dependency - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) + { // Check if this is a versioned extension (has vsn field) if let Some(version) = dep_spec.get("vsn").and_then(|v| v.as_str()) { let ext_dep = ExtensionDependency::Versioned { @@ -1447,12 +1446,12 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION // Check if this external extension has dependencies if let Some(dependencies) = extension_config - .get("dependencies") + .get("packages") .and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in 
dependencies { // Check for nested extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(nested_ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) { // Check if this is a nested external extension (has config field) if let Some(nested_external_config) = dep_spec.get("config").and_then(|v| v.as_str()) @@ -1552,13 +1551,17 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION visited.insert(ext_key); // Get the local extension configuration - if let Some(ext_config) = parsed_config.get("ext").and_then(|ext| ext.get(ext_name)) { + if let Some(ext_config) = parsed_config + .get("extensions") + .and_then(|ext| ext.get(ext_name)) + { // Check if this local extension has dependencies - if let Some(dependencies) = ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { + if let Some(dependencies) = ext_config.get("packages").and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { // Check for extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(nested_ext_name) = + dep_spec.get("extensions").and_then(|v| v.as_str()) + { // Check if this is an external extension (has config field) if let Some(external_config) = dep_spec.get("config").and_then(|v| v.as_str()) diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index f859339..d2cd20d 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -199,7 +199,9 @@ impl ExtBuildCommand { ExtensionLocation::Remote { .. 
} => { // Use the already-merged config from `parsed` which contains remote extension configs // Then apply target-specific overrides manually - let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + let ext_section = parsed + .get("extensions") + .and_then(|ext| ext.get(&self.extension)); if let Some(ext_val) = ext_section { let base_ext = ext_val.clone(); // Check for target-specific override within this extension @@ -1508,7 +1510,7 @@ echo "Set proper permissions on authentication files""#, sdk_config_path: &str, ) -> Result<()> { // Get dependencies from extension configuration - let dependencies = ext_config.get("dependencies").and_then(|v| v.as_mapping()); + let dependencies = ext_config.get("packages").and_then(|v| v.as_mapping()); let Some(deps_table) = dependencies else { return Ok(()); @@ -3407,11 +3409,11 @@ mod tests { fn test_handle_compile_dependencies_parsing() { // Test that the new compile dependency syntax is properly parsed let config_content = r#" -ext: +extensions: my-extension: types: - sysext - dependencies: + packages: my-app: compile: my-app install: ext-install.sh @@ -3429,19 +3431,19 @@ sdk: let parsed: serde_yaml::Value = serde_yaml::from_str(config_content).unwrap(); let ext_config = parsed - .get(serde_yaml::Value::String("ext".to_string())) + .get(serde_yaml::Value::String("extensions".to_string())) .unwrap() .get(serde_yaml::Value::String("my-extension".to_string())) .unwrap(); - let dependencies = ext_config - .get(serde_yaml::Value::String("dependencies".to_string())) + let packages = ext_config + .get(serde_yaml::Value::String("packages".to_string())) .unwrap() .as_mapping() .unwrap(); // Check that we can identify compile dependencies with install scripts let mut compile_install_deps = Vec::new(); - for (dep_name, dep_spec) in dependencies { + for (dep_name, dep_spec) in packages { if let serde_yaml::Value::Mapping(spec_map) = dep_spec { if let ( Some(serde_yaml::Value::String(compile_section)), diff 
--git a/src/commands/ext/checkout.rs b/src/commands/ext/checkout.rs index 8883d8a..4f79a68 100644 --- a/src/commands/ext/checkout.rs +++ b/src/commands/ext/checkout.rs @@ -300,7 +300,7 @@ impl ExtCheckoutCommand { // Get target from runtime configuration let target = parsed - .get("runtime") + .get("runtimes") .and_then(|runtime| runtime.as_mapping()) .and_then(|runtime_table| { if runtime_table.len() == 1 { diff --git a/src/commands/ext/deps.rs b/src/commands/ext/deps.rs index 793fe06..9398e35 100644 --- a/src/commands/ext/deps.rs +++ b/src/commands/ext/deps.rs @@ -68,7 +68,7 @@ impl ExtDepsCommand { } None => { // For listing all extensions, still use local extensions only - let ext_section = parsed.get("ext"); + let ext_section = parsed.get("extensions"); match ext_section { Some(ext) => { let ext_table = ext @@ -165,7 +165,7 @@ impl ExtDepsCommand { } // Try extension reference - if let Some(serde_yaml::Value::String(ext_name)) = spec_map.get("ext") { + if let Some(serde_yaml::Value::String(ext_name)) = spec_map.get("extensions") { // Check if this is a versioned extension (has vsn field) if let Some(serde_yaml::Value::String(version)) = spec_map.get("vsn") { return vec![("ext".to_string(), ext_name.clone(), version.clone())]; @@ -206,7 +206,7 @@ impl ExtDepsCommand { ext_name: &str, ) -> Vec<(String, String, String)> { let version = config - .get("ext") + .get("extensions") .and_then(|ext_section| ext_section.get(ext_name)) .and_then(|ext_config| ext_config.get("version")) .and_then(|v| v.as_str()) @@ -224,7 +224,7 @@ impl ExtDepsCommand { .get("sdk") .and_then(|sdk| sdk.get("compile")) .and_then(|compile| compile.get(compile_name)) - .and_then(|compile_config| compile_config.get("dependencies")) + .and_then(|compile_config| compile_config.get("packages")) .and_then(|deps| deps.as_mapping()); let Some(deps_table) = compile_deps else { @@ -264,9 +264,9 @@ impl ExtDepsCommand { extension: &str, ) -> Vec<(String, String, String)> { let dependencies = config - 
.get("ext") + .get("extensions") .and_then(|ext_section| ext_section.get(extension)) - .and_then(|ext_config| ext_config.get("dependencies")) + .and_then(|ext_config| ext_config.get("packages")) .and_then(|deps| deps.as_mapping()); let Some(deps_table) = dependencies else { @@ -314,11 +314,11 @@ mod tests { #[test] fn test_resolve_compile_dependency_with_install() { let config_content = r#" -ext: +extensions: my-extension: types: - sysext - dependencies: + packages: my-app: compile: my-app install: ext-install.sh @@ -373,7 +373,7 @@ sdk: #[test] fn test_resolve_regular_dependencies() { let config_content = r#" -ext: +extensions: test-ext: types: - sysext diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index 231ee02..88466ba 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -142,7 +142,9 @@ impl ExtImageCommand { // and may not find templated extension names like "avocado-bsp-{{ avocado.target }}" let extension_location = { // First check if extension exists in the composed config's ext section - let ext_in_composed = parsed.get("ext").and_then(|e| e.get(&self.extension)); + let ext_in_composed = parsed + .get("extensions") + .and_then(|e| e.get(&self.extension)); if let Some(ext_config) = ext_in_composed { // Check if it has a source: field (indicating remote extension) @@ -224,10 +226,12 @@ impl ExtImageCommand { ExtensionLocation::Remote { .. 
} => { // Use the already-merged config from `parsed` which contains remote extension configs // Then apply target-specific overrides manually - let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + let ext_section = parsed + .get("extensions") + .and_then(|ext| ext.get(&self.extension)); if self.verbose { - if let Some(all_ext) = parsed.get("ext") { + if let Some(all_ext) = parsed.get("extensions") { if let Some(ext_map) = all_ext.as_mapping() { let ext_names: Vec<_> = ext_map.keys().filter_map(|k| k.as_str()).collect(); @@ -314,7 +318,7 @@ impl ExtImageCommand { // Use resolved target (from CLI/env) if available, otherwise fall back to config let _config_target = parsed - .get("runtime") + .get("runtimes") .and_then(|runtime| runtime.as_mapping()) .and_then(|runtime_table| { if runtime_table.len() == 1 { diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index 284fc02..f1febc6 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -139,7 +139,7 @@ impl ExtInstallCommand { } } else { // No extension specified - install all local extensions - match parsed.get("ext") { + match parsed.get("extensions") { Some(ext_section) => match ext_section.as_mapping() { Some(table) => table .keys() @@ -189,7 +189,7 @@ impl ExtInstallCommand { // Use resolved target (from CLI/env) if available, otherwise fall back to config let _config_target = parsed - .get("runtime") + .get("runtimes") .and_then(|runtime| runtime.as_mapping()) .and_then(|runtime_table| { if runtime_table.len() == 1 { @@ -438,7 +438,7 @@ impl ExtInstallCommand { ExtensionLocation::Remote { .. } | ExtensionLocation::Local { .. 
} => { // Use the already-merged config from `parsed` which contains remote extension configs parsed - .get("ext") + .get("extensions") .and_then(|ext| ext.get(extension)) .cloned() } @@ -450,7 +450,7 @@ impl ExtInstallCommand { }; // Install dependencies if they exist - let dependencies = ext_config.as_ref().and_then(|ec| ec.get("dependencies")); + let dependencies = ext_config.as_ref().and_then(|ec| ec.get("packages")); let sysroot = SysrootType::Extension(extension.to_string()); @@ -498,7 +498,8 @@ impl ExtInstallCommand { // Check for extension dependency // Format: { ext: "extension-name" } or { ext: "name", config: "path" } or { ext: "name", vsn: "version" } - if let Some(ext_name) = spec_map.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = spec_map.get("extensions").and_then(|v| v.as_str()) + { // Check if this is a versioned extension (has vsn field) if let Some(version) = spec_map.get("vsn").and_then(|v| v.as_str()) { extension_dependencies diff --git a/src/commands/ext/list.rs b/src/commands/ext/list.rs index 7b1bee8..fad1b4a 100644 --- a/src/commands/ext/list.rs +++ b/src/commands/ext/list.rs @@ -30,7 +30,7 @@ impl ExtListCommand { fn get_extensions(&self, parsed: &serde_yaml::Value) -> Vec { parsed - .get("ext") + .get("extensions") .and_then(|ext_section| ext_section.as_mapping()) .map(|table| { table diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index b01269b..d832513 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -141,7 +141,9 @@ impl ExtPackageCommand { ExtensionLocation::Remote { .. 
} => { // Use the already-merged config from `parsed` which contains remote extension configs // Then apply target-specific overrides manually - let ext_section = parsed.get("ext").and_then(|ext| ext.get(&self.extension)); + let ext_section = parsed + .get("extensions") + .and_then(|ext| ext.get(&self.extension)); if let Some(ext_val) = ext_section { let base_ext = ext_val.clone(); // Check for target-specific override within this extension @@ -174,7 +176,7 @@ impl ExtPackageCommand { // For remote extensions, use the parsed config; for local, read from file let raw_ext_config = match &extension_location { ExtensionLocation::Remote { .. } => parsed - .get("ext") + .get("extensions") .and_then(|ext| ext.get(&self.extension)) .cloned(), _ => self.get_raw_extension_config(&ext_config_path)?, @@ -239,7 +241,7 @@ impl ExtPackageCommand { .with_context(|| format!("Failed to parse config file: {ext_config_path}"))?; // Get the ext section - let ext_section = parsed.get("ext"); + let ext_section = parsed.get("extensions"); if ext_section.is_none() { return Ok(None); } @@ -270,7 +272,7 @@ impl ExtPackageCommand { /// /// If `package_files` is specified in the extension config, use those patterns. /// Otherwise, default to: - /// - The avocado config file (avocado.yaml, avocado.yml, or avocado.toml) + /// - The avocado config file (avocado.yaml or avocado.yml) /// - All overlay directories (base level and target-specific) /// /// # Arguments @@ -552,8 +554,8 @@ if [ ! -d "$EXT_SRC_DIR" ]; then fi # Check for avocado config file -if [ ! -f "$EXT_SRC_DIR/avocado.yaml" ] && [ ! -f "$EXT_SRC_DIR/avocado.yml" ] && [ ! -f "$EXT_SRC_DIR/avocado.toml" ]; then - echo "No avocado.yaml/yml/toml found in $EXT_SRC_DIR" +if [ ! -f "$EXT_SRC_DIR/avocado.yaml" ] && [ ! 
-f "$EXT_SRC_DIR/avocado.yml" ]; then + echo "No avocado.yaml/yml found in $EXT_SRC_DIR" exit 1 fi diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index a3682a3..3595e1c 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -163,7 +163,7 @@ impl FetchCommand { // Check if extension exists in configuration if config_toml - .get("ext") + .get("extensions") .and_then(|ext| ext.get(extension)) .is_none() { @@ -267,7 +267,7 @@ $DNF_SDK_HOST \ // Check if runtime exists in configuration if config_toml - .get("runtime") + .get("runtimes") .and_then(|rt| rt.get(runtime)) .is_none() { @@ -378,7 +378,10 @@ $DNF_SDK_HOST \ self.fetch_sdk_target_metadata(container_config).await?; // 4. Fetch all extension metadata (including nested external extensions) - if let Some(extensions) = config_toml.get("ext").and_then(|ext| ext.as_mapping()) { + if let Some(extensions) = config_toml + .get("extensions") + .and_then(|ext| ext.as_mapping()) + { for extension_name_val in extensions.keys() { if let Some(extension_name) = extension_name_val.as_str() { if let Err(e) = self @@ -419,7 +422,7 @@ $DNF_SDK_HOST \ } // 5. 
Fetch all runtime metadata - if let Some(runtimes) = config_toml.get("runtime").and_then(|rt| rt.as_mapping()) { + if let Some(runtimes) = config_toml.get("runtimes").and_then(|rt| rt.as_mapping()) { for runtime_name_val in runtimes.keys() { if let Some(runtime_name) = runtime_name_val.as_str() { if let Err(e) = self @@ -706,11 +709,14 @@ $DNF_SDK_HOST \ let mut visited = HashSet::new(); // Find external extensions from main config - if let Some(extensions) = config_toml.get("ext").and_then(|ext| ext.as_mapping()) { + if let Some(extensions) = config_toml + .get("extensions") + .and_then(|ext| ext.as_mapping()) + { for (ext_name_val, ext_config) in extensions { if let Some(ext_name) = ext_name_val.as_str() { if let Some(dependencies) = - ext_config.get("dependencies").and_then(|d| d.as_mapping()) + ext_config.get("packages").and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { // Check for external extension dependency @@ -800,12 +806,12 @@ $DNF_SDK_HOST \ // Check if this external extension has dependencies if let Some(dependencies) = extension_config - .get("dependencies") + .get("packages") .and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { // Check for nested extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(nested_ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) { // Check if this is a nested external extension (has config field) if let Some(nested_external_config) = dep_spec.get("config").and_then(|v| v.as_str()) diff --git a/src/commands/hitl/server.rs b/src/commands/hitl/server.rs index 657e874..95d9c3b 100644 --- a/src/commands/hitl/server.rs +++ b/src/commands/hitl/server.rs @@ -12,7 +12,7 @@ use std::path::PathBuf; #[derive(Args, Debug)] pub struct HitlServerCommand { - /// Path to the avocado.toml configuration file + /// Path to the avocado.yaml configuration file #[arg(short, long, default_value = "avocado.yaml")] pub 
config_path: String, diff --git a/src/commands/init.rs b/src/commands/init.rs index 7b9cbaf..b0c5a0d 100644 --- a/src/commands/init.rs +++ b/src/commands/init.rs @@ -278,16 +278,16 @@ impl InitCommand { /// Checks if a target is supported in the given TOML content. /// /// # Arguments - /// * `toml_content` - The content of the avocado.yaml file + /// * `yaml_content` - The content of the avocado.yaml file /// * `target` - The target to check for /// /// # Returns /// * `Ok(true)` if the target is supported or if supported_targets contains "*" /// * `Ok(false)` if the target is not supported - /// * `Err` if the TOML cannot be parsed or doesn't have supported_targets - fn is_target_supported(toml_content: &str, target: &str) -> Result { - let config: toml::Value = - toml::from_str(toml_content).with_context(|| "Failed to parse avocado.yaml")?; + /// * `Err` if the YAML cannot be parsed or doesn't have supported_targets + fn is_target_supported(yaml_content: &str, target: &str) -> Result { + let config: serde_yaml::Value = + serde_yaml::from_str(yaml_content).with_context(|| "Failed to parse avocado.yaml")?; let supported_targets_value = config.get("supported_targets").ok_or_else(|| { anyhow::anyhow!("Reference avocado.yaml missing 'supported_targets' field") @@ -299,7 +299,7 @@ impl InitCommand { } // Handle supported_targets as an array - if let Some(array) = supported_targets_value.as_array() { + if let Some(array) = supported_targets_value.as_sequence() { // Check if "*" is in supported_targets (means all targets supported) let has_wildcard = array.iter().any(|v| v.as_str() == Some("*")); @@ -319,38 +319,38 @@ impl InitCommand { /// Updates the default_target in the avocado.yaml file. 
/// /// # Arguments - /// * `toml_path` - Path to the avocado.yaml file + /// * `yaml_path` - Path to the avocado.yaml file /// * `new_target` - The new target to set as default /// /// # Returns /// * `Ok(())` if successful /// * `Err` if the file cannot be read, parsed, or written - fn update_default_target(toml_path: &Path, new_target: &str) -> Result<()> { - let content = fs::read_to_string(toml_path) - .with_context(|| format!("Failed to read '{}'", toml_path.display()))?; + fn update_default_target(yaml_path: &Path, new_target: &str) -> Result<()> { + let content = fs::read_to_string(yaml_path) + .with_context(|| format!("Failed to read '{}'", yaml_path.display()))?; - // Parse as toml::Value to preserve structure - let mut config: toml::Value = - toml::from_str(&content).with_context(|| "Failed to parse avocado.yaml")?; + // Parse as serde_yaml::Value to preserve structure + let mut config: serde_yaml::Value = + serde_yaml::from_str(&content).with_context(|| "Failed to parse avocado.yaml")?; // Update the default_target field - if let Some(table) = config.as_table_mut() { - table.insert( - "default_target".to_string(), - toml::Value::String(new_target.to_string()), + if let Some(mapping) = config.as_mapping_mut() { + mapping.insert( + serde_yaml::Value::String("default_target".to_string()), + serde_yaml::Value::String(new_target.to_string()), ); } else { - anyhow::bail!("avocado.yaml is not a valid TOML table"); + anyhow::bail!("avocado.yaml is not a valid YAML mapping"); } // Write back to file - let updated_content = toml::to_string_pretty(&config) - .with_context(|| "Failed to serialize updated config")?; + let updated_content = + serde_yaml::to_string(&config).with_context(|| "Failed to serialize updated config")?; - fs::write(toml_path, updated_content).with_context(|| { + fs::write(yaml_path, updated_content).with_context(|| { format!( "Failed to write updated config to '{}'", - toml_path.display() + yaml_path.display() ) })?; @@ -913,19 +913,18 @@ mod 
tests { assert!(content.contains("distro:")); assert!(content.contains("channel: apollo-edge")); assert!(content.contains("version: 0.1.0")); - assert!(content.contains("runtime:")); + assert!(content.contains("runtimes:")); assert!(content.contains("dev:")); - assert!(content.contains("dependencies:")); - assert!(content.contains("avocado-img-bootfiles: \"*\"")); - assert!(content.contains("avocado-img-rootfs: \"*\"")); - assert!(content.contains("avocado-img-initramfs: \"*\"")); + assert!(content.contains("packages:")); + assert!(content.contains("avocado-img-bootfiles:")); + assert!(content.contains("avocado-img-rootfs:")); + assert!(content.contains("avocado-img-initramfs:")); assert!(content.contains("avocado-ext-dev:")); - assert!(content.contains("ext: avocado-ext-dev")); - assert!(content.contains("vsn: \"*\"")); + assert!(content.contains("type: package")); assert!( content.contains("image: \"docker.io/avocadolinux/sdk:{{ config.distro.channel }}\"") ); - assert!(content.contains("ext:")); + assert!(content.contains("extensions:")); assert!(content.contains("app:")); assert!(content.contains("- sysext")); assert!(content.contains("- confext")); diff --git a/src/commands/install.rs b/src/commands/install.rs index 8d1e7d9..715f7a5 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -20,16 +20,15 @@ use crate::utils::{ /// Represents an extension dependency #[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[allow(dead_code)] // Deprecated variants kept for backward compatibility pub enum ExtensionDependency { /// Extension defined in the config (local or fetched remote) Local(String), /// DEPRECATED: Extension from an external config file - /// Use source: path in the ext section instead - #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] + /// Use source: path in the extensions section instead External { name: String, config_path: String }, /// DEPRECATED: Extension resolved via DNF with a version specification - 
/// Use source: repo in the ext section instead - #[deprecated(since = "0.23.0", note = "Use Local with source: repo instead")] + /// Use source: package in the extensions section instead Versioned { name: String, version: String }, } @@ -225,10 +224,10 @@ impl InstallCommand { return Err(anyhow::anyhow!( "Deprecated 'vsn:' syntax found for extension '{name}' version '{version}'.\n\n\ The 'vsn:' syntax for versioned extensions is no longer supported.\n\n\ - To use remote extensions, define them in the 'ext' section with a 'source' field:\n\n\ - ext:\n {name}:\n source:\n type: repo\n version: \"{version}\"\n\n\ - Then reference the extension in runtime dependencies simply by name:\n\n\ - runtime:\n your-runtime:\n dependencies:\n {name}: ext\n\n\ + To use remote extensions, define them in the 'extensions' section with a 'source' field:\n\n\ + extensions:\n {name}:\n source:\n type: package\n version: \"{version}\"\n\n\ + Then reference the extension in runtimes packages simply by name:\n\n\ + runtimes:\n your-runtime:\n packages:\n {name}: ext\n\n\ Remote extensions are automatically fetched during 'avocado sdk install' or\n\ can be manually fetched with 'avocado ext fetch'." 
)); @@ -304,7 +303,6 @@ impl InstallCommand { use std::collections::HashSet; let mut required_extensions = HashSet::new(); - let mut visited = HashSet::new(); // For cycle detection let config = &composed.config; let parsed = &composed.merged_value; @@ -321,7 +319,7 @@ impl InstallCommand { ); } // If no runtimes match this target, install all local extensions - if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = parsed.get("extensions").and_then(|e| e.as_mapping()) { for ext_name_val in ext_section.keys() { if let Some(ext_name) = ext_name_val.as_str() { required_extensions @@ -331,56 +329,22 @@ impl InstallCommand { } } else { // Only install extensions needed by the target-relevant runtimes - if let Some(runtime_section) = parsed.get("runtime").and_then(|r| r.as_mapping()) { + if let Some(runtime_section) = parsed.get("runtimes").and_then(|r| r.as_mapping()) { for runtime_name in &target_runtimes { if let Some(_runtime_config) = runtime_section.get(runtime_name) { // Check both base dependencies and target-specific dependencies let merged_runtime = config.get_merged_runtime_config(runtime_name, target, config_path)?; if let Some(merged_value) = merged_runtime { - if let Some(dependencies) = merged_value - .get("dependencies") - .and_then(|d| d.as_mapping()) + // NEW FORMAT: Extensions are listed directly under runtimes..extensions + if let Some(extensions_list) = + merged_value.get("extensions").and_then(|e| e.as_sequence()) { - for (_dep_name, dep_spec) in dependencies { - // Check for extension dependency - if let Some(ext_name) = - dep_spec.get("ext").and_then(|v| v.as_str()) - { - // Check if this is a versioned extension (has vsn field) - if let Some(version) = - dep_spec.get("vsn").and_then(|v| v.as_str()) - { - let ext_dep = ExtensionDependency::Versioned { - name: ext_name.to_string(), - version: version.to_string(), - }; - required_extensions.insert(ext_dep); - } - // Check if this is an external 
extension (has config field) - else if let Some(external_config) = - dep_spec.get("config").and_then(|v| v.as_str()) - { - let ext_dep = ExtensionDependency::External { - name: ext_name.to_string(), - config_path: external_config.to_string(), - }; - required_extensions.insert(ext_dep.clone()); - - // Recursively find nested external extension dependencies - self.find_nested_external_extensions( - config, - config_path, - &ext_dep, - &mut required_extensions, - &mut visited, - )?; - } else { - // Local extension - required_extensions.insert(ExtensionDependency::Local( - ext_name.to_string(), - )); - } + for ext_val in extensions_list { + if let Some(ext_name) = ext_val.as_str() { + required_extensions.insert(ExtensionDependency::Local( + ext_name.to_string(), + )); } } } @@ -407,124 +371,6 @@ impl InstallCommand { Ok(extensions) } - /// Recursively find nested external extension dependencies - fn find_nested_external_extensions( - &self, - config: &Config, - base_config_path: &str, - ext_dep: &ExtensionDependency, - required_extensions: &mut std::collections::HashSet, - visited: &mut std::collections::HashSet, - ) -> Result<()> { - let (ext_name, ext_config_path) = match ext_dep { - ExtensionDependency::External { name, config_path } => (name, config_path), - ExtensionDependency::Local(_) => return Ok(()), // Local extensions don't have nested external deps - ExtensionDependency::Versioned { .. 
} => return Ok(()), // Versioned extensions don't have nested deps - }; - - // Cycle detection: check if we've already processed this extension - let ext_key = format!("{ext_name}:{ext_config_path}"); - if visited.contains(&ext_key) { - if self.verbose { - print_info( - &format!("Skipping already processed extension '{ext_name}' to avoid cycles"), - OutputLevel::Normal, - ); - } - return Ok(()); - } - visited.insert(ext_key); - - // Load the external extension configuration - let resolved_external_config_path = - config.resolve_path_relative_to_src_dir(base_config_path, ext_config_path); - let external_extensions = - config.load_external_extensions(base_config_path, ext_config_path)?; - - let extension_config = external_extensions.get(ext_name).ok_or_else(|| { - anyhow::anyhow!( - "Extension '{ext_name}' not found in external config file '{ext_config_path}'" - ) - })?; - - // Load the nested config file to get its src_dir setting - let nested_config_content = std::fs::read_to_string(&resolved_external_config_path) - .with_context(|| { - format!( - "Failed to read nested config file: {}", - resolved_external_config_path.display() - ) - })?; - let nested_config: serde_yaml::Value = serde_yaml::from_str(&nested_config_content) - .with_context(|| { - format!( - "Failed to parse nested config file: {}", - resolved_external_config_path.display() - ) - })?; - - // Create a temporary Config object for the nested config to handle its src_dir - let nested_config_obj = serde_yaml::from_value::(nested_config.clone())?; - - // Check if this external extension has dependencies - if let Some(dependencies) = extension_config - .get("dependencies") - .and_then(|d| d.as_mapping()) - { - for (_dep_name, dep_spec) in dependencies { - // Check for nested extension dependency - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { - // Check if this is a nested external extension (has config field) - if let Some(nested_external_config) = - 
dep_spec.get("config").and_then(|v| v.as_str()) - { - // Resolve the nested config path relative to the nested config's src_dir - let nested_config_path = nested_config_obj - .resolve_path_relative_to_src_dir( - &resolved_external_config_path, - nested_external_config, - ); - - let nested_ext_dep = ExtensionDependency::External { - name: nested_ext_name.to_string(), - config_path: nested_config_path.to_string_lossy().to_string(), - }; - - // Add the nested extension to required extensions - required_extensions.insert(nested_ext_dep.clone()); - - if self.verbose { - print_info( - &format!("Found nested external extension '{nested_ext_name}' required by '{ext_name}' at '{}'", nested_config_path.display()), - OutputLevel::Normal, - ); - } - - // Recursively process the nested extension - self.find_nested_external_extensions( - config, - base_config_path, - &nested_ext_dep, - required_extensions, - visited, - )?; - } else { - // This is a local extension dependency within the external config - // We don't need to process it further as it will be handled during installation - if self.verbose { - print_info( - &format!("Found local extension dependency '{nested_ext_name}' in external extension '{ext_name}'"), - OutputLevel::Normal, - ); - } - } - } - } - } - - Ok(()) - } - /// Find runtimes that are relevant for the specified target fn find_target_relevant_runtimes( &self, @@ -534,7 +380,7 @@ impl InstallCommand { ) -> Result> { let mut relevant_runtimes = Vec::new(); - if let Some(runtime_section) = parsed.get("runtime").and_then(|r| r.as_mapping()) { + if let Some(runtime_section) = parsed.get("runtimes").and_then(|r| r.as_mapping()) { for runtime_name_val in runtime_section.keys() { if let Some(runtime_name) = runtime_name_val.as_str() { // If a specific runtime is requested, only check that one @@ -703,7 +549,7 @@ impl InstallCommand { // Process the extension's dependencies (packages, not extension or compile dependencies) let sysroot = 
SysrootType::Extension(extension_name.to_string()); - if let Some(serde_yaml::Value::Mapping(deps_map)) = extension_config.get("dependencies") { + if let Some(serde_yaml::Value::Mapping(deps_map)) = extension_config.get("packages") { if !deps_map.is_empty() { let mut packages = Vec::new(); let mut package_names = Vec::new(); @@ -719,7 +565,7 @@ impl InstallCommand { // Skip non-package dependencies (extension or compile dependencies) if let serde_yaml::Value::Mapping(spec_map) = version_spec { // Skip extension dependencies (they have "ext" field) - handled by recursive logic - if spec_map.get("ext").is_some() { + if spec_map.get("extensions").is_some() { continue; } // Skip compile dependencies (they have "compile" field) - SDK-compiled, not from repo @@ -1164,7 +1010,7 @@ $DNF_SDK_HOST \ // Check if the external config has SDK dependencies let sdk_deps = external_config .get("sdk") - .and_then(|sdk| sdk.get("dependencies")) + .and_then(|sdk| sdk.get("packages")) .and_then(|deps| deps.as_mapping()); let Some(sdk_deps_map) = sdk_deps else { diff --git a/src/commands/runtime/build.rs b/src/commands/runtime/build.rs index fed254a..628ae99 100644 --- a/src/commands/runtime/build.rs +++ b/src/commands/runtime/build.rs @@ -88,7 +88,7 @@ impl RuntimeBuildCommand { // Get runtime configuration let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; // Check if runtime exists @@ -427,7 +427,7 @@ impl RuntimeBuildCommand { let mut processed_extensions = HashSet::new(); // Process local extensions defined in [ext.*] sections - if let Some(ext_config) = parsed.get("ext").and_then(|v| v.as_mapping()) { + if let Some(ext_config) = parsed.get("extensions").and_then(|v| v.as_mapping()) { for (ext_name_val, ext_data) in ext_config { if let Some(ext_name) = ext_name_val.as_str() { // Only process extensions that are required by this runtime @@ -657,7 +657,7 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME // Check if this is a local 
extension defined in the ext section // Extension source configuration (repo, git, path) is now in the ext section if let Some(ext_config) = parsed - .get("ext") + .get("extensions") .and_then(|e| e.as_mapping()) .and_then(|table| table.get(ext_name)) { @@ -709,7 +709,7 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME .and_then(|value| value.get("extensions").and_then(|e| e.as_sequence())) .or_else(|| { parsed - .get("runtime") + .get("runtimes") .and_then(|r| r.get(runtime_name)) .and_then(|runtime_value| runtime_value.get("extensions")) .and_then(|e| e.as_sequence()) @@ -758,7 +758,7 @@ avocado-build-$TARGET_ARCH $RUNTIME_NAME ) -> Result { // Try to get version from local [ext] section if let Some(version) = parsed - .get("ext") + .get("extensions") .and_then(|ext_section| ext_section.as_mapping()) .and_then(|ext_table| ext_table.get(ext_name)) .and_then(|ext_config| ext_config.get("version")) @@ -892,10 +892,10 @@ mod tests { sdk: image: "test-image" -runtime: +runtimes: test-runtime: target: "x86_64" - dependencies: + packages: test-dep: ext: test-ext "#; @@ -930,13 +930,13 @@ runtime: sdk: image: "test-image" -runtime: +runtimes: test-runtime: target: "x86_64" extensions: - test-ext -ext: +extensions: test-ext: version: "1.0.0" types: @@ -972,13 +972,13 @@ ext: sdk: image: "test-image" -runtime: +runtimes: test-runtime: target: "x86_64" extensions: - test-ext -ext: +extensions: test-ext: version: "1.0.0" types: @@ -1017,13 +1017,13 @@ ext: sdk: image: "test-image" -runtime: +runtimes: test-runtime: target: "x86_64" extensions: - test-ext -ext: +extensions: test-ext: version: "1.0.0" types: diff --git a/src/commands/runtime/clean.rs b/src/commands/runtime/clean.rs index d6b5539..6c9c9f0 100644 --- a/src/commands/runtime/clean.rs +++ b/src/commands/runtime/clean.rs @@ -54,7 +54,7 @@ impl RuntimeCleanCommand { } fn validate_runtime_exists(&self, parsed: &serde_yaml::Value) -> Result<()> { - let runtime_section = parsed.get("runtime").ok_or_else(|| { + let 
runtime_section = parsed.get("runtimes").ok_or_else(|| { print_error( &format!("Runtime '{}' not found in configuration.", self.runtime), OutputLevel::Normal, diff --git a/src/commands/runtime/deploy.rs b/src/commands/runtime/deploy.rs index be5a271..6ac8c18 100644 --- a/src/commands/runtime/deploy.rs +++ b/src/commands/runtime/deploy.rs @@ -68,7 +68,7 @@ impl RuntimeDeployCommand { // Get runtime configuration let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; // Check if runtime exists diff --git a/src/commands/runtime/deps.rs b/src/commands/runtime/deps.rs index 9b465a3..bb2b4d5 100644 --- a/src/commands/runtime/deps.rs +++ b/src/commands/runtime/deps.rs @@ -36,7 +36,7 @@ impl RuntimeDepsCommand { fn validate_runtime_exists(&self, parsed: &serde_yaml::Value) -> Result<()> { let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; runtime_config.get(&self.runtime_name).with_context(|| { @@ -58,7 +58,7 @@ impl RuntimeDepsCommand { runtime_name: &str, ) -> Result> { let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; let runtime_spec = runtime_config @@ -77,10 +77,7 @@ impl RuntimeDepsCommand { } // Read package dependencies from the `dependencies` section - if let Some(deps_table) = runtime_spec - .get("dependencies") - .and_then(|v| v.as_mapping()) - { + if let Some(deps_table) = runtime_spec.get("packages").and_then(|v| v.as_mapping()) { for (dep_name_val, dep_spec) in deps_table { if let Some(dep_name) = dep_name_val.as_str() { dependencies.push(self.resolve_package_dependency(dep_name, dep_spec)); @@ -98,7 +95,7 @@ impl RuntimeDepsCommand { ext_name: &str, ) -> (String, String, String) { let version = parsed - .get("ext") + .get("extensions") .and_then(|ext_config| ext_config.as_mapping()) .and_then(|ext_table| ext_table.get(ext_name)) .and_then(|ext_spec| ext_spec.get("version")) @@ -153,15 
+150,15 @@ mod tests { sdk: image: "test-image" -runtime: +runtimes: test-runtime: target: "x86_64" extensions: - my-extension - dependencies: + packages: gcc: "11.0" -ext: +extensions: my-extension: version: "2.0.0" types: diff --git a/src/commands/runtime/dnf.rs b/src/commands/runtime/dnf.rs index d0c38c0..ff1f96c 100644 --- a/src/commands/runtime/dnf.rs +++ b/src/commands/runtime/dnf.rs @@ -72,7 +72,7 @@ impl RuntimeDnfCommand { } fn validate_runtime_exists(&self, parsed: &serde_yaml::Value) -> Result<()> { - let runtime_section = parsed.get("runtime").ok_or_else(|| { + let runtime_section = parsed.get("runtimes").ok_or_else(|| { print_error( &format!("Runtime '{}' not found in configuration.", self.runtime), OutputLevel::Normal, diff --git a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index 3daddb4..31ff84f 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -84,7 +84,7 @@ impl RuntimeInstallCommand { let repo_release = config.get_sdk_repo_release(); // Check if runtime section exists - let runtime_section = match parsed.get("runtime") { + let runtime_section = match parsed.get("runtimes") { Some(runtime) => runtime, None => { if self.runtime.is_some() { @@ -392,7 +392,7 @@ impl RuntimeInstallCommand { config.get_merged_runtime_config(runtime, &target_arch, &self.config_path)?; let dependencies = merged_runtime .as_ref() - .and_then(|merged| merged.get("dependencies")); + .and_then(|merged| merged.get("packages")); let sysroot = SysrootType::Runtime(runtime.to_string()); @@ -653,7 +653,7 @@ sdk: sdk: image: "test-image" -runtime: +runtimes: other-runtime: target: "x86_64" "#; @@ -678,10 +678,10 @@ runtime: async fn test_execute_no_sdk_config() { let temp_dir = TempDir::new().unwrap(); let config_content = r#" -runtime: +runtimes: test-runtime: target: "x86_64" - dependencies: + packages: gcc: "11.0" "#; let config_path = create_test_config_file(&temp_dir, config_content); @@ -712,10 +712,10 @@ runtime: 
sdk: # Missing image field -runtime: +runtimes: test-runtime: target: "x86_64" - dependencies: + packages: gcc: "11.0" "#; let config_path = create_test_config_file(&temp_dir, config_content); diff --git a/src/commands/runtime/list.rs b/src/commands/runtime/list.rs index 77d9961..a22a473 100644 --- a/src/commands/runtime/list.rs +++ b/src/commands/runtime/list.rs @@ -20,7 +20,7 @@ impl RuntimeListCommand { let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; // Check if runtime section exists - if let Some(runtime_config) = parsed.get("runtime").and_then(|v| v.as_mapping()) { + if let Some(runtime_config) = parsed.get("runtimes").and_then(|v| v.as_mapping()) { // List all runtime names let mut runtimes: Vec = runtime_config .keys() @@ -69,7 +69,7 @@ mod tests { sdk: image: "test-image" -runtime: +runtimes: app: target: "x86_64" server: diff --git a/src/commands/runtime/provision.rs b/src/commands/runtime/provision.rs index 37ea126..694dbac 100644 --- a/src/commands/runtime/provision.rs +++ b/src/commands/runtime/provision.rs @@ -70,7 +70,7 @@ impl RuntimeProvisionCommand { // Get runtime configuration let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; // Check if runtime exists @@ -458,7 +458,7 @@ impl RuntimeProvisionCommand { // Get checksum algorithm (defaults to sha256) let checksum_str = config - .runtime + .runtimes .as_ref() .and_then(|r| r.get(&self.config.runtime_name)) .and_then(|rc| rc.signing.as_ref()) @@ -793,12 +793,12 @@ avocado-provision-{} {} let runtime_dep_table = merged_runtime .as_ref() - .and_then(|value| value.get("dependencies").and_then(|d| d.as_mapping())) + .and_then(|value| value.get("packages").and_then(|d| d.as_mapping())) .or_else(|| { parsed - .get("runtime") + .get("runtimes") .and_then(|r| r.get(runtime_name)) - .and_then(|runtime_value| runtime_value.get("dependencies")) + .and_then(|runtime_value| runtime_value.get("packages")) .and_then(|d| d.as_mapping()) 
}); @@ -806,7 +806,7 @@ avocado-provision-{} {} if let Some(deps) = runtime_dep_table { for dep_spec in deps.values() { - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) { let version = self .resolve_extension_version( parsed, @@ -868,7 +868,7 @@ avocado-provision-{} {} // Try to get version from local [ext] section if let Some(version) = parsed - .get("ext") + .get("extensions") .and_then(|ext_section| ext_section.as_mapping()) .and_then(|ext_table| ext_table.get(ext_name)) .and_then(|ext_config| ext_config.get("version")) @@ -1010,70 +1010,8 @@ mod tests { assert!(script.contains("Running SDK lifecycle hook 'avocado-provision'")); } - #[tokio::test] - async fn test_collect_runtime_extensions() { - use std::fs; - use tempfile::TempDir; - - let config_content = r#" -sdk: - image: "docker.io/avocado/sdk:latest" - -runtime: - test-runtime: - dependencies: - ext_one: - ext: alpha-ext - vsn: "1.0.0" - ext_two: - ext: beta-ext - vsn: "2.0.0" - "#; - - let temp_dir = TempDir::new().unwrap(); - let config_path = temp_dir.path().join("avocado.yaml"); - fs::write(&config_path, config_content).unwrap(); - - let parsed: serde_yaml::Value = serde_yaml::from_str(config_content).unwrap(); - let config = crate::utils::config::Config::load(&config_path).unwrap(); - - let provision_config = RuntimeProvisionConfig { - runtime_name: "test-runtime".to_string(), - config_path: config_path.to_str().unwrap().to_string(), - verbose: false, - force: false, - target: Some("x86_64".to_string()), - provision_profile: None, - env_vars: None, - out: None, - container_args: None, - dnf_args: None, - state_file: None, - no_stamps: false, - runs_on: None, - nfs_port: None, - sdk_arch: None, - }; - - let command = RuntimeProvisionCommand::new(provision_config); - - let extensions = command - .collect_runtime_extensions( - &parsed, - &config, - "test-runtime", - "x86_64", - 
config_path.to_str().unwrap(), - "docker.io/avocado/sdk:latest", - ) - .await - .unwrap(); - - assert_eq!( - extensions, - vec!["alpha-ext-1.0.0".to_string(), "beta-ext-2.0.0".to_string()] - ); - } + // NOTE: test_collect_runtime_extensions was removed as it tested the deprecated + // ext:/vsn: format inside runtime packages. The new format uses an extensions array. #[test] fn test_new_with_container_args() { diff --git a/src/commands/runtime/sign.rs b/src/commands/runtime/sign.rs index 8674787..16a433e 100644 --- a/src/commands/runtime/sign.rs +++ b/src/commands/runtime/sign.rs @@ -126,7 +126,7 @@ impl RuntimeSignCommand { // Verify runtime exists let runtime_config = parsed - .get("runtime") + .get("runtimes") .context("No runtime configuration found")?; runtime_config.get(&self.runtime_name).with_context(|| { @@ -145,13 +145,13 @@ impl RuntimeSignCommand { let binding = serde_yaml::Mapping::new(); let runtime_deps = merged_runtime - .get("dependencies") + .get("packages") .and_then(|v| v.as_mapping()) .unwrap_or(&binding); let mut required_extensions = HashSet::new(); for (_dep_name, dep_spec) in runtime_deps { - if let Some(ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) { required_extensions.insert(ext_name.to_string()); } } @@ -253,15 +253,16 @@ impl RuntimeSignCommand { // Check if this is a local extension if let Some(ext_config) = parsed - .get("ext") + .get("extensions") .and_then(|e| e.as_mapping()) .and_then(|table| table.get(ext_name)) { // This is a local extension - check its dependencies - if let Some(dependencies) = ext_config.get("dependencies").and_then(|d| d.as_mapping()) - { + if let Some(dependencies) = ext_config.get("packages").and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in dependencies { - if let Some(nested_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(nested_ext_name) = + dep_spec.get("extensions").and_then(|v| 
v.as_str()) + { // Check if this is an external extension dependency if let Some(external_config_path) = dep_spec.get("config").and_then(|v| v.as_str()) @@ -284,11 +285,12 @@ impl RuntimeSignCommand { // Process its dependencies from the external config if let Some(ext_config) = external_extensions.get(nested_ext_name) { if let Some(nested_deps) = - ext_config.get("dependencies").and_then(|d| d.as_mapping()) + ext_config.get("packages").and_then(|d| d.as_mapping()) { for (_nested_dep_name, nested_dep_spec) in nested_deps { - if let Some(nested_nested_ext_name) = - nested_dep_spec.get("ext").and_then(|v| v.as_str()) + if let Some(nested_nested_ext_name) = nested_dep_spec + .get("extensions") + .and_then(|v| v.as_str()) { self.collect_extension_dependencies( config, @@ -326,12 +328,11 @@ impl RuntimeSignCommand { ) })?; - if let Some(runtime_deps) = merged_runtime - .get("dependencies") - .and_then(|v| v.as_mapping()) + if let Some(runtime_deps) = merged_runtime.get("packages").and_then(|v| v.as_mapping()) { for (_dep_name, dep_spec) in runtime_deps { - if let Some(dep_ext_name) = dep_spec.get("ext").and_then(|v| v.as_str()) { + if let Some(dep_ext_name) = dep_spec.get("extensions").and_then(|v| v.as_str()) + { if dep_ext_name == ext_name { if let Some(external_config_path) = dep_spec.get("config").and_then(|v| v.as_str()) @@ -344,11 +345,12 @@ impl RuntimeSignCommand { if let Some(ext_config) = external_extensions.get(ext_name) { if let Some(nested_deps) = - ext_config.get("dependencies").and_then(|d| d.as_mapping()) + ext_config.get("packages").and_then(|d| d.as_mapping()) { for (_nested_dep_name, nested_dep_spec) in nested_deps { - if let Some(nested_ext_name) = - nested_dep_spec.get("ext").and_then(|v| v.as_str()) + if let Some(nested_ext_name) = nested_dep_spec + .get("extensions") + .and_then(|v| v.as_str()) { self.collect_extension_dependencies( config, @@ -412,7 +414,7 @@ impl RuntimeSignCommand { // Get checksum algorithm (defaults to sha256) let 
checksum_str = config - .runtime + .runtimes .as_ref() .and_then(|r| r.get(&self.runtime_name)) .and_then(|rc| rc.signing.as_ref()) diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index ef3c11f..f23bc6e 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -349,16 +349,17 @@ mod tests { let cmd = SdkCompileCommand::new("test.yaml".to_string(), false, vec![], None, None, None); let config_content = r#" -[sdk] -image = "test-image" - -[sdk.compile.app] -compile = "build.sh" -dependencies = { gcc = "*" } - -[sdk.compile.library] -compile = "lib_build.sh" -dependencies = { make = "*" } +sdk: + image: "test-image" + compile: + app: + compile: "build.sh" + packages: + gcc: "*" + library: + compile: "lib_build.sh" + packages: + make: "*" "#; let mut temp_file = NamedTempFile::new().unwrap(); write!(temp_file, "{config_content}").unwrap(); @@ -417,7 +418,7 @@ dependencies = { gcc = "*" } let section_config = crate::utils::config::CompileConfig { compile: Some("my_script.sh".to_string()), - dependencies: Some(deps), + packages: Some(deps), }; let script = cmd.find_compile_script_in_section(§ion_config); @@ -426,7 +427,7 @@ dependencies = { gcc = "*" } // Test section with no compile script let section_config_no_script = crate::utils::config::CompileConfig { compile: None, - dependencies: None, + packages: None, }; let script = cmd.find_compile_script_in_section(§ion_config_no_script); diff --git a/src/commands/sdk/deps.rs b/src/commands/sdk/deps.rs index 1cfecc4..3a2d6d0 100644 --- a/src/commands/sdk/deps.rs +++ b/src/commands/sdk/deps.rs @@ -209,7 +209,7 @@ impl SdkDepsCommand { } // Try extension reference - if let Some(serde_yaml::Value::String(ext_name)) = table.get("ext") { + if let Some(serde_yaml::Value::String(ext_name)) = table.get("extensions") { let version = self.get_extension_version(config, ext_name); return vec![("ext".to_string(), ext_name.clone(), version)]; } @@ -240,7 +240,7 @@ impl SdkDepsCommand { .as_ref() 
.and_then(|sdk| sdk.compile.as_ref()) .and_then(|compile| compile.get(compile_name)) - .and_then(|compile_config| compile_config.dependencies.as_ref()); + .and_then(|compile_config| compile_config.packages.as_ref()); let Some(deps) = compile_deps else { return Vec::new(); @@ -287,11 +287,10 @@ mod tests { // Create a minimal config for testing let config_content = r#" -[sdk] -image = "test-image" - -[sdk.dependencies] -cmake = "*" +sdk: + image: "test-image" + packages: + cmake: "*" "#; let mut temp_file = NamedTempFile::new().unwrap(); write!(temp_file, "{config_content}").unwrap(); @@ -342,12 +341,12 @@ cmake = "*" let config_content = r#" sdk: image: "test-image" - dependencies: + packages: cmake: "*" gcc: "11.0.0" compile: app: - dependencies: + packages: make: "4.3" "#; let mut temp_file = tempfile::Builder::new().suffix(".yaml").tempfile().unwrap(); @@ -384,16 +383,16 @@ sdk: let config_content = r#" sdk: image: "test-image" - dependencies: + packages: cmake: "*" -ext: +extensions: avocado-dev: types: - sysext - confext sdk: - dependencies: + packages: nativesdk-avocado-hitl: "*" avocado-dev1: @@ -401,7 +400,7 @@ ext: - sysext - confext sdk: - dependencies: + packages: nativesdk-avocado-hitl: "*" "#; let mut temp_file = tempfile::Builder::new().suffix(".yaml").tempfile().unwrap(); diff --git a/src/commands/sdk/run.rs b/src/commands/sdk/run.rs index 950672b..01cca63 100644 --- a/src/commands/sdk/run.rs +++ b/src/commands/sdk/run.rs @@ -147,7 +147,7 @@ impl SdkRunCommand { // Get checksum algorithm (defaults to sha256) let checksum_str = config - .runtime + .runtimes .as_ref() .and_then(|r| r.get(runtime_name)) .and_then(|rc| rc.signing.as_ref()) diff --git a/src/commands/sign.rs b/src/commands/sign.rs index 9273554..c9cd48d 100644 --- a/src/commands/sign.rs +++ b/src/commands/sign.rs @@ -96,7 +96,7 @@ impl SignCommand { let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; let runtime_section = parsed - .get("runtime") + .get("runtimes") 
.and_then(|r| r.as_mapping()) .ok_or_else(|| anyhow::anyhow!("No runtime configuration found"))?; diff --git a/src/commands/unlock.rs b/src/commands/unlock.rs index 62f4597..cfc71b7 100644 --- a/src/commands/unlock.rs +++ b/src/commands/unlock.rs @@ -178,10 +178,10 @@ mod tests { default_target: "qemux86-64" sdk: image: "test-image" -ext: +extensions: my-app: version: "1.0.0" -runtime: +runtimes: dev: target: "qemux86-64" "#; diff --git a/src/main.rs b/src/main.rs index 8dd72bc..21c2025 100644 --- a/src/main.rs +++ b/src/main.rs @@ -230,7 +230,7 @@ enum Commands { #[arg(short, long)] target: Option, /// Provision profile to use - #[arg(long = "provision-profile")] + #[arg(long = "profile")] provision_profile: Option, /// Environment variables to pass to the provision process #[arg(long = "env", num_args = 1, action = clap::ArgAction::Append)] @@ -580,7 +580,7 @@ enum RuntimeCommands { #[arg(short, long)] target: Option, /// Provision profile to use - #[arg(long = "provision-profile")] + #[arg(long = "profile")] provision_profile: Option, /// Environment variables to pass to the provision process #[arg(long = "env", num_args = 1, action = clap::ArgAction::Append)] diff --git a/src/utils/config.rs b/src/utils/config.rs index 228986b..bddf3f9 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -1,8 +1,5 @@ //! Configuration utilities for Avocado CLI. -// Allow deprecated variants for backward compatibility during migration -#![allow(deprecated)] - use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -10,18 +7,6 @@ use std::env; use std::fs; use std::path::{Path, PathBuf}; -// ============================================================================= -// DEPRECATION NOTE: TOML Support (Pre-1.0.0) -// ============================================================================= -// TOML configuration file support is DEPRECATED and maintained only for -// backward compatibility and migration purposes. 
The default format is now YAML. -// -// TOML support will be removed before the 1.0.0 release. -// -// Migration: When a legacy avocado.toml file is detected, it will be -// automatically converted to avocado.yaml format. -// ============================================================================= - /// Custom deserializer module for container_args mod container_args_deserializer { use serde::{Deserialize, Deserializer}; @@ -113,10 +98,10 @@ pub enum ExtensionLocation { /// Extension defined in the main config file Local { name: String, config_path: String }, /// DEPRECATED: Extension from an external config file - /// Use source: path in the ext section instead + /// Use source: path in the extensions section instead #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] External { name: String, config_path: String }, - /// Remote extension fetched from a source (repo, git, or path) + /// Remote extension fetched from a source (package, git, or path) Remote { name: String, source: ExtensionSource, @@ -128,7 +113,8 @@ pub enum ExtensionLocation { #[serde(tag = "type", rename_all = "lowercase")] pub enum ExtensionSource { /// Extension from the avocado package repository - Repo { + #[serde(alias = "repo")] + Package { /// Version to fetch (e.g., "0.1.0" or "*") version: String, /// Optional RPM package name (defaults to extension name if not specified) @@ -138,7 +124,7 @@ pub enum ExtensionSource { #[serde(skip_serializing_if = "Option::is_none")] repo_name: Option, /// Optional list of config sections to include from the remote extension. - /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*"). + /// Supports dot-separated paths (e.g., "provision_profiles.tegraflash") and wildcards (e.g., "provision_profiles.*"). /// The extension's own `ext.` section is always included. /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies. 
#[serde(default, skip_serializing_if = "Option::is_none")] @@ -155,7 +141,7 @@ pub enum ExtensionSource { #[serde(skip_serializing_if = "Option::is_none")] sparse_checkout: Option>, /// Optional list of config sections to include from the remote extension. - /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*"). + /// Supports dot-separated paths (e.g., "provision_profiles.tegraflash") and wildcards (e.g., "provision_profiles.*"). /// The extension's own `ext.` section is always included. /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies. #[serde(default, skip_serializing_if = "Option::is_none")] @@ -166,7 +152,7 @@ pub enum ExtensionSource { /// Path to the extension directory (relative to config or absolute) path: String, /// Optional list of config sections to include from the remote extension. - /// Supports dot-separated paths (e.g., "provision.tegraflash") and wildcards (e.g., "provision.*"). + /// Supports dot-separated paths (e.g., "provision_profiles.tegraflash") and wildcards (e.g., "provision_profiles.*"). /// The extension's own `ext.` section is always included. /// Referenced `sdk.compile.*` sections are auto-included based on compile dependencies. #[serde(default, skip_serializing_if = "Option::is_none")] @@ -179,7 +165,7 @@ impl ExtensionSource { /// Returns an empty slice if no include patterns are specified. pub fn get_include_patterns(&self) -> &[String] { match self { - ExtensionSource::Repo { include, .. } => { + ExtensionSource::Package { include, .. } => { include.as_ref().map(|v| v.as_slice()).unwrap_or(&[]) } ExtensionSource::Git { include, .. } => { @@ -194,8 +180,8 @@ impl ExtensionSource { /// Check if a config path matches any of the include patterns. /// /// Supports: - /// - Exact matches: "provision.tegraflash" matches "provision.tegraflash" - /// - Wildcard suffix: "provision.*" matches "provision.tegraflash", "provision.usb", etc. 
+ /// - Exact matches: "provision_profiles.tegraflash" matches "provision_profiles.tegraflash" + /// - Wildcard suffix: "provision_profiles.*" matches "provision_profiles.tegraflash", "provision_profiles.usb", etc. /// /// Returns true if the path matches at least one include pattern. pub fn matches_include_pattern(config_path: &str, patterns: &[String]) -> bool { @@ -216,7 +202,6 @@ impl ExtensionSource { } false } - } /// Represents an extension dependency for a runtime with type information @@ -225,24 +210,13 @@ impl ExtensionSource { pub enum RuntimeExtDep { /// Extension defined in the config (local or fetched remote) Local(String), - /// DEPRECATED: Extension from an external config file - /// Use source: path in the ext section instead - #[deprecated(since = "0.23.0", note = "Use Local with source: path instead")] - External { name: String, config_path: String }, - /// DEPRECATED: Prebuilt extension from package repo - /// Use source: repo in the ext section instead - #[deprecated(since = "0.23.0", note = "Use Local with source: repo instead")] - Versioned { name: String, version: String }, } impl RuntimeExtDep { /// Get the extension name - #[allow(deprecated)] pub fn name(&self) -> &str { match self { RuntimeExtDep::Local(name) => name, - RuntimeExtDep::External { name, .. } => name, - RuntimeExtDep::Versioned { name, .. } => name, } } } @@ -252,7 +226,7 @@ impl RuntimeExtDep { /// This struct provides a unified view where: /// - `distro`, `default_target`, `supported_targets` come from the main config only /// - `ext` sections are merged from both main and external configs -/// - `sdk.dependencies` and `sdk.compile` are merged from both main and external configs +/// - `sdk.packages` and `sdk.compile` are merged from both main and external configs /// /// Interpolation is applied after merging, so external configs can reference /// `{{ config.distro.version }}` and resolve to the main config's values. 
@@ -395,7 +369,8 @@ pub struct RuntimeConfig { #[derive(Debug, Clone, Deserialize, Serialize, Default)] pub struct SdkConfig { pub image: Option, - pub dependencies: Option>, + #[serde(alias = "dependencies")] + pub packages: Option>, pub compile: Option>, pub repo_url: Option, pub repo_release: Option, @@ -412,7 +387,8 @@ pub struct SdkConfig { #[derive(Debug, Clone, Deserialize, Serialize)] pub struct CompileConfig { pub compile: Option, - pub dependencies: Option>, + #[serde(alias = "dependencies")] + pub packages: Option>, } /// Provision profile configuration @@ -478,9 +454,11 @@ pub struct Config { pub supported_targets: Option, pub src_dir: Option, pub distro: Option, - pub runtime: Option>, + #[serde(alias = "runtime")] + pub runtimes: Option>, pub sdk: Option, - pub provision: Option>, + #[serde(alias = "provision")] + pub provision_profiles: Option>, /// Signing keys mapping friendly names to key IDs /// Acts as a local bridge between the config and the global signing keys registry #[serde(default, deserialize_with = "signing_keys_deserializer::deserialize")] @@ -496,7 +474,7 @@ impl Config { /// - For named sections: [section_type.name] + [section_type.name.] 
/// /// # Arguments - /// * `section_path` - The base section path (e.g., "sdk", "runtime.prod", "ext.avocado-dev") + /// * `section_path` - The base section path (e.g., "sdk", "runtimes.prod", "extensions.avocado-dev") /// * `target` - The target architecture /// * `config_path` - Path to the configuration file for raw TOML access /// @@ -547,19 +525,10 @@ impl Config { } } - /// Parse a config file content into a YAML value (supports both YAML and TOML) + /// Parse a config file content into a YAML value fn parse_config_value(path: &str, content: &str) -> Result { - let is_yaml = path.ends_with(".yaml") || path.ends_with(".yml"); - - if is_yaml { - serde_yaml::from_str(content) - .with_context(|| format!("Failed to parse config file: {path}")) - } else { - // DEPRECATED: Parse TOML and convert to YAML value - let toml_val: toml::Value = toml::from_str(content) - .with_context(|| format!("Failed to parse config file: {path}"))?; - Self::toml_to_yaml(&toml_val) - } + serde_yaml::from_str(content) + .with_context(|| format!("Failed to parse config file: {path}")) } /// Parse config content and apply interpolation with the given target. @@ -588,7 +557,7 @@ impl Config { /// 2. Discovers installed remote extensions in avocado-extensions/ and merges their configs /// 3. Discovers all external config references in runtime and ext dependencies /// 4. Loads each external config (raw) - /// 5. Merges external `ext.*`, `sdk.dependencies`, and `sdk.compile` sections + /// 5. Merges external `extensions.*`, `sdk.packages`, and `sdk.compile` sections /// 6. 
Applies interpolation to the composed model /// /// The `distro`, `default_target`, and `supported_targets` sections come from the main config only, @@ -610,7 +579,7 @@ impl Config { let mut main_config = Self::parse_config_value(&config_path_str, &content)?; // Record extensions from the main config - if let Some(ext_section) = main_config.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = main_config.get("extensions").and_then(|e| e.as_mapping()) { for (ext_key, _) in ext_section { if let Some(ext_name) = ext_key.as_str() { extension_sources.insert(ext_name.to_string(), config_path_str.clone()); @@ -654,8 +623,8 @@ impl Config { // For external configs (deprecated `config: path` syntax), use permissive include patterns // to maintain backward compatibility - merge all sections let legacy_include_patterns = vec![ - "provision.*".to_string(), - "sdk.dependencies.*".to_string(), + "provision_profiles.*".to_string(), + "sdk.packages.*".to_string(), "sdk.compile.*".to_string(), ]; let auto_include_compile = @@ -675,8 +644,9 @@ impl Config { extension_sources.insert(ext_name.clone(), resolved_path_str.clone()); // Also record any extensions defined within this external config - if let Some(nested_ext_section) = - external_config.get("ext").and_then(|e| e.as_mapping()) + if let Some(nested_ext_section) = external_config + .get("extensions") + .and_then(|e| e.as_mapping()) { for (nested_ext_key, _) in nested_ext_section { if let Some(nested_ext_name) = nested_ext_key.as_str() { @@ -725,9 +695,9 @@ impl Config { supported_targets: None, src_dir: None, distro: None, - runtime: None, + runtimes: None, sdk: None, - provision: None, + provision_profiles: None, signing_keys: None, }); @@ -876,7 +846,9 @@ impl Config { if verbose { eprintln!("[DEBUG] Successfully parsed config for '{ext_name}'"); // Show what extensions are defined in this remote config - if let Some(ext_section) = cfg.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = + 
cfg.get("extensions").and_then(|e| e.as_mapping()) + { let ext_names: Vec<_> = ext_section.keys().filter_map(|k| k.as_str()).collect(); eprintln!("[DEBUG] Remote config defines extensions: {ext_names:?}"); @@ -915,7 +887,9 @@ impl Config { extension_sources.insert(ext_name.clone(), ext_config_path_str.clone()); // Also record any extensions defined within this remote extension's config - if let Some(nested_ext_section) = ext_config.get("ext").and_then(|e| e.as_mapping()) { + if let Some(nested_ext_section) = + ext_config.get("extensions").and_then(|e| e.as_mapping()) + { for (nested_ext_key, _) in nested_ext_section { if let Some(nested_ext_name) = nested_ext_key.as_str() { extension_sources @@ -948,7 +922,8 @@ impl Config { if verbose { // Show what the main config's ext section looks like after merge - if let Some(main_ext) = main_config.get("ext").and_then(|e| e.get(&ext_name)) { + if let Some(main_ext) = main_config.get("extensions").and_then(|e| e.get(&ext_name)) + { eprintln!( "[DEBUG] After merge, main config ext.{}:\n{}", ext_name, @@ -1033,7 +1008,7 @@ impl Config { let mut visited = std::collections::HashSet::new(); // Scan runtime dependencies - if let Some(runtime_section) = config.get("runtime").and_then(|r| r.as_mapping()) { + if let Some(runtime_section) = config.get("runtimes").and_then(|r| r.as_mapping()) { for (_runtime_name, runtime_config) in runtime_section { Self::collect_external_refs_from_dependencies( runtime_config, @@ -1047,7 +1022,7 @@ impl Config { // Skip known non-target keys if let Some(key_str) = key.as_str() { if ![ - "dependencies", + "packages", "target", "stone_include_paths", "stone_manifest", @@ -1069,7 +1044,7 @@ impl Config { } // Scan ext dependencies - if let Some(ext_section) = config.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = config.get("extensions").and_then(|e| e.as_mapping()) { for (_ext_name, ext_config) in ext_section { Self::collect_external_refs_from_dependencies(ext_config, &mut refs, 
&mut visited); @@ -1088,7 +1063,6 @@ impl Config { "vendor", "types", "packages", - "dependencies", "sdk", "enable_services", "on_merge", @@ -1125,14 +1099,14 @@ impl Config { refs: &mut Vec<(String, String)>, visited: &mut std::collections::HashSet, ) { - let dependencies = section.get("dependencies").and_then(|d| d.as_mapping()); + let dependencies = section.get("packages").and_then(|d| d.as_mapping()); if let Some(deps_map) = dependencies { for (_dep_name, dep_spec) in deps_map { if let Some(spec_map) = dep_spec.as_mapping() { // Check for external extension reference if let (Some(ext_name), Some(config_path)) = ( - spec_map.get("ext").and_then(|v| v.as_str()), + spec_map.get("extensions").and_then(|v| v.as_str()), spec_map.get("config").and_then(|v| v.as_str()), ) { let key = format!("{ext_name}:{config_path}"); @@ -1153,7 +1127,7 @@ impl Config { /// /// Conditionally merges (based on include_patterns): /// - `provision.` sections (if pattern matches) - /// - `sdk.dependencies.` (if pattern matches) + /// - `sdk.packages.` (if pattern matches) /// - `sdk.compile.
` (if pattern matches) /// /// Does NOT merge (main config only): @@ -1166,7 +1140,7 @@ impl Config { /// * `main_config` - The main config to merge into /// * `external_config` - The external config to merge from /// * `ext_name` - The name of the extension (its `ext.` is always merged) - /// * `include_patterns` - Patterns for additional sections to include (e.g., "provision.*") + /// * `include_patterns` - Patterns for additional sections to include (e.g., "provision_profiles.*") /// * `auto_include_compile` - List of sdk.compile section names to auto-include (from compile deps) fn merge_external_config( main_config: &mut serde_yaml::Value, @@ -1175,18 +1149,21 @@ impl Config { include_patterns: &[String], auto_include_compile: &[String], ) { - // Always merge the extension's own ext. section - if let Some(external_ext) = external_config.get("ext").and_then(|e| e.as_mapping()) { + // Always merge the extension's own extensions. section + if let Some(external_ext) = external_config + .get("extensions") + .and_then(|e| e.as_mapping()) + { let main_ext = main_config .as_mapping_mut() .and_then(|m| { - if !m.contains_key(serde_yaml::Value::String("ext".to_string())) { + if !m.contains_key(serde_yaml::Value::String("extensions".to_string())) { m.insert( - serde_yaml::Value::String("ext".to_string()), + serde_yaml::Value::String("extensions".to_string()), serde_yaml::Value::Mapping(serde_yaml::Mapping::new()), ); } - m.get_mut(serde_yaml::Value::String("ext".to_string())) + m.get_mut(serde_yaml::Value::String("extensions".to_string())) }) .and_then(|e| e.as_mapping_mut()); @@ -1246,16 +1223,16 @@ impl Config { // Merge provision sections based on include patterns if let Some(external_provision) = external_config - .get("provision") + .get("provision_profiles") .and_then(|p| p.as_mapping()) { for (profile_key, profile_value) in external_provision { if let Some(profile_name) = profile_key.as_str() { - let config_path = format!("provision.{profile_name}"); + let 
config_path = format!("provision_profiles.{profile_name}"); if ExtensionSource::matches_include_pattern(&config_path, include_patterns) { Self::ensure_provision_section(main_config); if let Some(main_provision) = main_config - .get_mut("provision") + .get_mut("provision_profiles") .and_then(|p| p.as_mapping_mut()) { // Only add if not already present (main takes precedence) @@ -1268,20 +1245,20 @@ impl Config { } } - // Merge sdk.dependencies based on include patterns + // Merge sdk.packages based on include patterns if let Some(external_sdk_deps) = external_config .get("sdk") - .and_then(|s| s.get("dependencies")) + .and_then(|s| s.get("packages")) .and_then(|d| d.as_mapping()) { for (dep_key, dep_value) in external_sdk_deps { if let Some(dep_name) = dep_key.as_str() { - let config_path = format!("sdk.dependencies.{dep_name}"); + let config_path = format!("sdk.packages.{dep_name}"); if ExtensionSource::matches_include_pattern(&config_path, include_patterns) { - Self::ensure_sdk_dependencies_section(main_config); + Self::ensure_sdk_packages_section(main_config); if let Some(main_sdk_deps) = main_config .get_mut("sdk") - .and_then(|s| s.get_mut("dependencies")) + .and_then(|s| s.get_mut("packages")) .and_then(|d| d.as_mapping_mut()) { // Only add if not already present (main takes precedence) @@ -1391,9 +1368,9 @@ impl Config { /// Ensure the provision section exists in the config. 
fn ensure_provision_section(config: &mut serde_yaml::Value) { if let Some(main_map) = config.as_mapping_mut() { - if !main_map.contains_key(serde_yaml::Value::String("provision".to_string())) { + if !main_map.contains_key(serde_yaml::Value::String("provision_profiles".to_string())) { main_map.insert( - serde_yaml::Value::String("provision".to_string()), + serde_yaml::Value::String("provision_profiles".to_string()), serde_yaml::Value::Mapping(serde_yaml::Mapping::new()), ); } @@ -1475,9 +1452,9 @@ impl Config { let mut compile_deps = Vec::new(); if let Some(ext_section) = ext_config - .get("ext") + .get("extensions") .and_then(|e| e.get(ext_name)) - .and_then(|e| e.get("dependencies")) + .and_then(|e| e.get("packages")) .and_then(|d| d.as_mapping()) { for (_dep_name, dep_spec) in ext_section { @@ -1490,8 +1467,8 @@ impl Config { compile_deps } - /// Ensure the sdk.dependencies section exists in the config. - fn ensure_sdk_dependencies_section(config: &mut serde_yaml::Value) { + /// Ensure the sdk.packages section exists in the config. 
+ fn ensure_sdk_packages_section(config: &mut serde_yaml::Value) { if let Some(main_map) = config.as_mapping_mut() { // Ensure sdk section exists if !main_map.contains_key(serde_yaml::Value::String("sdk".to_string())) { @@ -1501,13 +1478,12 @@ impl Config { ); } - // Ensure sdk.dependencies section exists + // Ensure sdk.packages section exists if let Some(sdk) = main_map.get_mut(serde_yaml::Value::String("sdk".to_string())) { if let Some(sdk_map) = sdk.as_mapping_mut() { - if !sdk_map.contains_key(serde_yaml::Value::String("dependencies".to_string())) - { + if !sdk_map.contains_key(serde_yaml::Value::String("packages".to_string())) { sdk_map.insert( - serde_yaml::Value::String("dependencies".to_string()), + serde_yaml::Value::String("packages".to_string()), serde_yaml::Value::Mapping(serde_yaml::Mapping::new()), ); } @@ -1626,7 +1602,7 @@ impl Config { target: &str, config_path: &str, ) -> Result> { - let section_path = format!("runtime.{runtime_name}"); + let section_path = format!("runtimes.{runtime_name}"); self.get_merged_section(§ion_path, target, config_path) } @@ -1638,7 +1614,7 @@ impl Config { target: &str, config_path: &str, ) -> Result> { - let section_path = format!("provision.{profile_name}"); + let section_path = format!("provision_profiles.{profile_name}"); self.get_merged_section(§ion_path, target, config_path) } @@ -1649,7 +1625,7 @@ impl Config { target: &str, config_path: &str, ) -> Result> { - let section_path = format!("ext.{ext_name}"); + let section_path = format!("extensions.{ext_name}"); self.get_merged_section(§ion_path, target, config_path) } @@ -1666,7 +1642,7 @@ impl Config { /// extensions: /// - avocado-ext-dev /// - avocado-ext-sshd-dev - /// dependencies: + /// packages: /// avocado-runtime: '0.1.0' /// ``` pub fn get_runtime_extension_dependencies_detailed( @@ -1701,9 +1677,9 @@ impl Config { Ok(ext_deps) } - /// Get merged section for nested paths (e.g., "ext.name.dependencies", "runtime.name.dependencies") + /// Get merged 
section for nested paths (e.g., "extensions.name.packages", "runtimes.name.packages") /// For target-specific overrides, the target is inserted between base_path and nested_path: - /// Base: [ext.name.dependencies] + Target: [ext.name..dependencies] + /// Base: [extensions.name.packages] + Target: [extensions.name..packages] #[allow(dead_code)] // Future API for command integration pub fn get_merged_nested_section( &self, @@ -1745,91 +1721,14 @@ impl Config { let path = config_path.as_ref(); if !path.exists() { - // If a YAML file is requested but doesn't exist, check for a TOML version - let is_yaml_request = path - .extension() - .and_then(|e| e.to_str()) - .map(|e| e == "yaml" || e == "yml") - .unwrap_or(false); - - if is_yaml_request { - // Try to find a corresponding TOML file - let toml_path = path.with_extension("toml"); - if toml_path.exists() { - println!( - "⚠ Found legacy TOML config file: {}. Migrating to YAML format...", - toml_path.display() - ); - - // Migrate TOML to YAML - let migrated_path = Self::migrate_toml_to_yaml(&toml_path)?; - - // Load the migrated YAML file - let content = fs::read_to_string(&migrated_path).with_context(|| { - format!( - "Failed to read migrated config file: {}", - migrated_path.display() - ) - })?; - - return Self::load_from_yaml_str(&content).with_context(|| { - format!( - "Failed to parse migrated YAML config file: {}", - migrated_path.display() - ) - }); - } - } - return Err(ConfigError::FileNotFound(path.display().to_string()).into()); } let content = fs::read_to_string(path) .with_context(|| format!("Failed to read config file: {}", path.display()))?; - // Determine format based on file extension - let is_yaml = path - .extension() - .and_then(|e| e.to_str()) - .map(|e| e == "yaml" || e == "yml") - .unwrap_or(false); - - if is_yaml { - Self::load_from_yaml_str(&content) - .with_context(|| format!("Failed to parse YAML config file: {}", path.display())) - } else { - // TOML file detected - migrate to YAML - println!( 
- "⚠ Found legacy TOML config file: {}. Migrating to YAML...", - path.display() - ); - - // Parse TOML, convert to YAML, and save - #[allow(deprecated)] - let config = Self::load_from_toml_str(&content) - .with_context(|| format!("Failed to parse TOML config file: {}", path.display()))?; - - // Convert to YAML value for saving - let toml_val: toml::Value = - toml::from_str(&content).with_context(|| "Failed to parse TOML for conversion")?; - let yaml_val = Self::toml_to_yaml(&toml_val)?; - - // Save as YAML in the same directory - let yaml_path = path.with_extension("yaml"); - if !yaml_path.exists() { - let yaml_content = serde_yaml::to_string(&yaml_val)?; - fs::write(&yaml_path, yaml_content).with_context(|| { - format!( - "Failed to write migrated YAML config to {}", - yaml_path.display() - ) - })?; - println!("✓ Migrated to {}", yaml_path.display()); - println!(" Note: The old TOML file has been preserved. You can remove it after verifying the migration."); - } - - Ok(config) - } + Self::load_from_yaml_str(&content) + .with_context(|| format!("Failed to parse YAML config file: {}", path.display())) } /// Load configuration from a YAML string @@ -1849,85 +1748,12 @@ impl Config { Ok(config) } - /// Load configuration from a string (auto-detects YAML or TOML format) - /// Used primarily in tests for flexible parsing + /// Load configuration from a YAML string + /// Used primarily in tests #[allow(dead_code)] pub fn load_from_str(content: &str) -> Result { - // Try YAML first (preferred format) - if let Ok(config) = serde_yaml::from_str::(content) { - return Ok(config); - } - - // Fall back to TOML for test compatibility - #[allow(deprecated)] - { - Self::load_from_toml_str(content) - } - } - - // ============================================================================= - // DEPRECATED: TOML Support Functions (Pre-1.0.0) - // ============================================================================= - // The following functions support legacy TOML 
configuration files. - // These will be removed before the 1.0.0 release. - // ============================================================================= - - /// DEPRECATED: Load configuration from a TOML string - #[allow(dead_code)] // Kept for backward compatibility until 1.0.0 - #[deprecated( - note = "TOML format is deprecated. Use YAML format instead. Will be removed before 1.0.0" - )] - pub fn load_from_toml_str(content: &str) -> Result { - let config: Config = - toml::from_str(content).with_context(|| "Failed to parse TOML configuration")?; - - Ok(config) - } - - /// Convert TOML value to YAML value - fn toml_to_yaml(toml_val: &toml::Value) -> Result { - let json_str = serde_json::to_string(toml_val)?; - let yaml_val = serde_json::from_str(&json_str)?; - Ok(yaml_val) - } - - /// Migrate a TOML config file to YAML format - /// Reads an avocado.toml file, converts it to YAML, and saves as avocado.yaml - #[allow(dead_code)] // Public API for manual migration, kept until 1.0.0 - pub fn migrate_toml_to_yaml>(toml_path: P) -> Result { - let toml_path = toml_path.as_ref(); - - // Read the TOML file - let toml_content = fs::read_to_string(toml_path) - .with_context(|| format!("Failed to read TOML config file: {}", toml_path.display()))?; - - // Parse as TOML - let toml_val: toml::Value = - toml::from_str(&toml_content).with_context(|| "Failed to parse TOML configuration")?; - - // Convert to YAML - let yaml_val = Self::toml_to_yaml(&toml_val)?; - - // Serialize to YAML string - let yaml_content = - serde_yaml::to_string(&yaml_val).with_context(|| "Failed to serialize to YAML")?; - - // Determine output path - let yaml_path = toml_path.with_file_name("avocado.yaml"); - - // Write YAML file - fs::write(&yaml_path, yaml_content).with_context(|| { - format!("Failed to write YAML config file: {}", yaml_path.display()) - })?; - - println!( - "✓ Migrated {} to {}", - toml_path.display(), - yaml_path.display() - ); - println!(" Note: The old TOML file has been preserved. 
You can remove it after verifying the migration."); - - Ok(yaml_path) + serde_yaml::from_str::(content) + .with_context(|| "Failed to parse YAML configuration") } /// Get the SDK image from configuration @@ -1937,7 +1763,7 @@ impl Config { /// Get SDK dependencies pub fn get_sdk_dependencies(&self) -> Option<&HashMap> { - self.sdk.as_ref()?.dependencies.as_ref() + self.sdk.as_ref()?.packages.as_ref() } /// Get SDK dependencies with target interpolation. @@ -1971,7 +1797,7 @@ impl Config { // Extract SDK dependencies from the interpolated config let sdk_deps = parsed .get("sdk") - .and_then(|sdk| sdk.get("dependencies")) + .and_then(|sdk| sdk.get("packages")) .and_then(|deps| deps.as_mapping()) .map(|mapping| { mapping @@ -2085,7 +1911,7 @@ impl Config { /// None if the runtime doesn't exist or has no signing section. #[allow(dead_code)] // Public API for future use pub fn get_runtime_signing_key_name(&self, runtime_name: &str) -> Option { - let runtime_config = self.runtime.as_ref()?.get(runtime_name)?; + let runtime_config = self.runtimes.as_ref()?.get(runtime_name)?; Some(runtime_config.signing.as_ref()?.key.clone()) } @@ -2098,7 +1924,7 @@ impl Config { /// Returns the resolved key ID. 
#[allow(dead_code)] // Public API for future use pub fn get_runtime_signing_key(&self, runtime_name: &str) -> Option { - let runtime_config = self.runtime.as_ref()?.get(runtime_name)?; + let runtime_config = self.runtimes.as_ref()?.get(runtime_name)?; let signing_key_name = &runtime_config.signing.as_ref()?.key; // First, check the local signing_keys mapping @@ -2121,7 +1947,7 @@ impl Config { /// Get provision profile configuration pub fn get_provision_profile(&self, profile_name: &str) -> Option<&ProvisionProfileConfig> { - self.provision.as_ref()?.get(profile_name) + self.provision_profiles.as_ref()?.get(profile_name) } /// Get container args from provision profile @@ -2312,7 +2138,7 @@ impl Config { let mut external_extensions = HashMap::new(); // Find all ext.* sections in the external config - if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = parsed.get("extensions").and_then(|e| e.as_mapping()) { for (ext_name_key, ext_config) in ext_section { if let Some(ext_name) = ext_name_key.as_str() { external_extensions.insert(ext_name.to_string(), ext_config.clone()); @@ -2376,7 +2202,7 @@ impl Config { let mut remote_extensions = Vec::new(); - if let Some(ext_section) = parsed.get("ext").and_then(|e| e.as_mapping()) { + if let Some(ext_section) = parsed.get("extensions").and_then(|e| e.as_mapping()) { for (ext_name_key, ext_config) in ext_section { if let Some(raw_ext_name) = ext_name_key.as_str() { // Interpolate extension name if target is provided @@ -2491,11 +2317,10 @@ impl Config { target: &str, ) -> bool { let install_path = self.get_extension_install_path(config_path, ext_name, target); - // Check if the directory exists and contains an avocado.yaml or avocado.toml + // Check if the directory exists and contains an avocado.yaml install_path.exists() && (install_path.join("avocado.yaml").exists() - || install_path.join("avocado.yml").exists() - || install_path.join("avocado.toml").exists()) + || 
install_path.join("avocado.yml").exists()) } /// Find an extension in the full dependency tree (local and external) @@ -2514,7 +2339,7 @@ impl Config { // First check if it's defined in the ext section // Need to iterate and interpolate keys since they may contain templates like {{ avocado.target }} - if let Some(ext_section) = parsed.get("ext") { + if let Some(ext_section) = parsed.get("extensions") { if let Some(ext_map) = ext_section.as_mapping() { for (ext_key, ext_config) in ext_map { if let Some(raw_name) = ext_key.as_str() { @@ -2542,7 +2367,7 @@ impl Config { } // If not found in ext section, search through runtime extensions array - let runtime_section = parsed.get("runtime").and_then(|r| r.as_mapping()); + let runtime_section = parsed.get("runtimes").and_then(|r| r.as_mapping()); if let Some(runtime_section) = runtime_section { for (runtime_name_key, _) in runtime_section { @@ -2559,7 +2384,7 @@ impl Config { if let Some(ext_name) = ext.as_str() { if ext_name == extension_name { // Found in extensions array - now find its definition in ext section - if let Some(ext_section) = parsed.get("ext") { + if let Some(ext_section) = parsed.get("extensions") { if let Some(ext_map) = ext_section.as_mapping() { for (ext_key, ext_config) in ext_map { if let Some(raw_name) = ext_key.as_str() { @@ -2605,7 +2430,6 @@ impl Config { Ok(None) } - /// Expand environment variables in a string pub fn expand_env_vars(input: &str) -> String { let mut result = input.to_string(); @@ -2811,7 +2635,7 @@ impl Config { if let Some(sdk) = &self.sdk { if let Some(compile) = &sdk.compile { for (section_name, compile_config) in compile { - if let Some(dependencies) = &compile_config.dependencies { + if let Some(dependencies) = &compile_config.packages { compile_deps.insert(section_name.clone(), dependencies); } } @@ -2882,7 +2706,7 @@ impl Config { let mut visited = std::collections::HashSet::new(); // Process local extensions in the current config - if let Some(ext_section) = 
parsed.get("ext") { + if let Some(ext_section) = parsed.get("extensions") { if let Some(ext_table) = ext_section.as_mapping() { for (ext_name_val, ext_config) in ext_table { if let Some(ext_name) = ext_name_val.as_str() { @@ -2890,10 +2714,10 @@ impl Config { // Extract SDK dependencies for this extension (base and target-specific) let mut merged_deps = HashMap::new(); - // First, collect base SDK dependencies from [ext..sdk.dependencies] + // First, collect base SDK dependencies from [extensions..sdk.packages] if let Some(sdk_section) = ext_config_table.get("sdk") { if let Some(sdk_table) = sdk_section.as_mapping() { - if let Some(dependencies) = sdk_table.get("dependencies") { + if let Some(dependencies) = sdk_table.get("packages") { if let Some(deps_table) = dependencies.as_mapping() { for (k, v) in deps_table.iter() { if let Some(key_str) = k.as_str() { @@ -2906,14 +2730,14 @@ impl Config { } } - // Then, if we have a target, collect target-specific dependencies from [ext...sdk.dependencies] + // Then, if we have a target, collect target-specific dependencies from [extensions...sdk.packages] if let Some(target) = target { if let Some(target_section) = ext_config_table.get(target) { if let Some(target_table) = target_section.as_mapping() { if let Some(sdk_section) = target_table.get("sdk") { if let Some(sdk_table) = sdk_section.as_mapping() { if let Some(dependencies) = - sdk_table.get("dependencies") + sdk_table.get("packages") { if let Some(deps_table) = dependencies.as_mapping() @@ -2942,7 +2766,7 @@ impl Config { // If we have a config path, traverse external extension dependencies if let Some(config_path) = config_path { - if let Some(dependencies) = ext_config_table.get("dependencies") { + if let Some(dependencies) = ext_config_table.get("packages") { if let Some(deps_table) = dependencies.as_mapping() { self.collect_external_extension_sdk_dependencies_with_target( config_path, @@ -2962,12 +2786,12 @@ impl Config { // Also process extensions referenced in 
runtime dependencies if let Some(config_path) = config_path { - if let Some(runtime_section) = parsed.get("runtime") { + if let Some(runtime_section) = parsed.get("runtimes") { if let Some(runtime_table) = runtime_section.as_mapping() { for (_runtime_name, runtime_config) in runtime_table { if let Some(runtime_config_table) = runtime_config.as_mapping() { // Check base runtime dependencies - if let Some(dependencies) = runtime_config_table.get("dependencies") { + if let Some(dependencies) = runtime_config_table.get("packages") { if let Some(deps_table) = dependencies.as_mapping() { self.collect_external_extension_sdk_dependencies_with_target( config_path, @@ -2983,8 +2807,7 @@ impl Config { if let Some(target) = target { if let Some(target_section) = runtime_config_table.get(target) { if let Some(target_table) = target_section.as_mapping() { - if let Some(dependencies) = target_table.get("dependencies") - { + if let Some(dependencies) = target_table.get("packages") { if let Some(deps_table) = dependencies.as_mapping() { self.collect_external_extension_sdk_dependencies_with_target( config_path, @@ -3019,7 +2842,7 @@ impl Config { for (_dep_name, dep_spec) in dependencies { if let Some(dep_spec_table) = dep_spec.as_mapping() { // Check for external extension dependency - if let Some(ext_name) = dep_spec_table.get("ext").and_then(|v| v.as_str()) { + if let Some(ext_name) = dep_spec_table.get("extensions").and_then(|v| v.as_str()) { if let Some(external_config) = dep_spec_table.get("config").and_then(|v| v.as_str()) { @@ -3053,7 +2876,9 @@ impl Config { ) { // Only process the specific extension that's being referenced - if let Some(ext_section) = external_parsed.get("ext") { + if let Some(ext_section) = + external_parsed.get("extensions") + { if let Some(ext_table) = ext_section.as_mapping() { if let Some(external_ext_config) = ext_table.get(ext_name) @@ -3064,7 +2889,7 @@ impl Config { // Extract SDK dependencies for this specific external extension (base and 
target-specific) let mut merged_deps = HashMap::new(); - // First, collect base SDK dependencies from [ext..sdk.dependencies] + // First, collect base SDK dependencies from [extensions..sdk.packages] if let Some(sdk_section) = external_ext_config_table.get("sdk") { @@ -3072,8 +2897,7 @@ impl Config { sdk_section.as_mapping() { if let Some(dependencies) = - sdk_table - .get("dependencies") + sdk_table.get("packages") { if let Some(deps_table) = dependencies @@ -3097,7 +2921,7 @@ impl Config { } } - // Then, if we have a target, collect target-specific dependencies from [ext...sdk.dependencies] + // Then, if we have a target, collect target-specific dependencies from [extensions...sdk.packages] if let Some(target) = target { if let Some(target_section) = external_ext_config_table @@ -3116,9 +2940,8 @@ impl Config { if let Some( dependencies, ) = sdk_table - .get( - "dependencies", - ) { + .get("packages") + { if let Some(deps_table) = dependencies.as_mapping() { // Target-specific dependencies override base dependencies for (k, v) in deps_table.iter() { @@ -3145,7 +2968,7 @@ impl Config { // Recursively process dependencies of this specific external extension if let Some(nested_dependencies) = external_ext_config_table - .get("dependencies") + .get("packages") { if let Some(nested_deps_table) = nested_dependencies.as_mapping() @@ -3186,7 +3009,7 @@ impl Config { /// Get target from configuration /// Returns the target if there's exactly one runtime configuration pub fn get_target(&self) -> Option { - let runtime = self.runtime.as_ref()?; + let runtime = self.runtimes.as_ref()?; // Find all runtime configurations (nested dictionaries) let runtime_configs: Vec<&RuntimeConfig> = runtime.values().collect(); @@ -3282,7 +3105,7 @@ impl Config { /// Get merged SDK dependencies for a specific target. 
/// - /// This merges [sdk.dependencies] with [sdk..dependencies], + /// This merges [sdk.packages] with [sdk..packages], /// where target-specific dependencies override base dependencies. /// /// # Arguments @@ -3306,7 +3129,7 @@ impl Config { // First, add base SDK dependencies if let Some(sdk_section) = parsed.get("sdk") { - if let Some(deps) = sdk_section.get("dependencies") { + if let Some(deps) = sdk_section.get("packages") { if let Some(deps_table) = deps.as_mapping() { for (key, value) in deps_table { if let Some(key_str) = key.as_str() { @@ -3318,7 +3141,7 @@ impl Config { // Then, add/override with target-specific dependencies if let Some(target_section) = sdk_section.get(target) { - if let Some(target_deps) = target_section.get("dependencies") { + if let Some(target_deps) = target_section.get("packages") { if let Some(target_deps_table) = target_deps.as_mapping() { for (key, value) in target_deps_table { if let Some(key_str) = key.as_str() { @@ -3379,18 +3202,18 @@ fn merge_sdk_configs(mut base: SdkConfig, target: SdkConfig) -> SdkConfig { base.host_gid = target.host_gid; } - // For dependencies and compile, merge the HashMaps - if let Some(target_deps) = target.dependencies { - match base.dependencies { + // For packages and compile, merge the HashMaps + if let Some(target_deps) = target.packages { + match base.packages { Some(ref mut base_deps) => { - // Merge target dependencies into base dependencies + // Merge target packages into base packages for (key, value) in target_deps { base_deps.insert(key, value); } } None => { - // No base dependencies, use target dependencies - base.dependencies = Some(target_deps); + // No base packages, use target packages + base.packages = Some(target_deps); } } } @@ -3479,20 +3302,20 @@ mod tests { #[test] fn test_load_valid_config() { let config_content = r#" -[runtime.default] -target = "qemux86-64" - -[runtime.default.dependencies] -nativesdk-avocado-images = "*" - -[sdk] -image = 
"docker.io/avocadolinux/sdk:apollo-edge" - -[sdk.dependencies] -cmake = "*" +runtimes: + default: + target: qemux86-64 + packages: + nativesdk-avocado-images: "*" -[sdk.compile.app] -dependencies = { gcc = "*" } +sdk: + image: docker.io/avocadolinux/sdk:apollo-edge + packages: + cmake: "*" + compile: + app: + packages: + gcc: "*" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -3518,10 +3341,10 @@ dependencies = { gcc = "*" } #[test] fn test_src_dir_absolute_path() { let config_content = r#" -src_dir = "/absolute/path/to/source" +src_dir: "/absolute/path/to/source" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -3540,10 +3363,10 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" #[test] fn test_src_dir_relative_path() { let config_content = r#" -src_dir = "../../" +src_dir: "../../" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -3568,8 +3391,8 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" #[test] fn test_src_dir_not_configured() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -3587,13 +3410,13 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" sdk: image: "docker.io/avocadolinux/sdk:apollo-edge" -ext: +extensions: avocado-dev: types: - sysext - confext sdk: - dependencies: + packages: nativesdk-avocado-hitl: "*" nativesdk-something-else: "1.2.3" @@ -3601,7 +3424,7 @@ ext: types: - sysext sdk: - dependencies: + packages: nativesdk-tool: "*" "#; @@ -3645,18 +3468,18 @@ ext: sdk: image: "docker.io/avocadolinux/sdk:apollo-edge" -ext: +extensions: avocado-dev: types: - sysext - confext sdk: - dependencies: + packages: nativesdk-avocado-hitl: "*" 
nativesdk-base-tool: "1.0.0" qemux86-64: sdk: - dependencies: + packages: nativesdk-avocado-hitl: "2.0.0" nativesdk-target-specific: "*" @@ -3664,11 +3487,11 @@ ext: types: - sysext sdk: - dependencies: + packages: nativesdk-tool: "*" qemuarm64: sdk: - dependencies: + packages: nativesdk-arm-tool: "*" "#; @@ -3787,19 +3610,19 @@ ext: sdk: image: "docker.io/avocadolinux/sdk:apollo-edge" -runtime: +runtimes: dev: - dependencies: + packages: avocado-ext-dev: ext: avocado-ext-dev config: "extensions/dev/avocado.yaml" raspberrypi4: - dependencies: + packages: avocado-bsp-raspberrypi4: ext: avocado-bsp-raspberrypi4 config: "bsp/raspberrypi4/avocado.yaml" -ext: +extensions: config: types: - confext @@ -3842,9 +3665,9 @@ ext: #[test] fn test_sdk_container_args() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--network=$USER-avocado", "--privileged"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--network=$USER-avocado", "--privileged"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3861,11 +3684,12 @@ container_args = ["--network=$USER-avocado", "--privileged"] #[test] fn test_default_target_field() { let config_content = r#" -default_target = "qemux86-64" +default_target: "qemux86-64" -[runtime.dev] -target = "qemux86-64" -image = "avocadolinux/runtime:apollo-edge" +runtimes: + dev: + target: "qemux86-64" + image: "avocadolinux/runtime:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3878,9 +3702,10 @@ image = "avocadolinux/runtime:apollo-edge" #[test] fn test_no_default_target_field() { let config_content = r#" -[runtime.dev] -target = "qemux86-64" -image = "avocadolinux/runtime:apollo-edge" +runtimes: + dev: + target: "qemux86-64" + image: "avocadolinux/runtime:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3893,11 +3718,12 @@ image = "avocadolinux/runtime:apollo-edge" #[test] fn 
test_empty_default_target_field() { let config_content = r#" -default_target = "" +default_target: "" -[runtime.dev] -target = "qemux86-64" -image = "avocadolinux/runtime:apollo-edge" +runtimes: + dev: + target: "qemux86-64" + image: "avocadolinux/runtime:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3910,9 +3736,9 @@ image = "avocadolinux/runtime:apollo-edge" #[test] fn test_merge_sdk_container_args() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--network=host", "--privileged"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--network=host", "--privileged"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3933,9 +3759,9 @@ container_args = ["--network=host", "--privileged"] #[test] fn test_merge_sdk_container_args_config_only() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--network=host"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--network=host"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3952,8 +3778,8 @@ container_args = ["--network=host"] #[test] fn test_merge_sdk_container_args_cli_only() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3971,8 +3797,8 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" #[test] fn test_merge_sdk_container_args_none() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -3987,9 +3813,9 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" fn test_get_sdk_repo_url_env_override() { // Test environment variable override for SDK repo URL let config_content = r#" -[sdk] 
-image = "docker.io/avocadolinux/sdk:apollo-edge" -repo_url = "https://config.example.com" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + repo_url: "https://config.example.com" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4012,9 +3838,9 @@ repo_url = "https://config.example.com" fn test_get_sdk_repo_release_env_override() { // Test environment variable override for SDK repo release let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -repo_release = "config-release" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + repo_release: "config-release" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4037,8 +3863,8 @@ repo_release = "config-release" fn test_get_sdk_repo_url_env_only() { // Test environment variable when no config value exists let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4094,9 +3920,9 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" std::env::set_var("TEST_USER", "myuser"); let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--network=$TEST_USER-avocado", "--privileged"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--network=$TEST_USER-avocado", "--privileged"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4147,11 +3973,11 @@ container_args = ["--network=$TEST_USER-avocado", "--privileged"] #[test] fn test_provision_profile_config() { let config_content = r#" -[provision.usb] -container_args = ["-v", "/dev/usb:/dev/usb", "-v", "/sys:/sys:ro"] - -[provision.development] -container_args = ["--privileged", "--network=host"] +provision_profiles: + usb: + container_args: ["-v", "/dev/usb:/dev/usb", "-v", "/sys:/sys:ro"] + development: + container_args: ["--privileged", "--network=host"] "#; let config = 
Config::load_from_str(config_content).unwrap(); @@ -4178,8 +4004,9 @@ container_args = ["--privileged", "--network=host"] #[test] fn test_merge_provision_container_args() { let config_content = r#" -[provision.usb] -container_args = ["-v", "/dev/usb:/dev/usb", "--privileged"] +provision_profiles: + usb: + container_args: ["-v", "/dev/usb:/dev/usb", "--privileged"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4201,8 +4028,9 @@ container_args = ["-v", "/dev/usb:/dev/usb", "--privileged"] #[test] fn test_merge_provision_container_args_profile_only() { let config_content = r#" -[provision.test] -container_args = ["--network=host"] +provision_profiles: + test: + container_args: ["--network=host"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4219,8 +4047,8 @@ container_args = ["--network=host"] #[test] fn test_merge_provision_container_args_cli_only() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4238,8 +4066,8 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" #[test] fn test_merge_provision_container_args_none() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4254,12 +4082,13 @@ image = "docker.io/avocadolinux/sdk:apollo-edge" fn test_merge_provision_container_args_with_sdk_defaults() { // Test that SDK container_args are included as base defaults let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--privileged", "--network=host"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--privileged", "--network=host"] -[provision.usb] -container_args = ["-v", "/dev:/dev"] +provision_profiles: + usb: + container_args: ["-v", 
"/dev:/dev"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4283,9 +4112,9 @@ container_args = ["-v", "/dev:/dev"] fn test_merge_provision_container_args_sdk_defaults_only() { // Test that SDK container_args are used when no provision profile or CLI args let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--privileged", "-v", "/dev:/dev"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--privileged", "-v", "/dev:/dev"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4304,12 +4133,13 @@ container_args = ["--privileged", "-v", "/dev:/dev"] fn test_merge_provision_container_args_deduplication() { // Test that duplicate args are removed (keeping the last occurrence) let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:apollo-edge" -container_args = ["--privileged", "--network=host", "-v", "/dev:/dev"] +sdk: + image: "docker.io/avocadolinux/sdk:apollo-edge" + container_args: ["--privileged", "--network=host", "-v", "/dev:/dev"] -[provision.tegraflash] -container_args = ["--privileged", "--network=host", "-v", "/dev:/dev", "-v", "/sys:/sys"] +provision_profiles: + tegraflash: + container_args: ["--privileged", "--network=host", "-v", "/dev:/dev", "-v", "/sys:/sys"] "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4342,7 +4172,7 @@ container_args = ["--privileged", "--network=host", "-v", "/dev:/dev", "-v", "/s fn test_provision_state_file_default() { // Test that state_file defaults to .avocado/provision-{profile}.state when not configured let config_content = r#" -provision: +provision_profiles: usb: container_args: - --privileged @@ -4363,7 +4193,7 @@ provision: fn test_provision_state_file_custom() { // Test that custom state_file is used when configured let config_content = r#" -provision: +provision_profiles: production: container_args: - --privileged @@ -4387,23 +4217,20 @@ provision: fn 
test_merged_sdk_config() { // Create a temporary config file for testing merging let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64"] - -[sdk] -image = "base-image" -repo_url = "http://base-repo" -repo_release = "base-release" - -[sdk.dependencies] -base-package = "*" - -[sdk.qemux86-64] -image = "target-specific-image" -repo_url = "http://target-repo" +default_target: "qemux86-64" +supported_targets: ["qemux86-64"] -[sdk.qemux86-64.dependencies] -target-package = "*" +sdk: + image: "base-image" + repo_url: "http://base-repo" + repo_release: "base-release" + packages: + base-package: "*" + qemux86-64: + image: "target-specific-image" + repo_url: "http://target-repo" + packages: + target-package: "*" "#; let temp_file = tempfile::NamedTempFile::new().unwrap(); @@ -4426,13 +4253,12 @@ target-package = "*" fn test_merged_sdk_config_with_container_args() { // Test that target-specific container_args are properly merged let config_content = r#" -default_target = "qemux86-64" - -[sdk] -image = "base-image" +default_target: "qemux86-64" -[sdk.qemux86-64] -container_args = ["--net=host", "--privileged"] +sdk: + image: "base-image" + qemux86-64: + container_args: ["--net=host", "--privileged"] "#; let temp_file = tempfile::NamedTempFile::new().unwrap(); @@ -4457,16 +4283,17 @@ container_args = ["--net=host", "--privileged"] fn test_merged_sdk_dependencies() { // Create a temporary config file for testing dependency merging let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64"] -[sdk.dependencies] -base-package = "*" -shared-package = "1.0" - -[sdk.qemux86-64.dependencies] -target-package = "*" -shared-package = "2.0" +sdk: + packages: + base-package: "*" + shared-package: "1.0" + qemux86-64: + packages: + target-package: "*" + shared-package: "2.0" "#; let temp_file = tempfile::NamedTempFile::new().unwrap(); @@ -4493,12 +4320,12 @@ 
shared-package = "2.0" fn test_merged_sdk_config_no_target_section() { // Test merging when there's no target-specific section let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64"] -[sdk] -image = "base-image" -repo_url = "http://base-repo" +sdk: + image: "base-image" + repo_url: "http://base-repo" "#; let temp_file = tempfile::NamedTempFile::new().unwrap(); @@ -4517,32 +4344,31 @@ repo_url = "http://base-repo" #[test] fn test_hierarchical_section_merging() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] - -[sdk] -image = "base-image" -repo_url = "base-repo" - -[sdk.qemuarm64] -image = "arm64-image" - -[provision.usb] -container_args = ["--network=host"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] -[provision.usb.qemuarm64] -container_args = ["--privileged"] +sdk: + image: "base-image" + repo_url: "base-repo" + qemuarm64: + image: "arm64-image" -[runtime.prod] -some_setting = "base-value" +provision_profiles: + usb: + container_args: ["--network=host"] + qemuarm64: + container_args: ["--privileged"] -[runtime.prod.qemuarm64] -some_setting = "arm64-value" -additional_setting = "arm64-only" +runtimes: + prod: + some_setting: "base-value" + qemuarm64: + some_setting: "arm64-value" + additional_setting: "arm64-only" "#; // Write test config to a temp file - let temp_file = std::env::temp_dir().join("hierarchical_test.toml"); + let temp_file = std::env::temp_dir().join("hierarchical_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -4653,27 +4479,29 @@ additional_setting = "arm64-only" #[test] fn test_nested_section_merging() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] 
-[ext.avocado-dev.dependencies] -base-dep = "*" -shared-dep = "1.0" - -[ext.avocado-dev.qemuarm64.dependencies] -arm64-dep = "*" -shared-dep = "2.0" - -[ext.avocado-dev.users.root] -password = "" -shell = "/bin/bash" - -[ext.avocado-dev.qemuarm64.users.root] -password = "arm64-password" +extensions: + avocado-dev: + packages: + base-dep: "*" + shared-dep: "1.0" + users: + root: + password: "" + shell: "/bin/bash" + qemuarm64: + packages: + arm64-dep: "*" + shared-dep: "2.0" + users: + root: + password: "arm64-password" "#; // Write test config to a temp file - let temp_file = std::env::temp_dir().join("nested_test.toml"); + let temp_file = std::env::temp_dir().join("nested_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -4681,7 +4509,12 @@ password = "arm64-password" // Test nested dependencies merging let deps_x86 = config - .get_merged_nested_section("ext.avocado-dev", "dependencies", "qemux86-64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "packages", + "qemux86-64", + config_path, + ) .unwrap(); assert!(deps_x86.is_some()); let deps_x86_value = deps_x86.unwrap(); @@ -4697,7 +4530,12 @@ password = "arm64-password" assert!(deps_x86_table.get("arm64-dep").is_none()); let deps_arm64 = config - .get_merged_nested_section("ext.avocado-dev", "dependencies", "qemuarm64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "packages", + "qemuarm64", + config_path, + ) .unwrap(); assert!(deps_arm64.is_some()); let deps_arm64_value = deps_arm64.unwrap(); @@ -4721,7 +4559,12 @@ password = "arm64-password" // Test nested users merging let users_x86 = config - .get_merged_nested_section("ext.avocado-dev", "users.root", "qemux86-64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "users.root", + "qemux86-64", + config_path, + ) .unwrap(); assert!(users_x86.is_some()); let users_x86_value = users_x86.unwrap(); @@ -4736,7 +4579,12 @@ 
password = "arm64-password" ); let users_arm64 = config - .get_merged_nested_section("ext.avocado-dev", "users.root", "qemuarm64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "users.root", + "qemuarm64", + config_path, + ) .unwrap(); assert!(users_arm64.is_some()); let users_arm64_value = users_arm64.unwrap(); @@ -4757,15 +4605,19 @@ password = "arm64-password" #[test] fn test_target_only_sections() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] # No base section, only target-specific -[runtime.special.qemuarm64] -special_setting = "arm64-only" +runtimes: + special: + qemuarm64: + special_setting: "arm64-only" -[ext.arm-only.qemuarm64] -types = ["sysext"] +extensions: + arm-only: + qemuarm64: + types: ["sysext"] "#; let temp_file = std::env::temp_dir().join("target_only_test.toml"); @@ -4817,11 +4669,11 @@ types = ["sysext"] #[test] fn test_supported_targets_all_format() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = "*" +default_target: "qemux86-64" +supported_targets: "*" -[sdk] -image = "test-image" +sdk: + image: "test-image" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4837,11 +4689,11 @@ image = "test-image" #[test] fn test_supported_targets_list_format() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64", "raspberrypi4"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64", "raspberrypi4"] -[sdk] -image = "test-image" +sdk: + image: "test-image" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4863,11 +4715,11 @@ image = "test-image" #[test] fn test_supported_targets_empty_list() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = [] +default_target: "qemux86-64" +supported_targets: [] -[sdk] -image = "test-image" 
+sdk: + image: "test-image" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4882,10 +4734,10 @@ image = "test-image" #[test] fn test_supported_targets_missing() { let config_content = r#" -default_target = "qemux86-64" +default_target: "qemux86-64" -[sdk] -image = "test-image" +sdk: + image: "test-image" "#; let config = Config::load_from_str(config_content).unwrap(); @@ -4898,44 +4750,39 @@ image = "test-image" #[test] fn test_comprehensive_sdk_section() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] - -[sdk] -image = "base-sdk-image" -repo_url = "http://base-repo.com" -repo_release = "main" -container_args = ["--network=host", "--privileged"] - -[sdk.dependencies] -cmake = "*" -gcc = ">=9.0" -build-essential = "*" - -[sdk.compile.app] -compile = "make" +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] -[sdk.compile.app.dependencies] -libfoo = "*" -libbar = "1.2.3" - -[sdk.qemuarm64] -image = "arm64-sdk-image" -repo_url = "http://arm64-repo.com" -container_args = ["--cap-add=SYS_ADMIN"] - -[sdk.qemuarm64.dependencies] -gcc-aarch64-linux-gnu = "*" -qemu-user-static = "*" - -[sdk.qemuarm64.compile.app] -compile = "cross-make" - -[sdk.qemuarm64.compile.app.dependencies] -libfoo-dev-arm64 = "*" +sdk: + image: "base-sdk-image" + repo_url: "http://base-repo.com" + repo_release: "main" + container_args: ["--network=host", "--privileged"] + packages: + cmake: "*" + gcc: ">=9.0" + build-essential: "*" + compile: + app: + compile: "make" + packages: + libfoo: "*" + libbar: "1.2.3" + qemuarm64: + image: "arm64-sdk-image" + repo_url: "http://arm64-repo.com" + container_args: ["--cap-add=SYS_ADMIN"] + packages: + gcc-aarch64-linux-gnu: "*" + qemu-user-static: "*" + compile: + app: + compile: "cross-make" + packages: + libfoo-dev-arm64: "*" "#; - let temp_file = std::env::temp_dir().join("comprehensive_sdk_test.toml"); + let temp_file = 
std::env::temp_dir().join("comprehensive_sdk_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -4953,8 +4800,8 @@ libfoo-dev-arm64 = "*" assert_eq!(merged_x86.repo_release, Some("main".to_string())); assert_eq!(merged_x86.container_args.as_ref().unwrap().len(), 2); - // Test dependencies for base - let deps_x86 = merged_x86.dependencies.unwrap(); + // Test packages for base + let deps_x86 = merged_x86.packages.unwrap(); assert!(deps_x86.contains_key("cmake")); assert!(deps_x86.contains_key("gcc")); assert!(deps_x86.contains_key("build-essential")); @@ -4971,8 +4818,8 @@ libfoo-dev-arm64 = "*" assert_eq!(merged_arm64.repo_release, Some("main".to_string())); // Inherited assert_eq!(merged_arm64.container_args.as_ref().unwrap().len(), 1); // Overridden - // Test merged dependencies - let deps_arm64 = merged_arm64.dependencies.unwrap(); + // Test merged packages + let deps_arm64 = merged_arm64.packages.unwrap(); assert!(deps_arm64.contains_key("cmake")); // From base assert!(deps_arm64.contains_key("gcc")); // From base assert!(deps_arm64.contains_key("gcc-aarch64-linux-gnu")); // Target-specific @@ -4996,13 +4843,14 @@ libfoo-dev-arm64 = "*" fn test_has_compile_sections() { // Test with compile sections defined let config_with_compile = r#" -default_target = "qemux86-64" - -[sdk.compile.app] -compile = "make" +default_target: "qemux86-64" -[sdk.compile.app.dependencies] -libfoo = "*" +sdk: + compile: + app: + compile: "make" + packages: + libfoo: "*" "#; let config = Config::load_from_str(config_with_compile).unwrap(); @@ -5010,10 +4858,12 @@ libfoo = "*" // Test with compile sections but no dependencies let config_no_deps = r#" -default_target = "qemux86-64" +default_target: "qemux86-64" -[sdk.compile.app] -compile = "make" +sdk: + compile: + app: + compile: "make" "#; let config = Config::load_from_str(config_no_deps).unwrap(); @@ -5021,10 +4871,10 @@ compile = "make" // Test with no compile sections 
let config_no_compile = r#" -default_target = "qemux86-64" +default_target: "qemux86-64" -[sdk] -image = "my-sdk-image" +sdk: + image: "my-sdk-image" "#; let config = Config::load_from_str(config_no_compile).unwrap(); @@ -5032,7 +4882,7 @@ image = "my-sdk-image" // Test with empty config (minimal) let config_minimal = r#" -default_target = "qemux86-64" +default_target: "qemux86-64" "#; let config = Config::load_from_str(config_minimal).unwrap(); @@ -5042,44 +4892,38 @@ default_target = "qemux86-64" #[test] fn test_comprehensive_runtime_section() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] - -[runtime.production] -target = "qemux86-64" -image_version = "v1.0.0" -boot_timeout = 30 - -[runtime.production.dependencies] -avocado-img-bootfiles = "*" -avocado-img-rootfs = "*" -base-system = ">=2.0" - -[runtime.production.qemuarm64] -target = "qemuarm64" -image_version = "v1.0.0-arm64" -memory = "2G" - -[runtime.production.qemuarm64.dependencies] -avocado-img-bootfiles-arm64 = "*" -arm64-specific-pkg = "*" - -[runtime.development] -target = "qemux86-64" -debug_mode = true - -[runtime.development.dependencies] -debug-tools = "*" -gdb = "*" +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] -[runtime.development.qemuarm64] -cross_debug = true - -[runtime.development.qemuarm64.dependencies] -gdb-multiarch = "*" +runtimes: + production: + target: "qemux86-64" + image_version: "v1.0.0" + boot_timeout: 30 + packages: + avocado-img-bootfiles: "*" + avocado-img-rootfs: "*" + base-system: ">=2.0" + qemuarm64: + target: "qemuarm64" + image_version: "v1.0.0-arm64" + memory: "2G" + packages: + avocado-img-bootfiles-arm64: "*" + arm64-specific-pkg: "*" + development: + target: "qemux86-64" + debug_mode: true + packages: + debug-tools: "*" + gdb: "*" + qemuarm64: + cross_debug: true + packages: + gdb-multiarch: "*" "#; - let temp_file = std::env::temp_dir().join("comprehensive_runtime_test.toml"); 
+ let temp_file = std::env::temp_dir().join("comprehensive_runtime_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -5172,28 +5016,26 @@ gdb-multiarch = "*" #[test] fn test_comprehensive_provision_section() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] -[provision.usb] -container_args = ["--privileged", "-v", "/dev:/dev"] -timeout = 300 -retry_count = 3 - -[provision.usb.qemuarm64] -container_args = ["--cap-add=SYS_ADMIN", "-v", "/dev:/dev:ro"] -emulation_mode = true - -[provision.network] -container_args = ["--network=host"] -protocol = "ssh" - -[provision.network.qemuarm64] -protocol = "serial" -baud_rate = 115200 +provision_profiles: + usb: + container_args: ["--privileged", "-v", "/dev:/dev"] + timeout: 300 + retry_count: 3 + qemuarm64: + container_args: ["--cap-add=SYS_ADMIN", "-v", "/dev:/dev:ro"] + emulation_mode: true + network: + container_args: ["--network=host"] + protocol: "ssh" + qemuarm64: + protocol: "serial" + baud_rate: 115200 "#; - let temp_file = std::env::temp_dir().join("comprehensive_provision_test.toml"); + let temp_file = std::env::temp_dir().join("comprehensive_provision_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -5275,63 +5117,58 @@ baud_rate = 115200 #[test] fn test_comprehensive_ext_section() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] - -[ext.avocado-dev] -version = "1.0.0" -types = ["sysext", "confext"] -scopes = ["system"] -overlay = "overlays/avocado-dev" -enable_services = ["sshd.socket"] -modprobe = ["nfs", "overlay"] - -[ext.avocado-dev.dependencies] -openssh = "*" -nfs-utils = "*" -debug-tools = ">=1.0" - -[ext.avocado-dev.sdk.dependencies] -nativesdk-openssh = "*" -nativesdk-gdb = "*" - 
-[ext.avocado-dev.users.root] -password = "" -shell = "/bin/bash" -home = "/root" - -[ext.avocado-dev.users.developer] -password = "dev123" -groups = ["wheel", "docker"] -home = "/home/developer" - -[ext.avocado-dev.groups.docker] -gid = 999 - -[ext.avocado-dev.qemuarm64] -version = "1.0.0-arm64" -overlay = "overlays/avocado-dev-arm64" - -[ext.avocado-dev.qemuarm64.dependencies] -gdb-multiarch = "*" -arm64-debug-tools = "*" - -[ext.avocado-dev.qemuarm64.sdk.dependencies] -nativesdk-gdb-cross-aarch64 = "*" - -[ext.avocado-dev.qemuarm64.users.root] -password = "arm64-root" - -[ext.peridio] -version = "2.0.0" -types = ["confext"] -enable_services = ["peridiod.service"] - -[ext.peridio.qemuarm64] -enable_services = ["peridiod.service", "peridio-agent.service"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] + +extensions: + avocado-dev: + version: "1.0.0" + types: ["sysext", "confext"] + scopes: ["system"] + overlay: "overlays/avocado-dev" + enable_services: ["sshd.socket"] + modprobe: ["nfs", "overlay"] + packages: + openssh: "*" + nfs-utils: "*" + debug-tools: ">=1.0" + sdk: + packages: + nativesdk-openssh: "*" + nativesdk-gdb: "*" + users: + root: + password: "" + shell: "/bin/bash" + home: "/root" + developer: + password: "dev123" + groups: ["wheel", "docker"] + home: "/home/developer" + groups: + docker: + gid: 999 + qemuarm64: + version: "1.0.0-arm64" + overlay: "overlays/avocado-dev-arm64" + packages: + gdb-multiarch: "*" + arm64-debug-tools: "*" + sdk: + packages: + nativesdk-gdb-cross-aarch64: "*" + users: + root: + password: "arm64-root" + peridio: + version: "2.0.0" + types: ["confext"] + enable_services: ["peridiod.service"] + qemuarm64: + enable_services: ["peridiod.service", "peridio-agent.service"] "#; - let temp_file = std::env::temp_dir().join("comprehensive_ext_test.toml"); + let temp_file = std::env::temp_dir().join("comprehensive_ext_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = 
temp_file.to_str().unwrap(); @@ -5375,7 +5212,12 @@ enable_services = ["peridiod.service", "peridio-agent.service"] // Test nested dependencies merging let deps_x86 = config - .get_merged_nested_section("ext.avocado-dev", "dependencies", "qemux86-64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "packages", + "qemux86-64", + config_path, + ) .unwrap(); assert!(deps_x86.is_some()); let deps_x86_value = deps_x86.unwrap(); @@ -5385,7 +5227,12 @@ enable_services = ["peridiod.service", "peridio-agent.service"] assert!(!deps_x86_table.contains_key("gdb-multiarch")); let deps_arm64 = config - .get_merged_nested_section("ext.avocado-dev", "dependencies", "qemuarm64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "packages", + "qemuarm64", + config_path, + ) .unwrap(); assert!(deps_arm64.is_some()); let deps_arm64_value = deps_arm64.unwrap(); @@ -5397,8 +5244,8 @@ enable_services = ["peridiod.service", "peridio-agent.service"] // Test SDK dependencies merging let sdk_deps_x86 = config .get_merged_nested_section( - "ext.avocado-dev", - "sdk.dependencies", + "extensions.avocado-dev", + "sdk.packages", "qemux86-64", config_path, ) @@ -5412,8 +5259,8 @@ enable_services = ["peridiod.service", "peridio-agent.service"] let sdk_deps_arm64 = config .get_merged_nested_section( - "ext.avocado-dev", - "sdk.dependencies", + "extensions.avocado-dev", + "sdk.packages", "qemuarm64", config_path, ) @@ -5426,7 +5273,12 @@ enable_services = ["peridiod.service", "peridio-agent.service"] // Test users merging let users_root_x86 = config - .get_merged_nested_section("ext.avocado-dev", "users.root", "qemux86-64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "users.root", + "qemux86-64", + config_path, + ) .unwrap(); assert!(users_root_x86.is_some()); let users_root_x86_value = users_root_x86.unwrap(); @@ -5445,7 +5297,12 @@ enable_services = ["peridiod.service", "peridio-agent.service"] ); let users_root_arm64 = 
config - .get_merged_nested_section("ext.avocado-dev", "users.root", "qemuarm64", config_path) + .get_merged_nested_section( + "extensions.avocado-dev", + "users.root", + "qemuarm64", + config_path, + ) .unwrap(); assert!(users_root_arm64.is_some()); let users_root_arm64_value = users_root_arm64.unwrap(); @@ -5502,11 +5359,11 @@ enable_services = ["peridiod.service", "peridio-agent.service"] fn test_invalid_config_handling() { // Test invalid supported_targets format let invalid_supported_targets = r#" -default_target = "qemux86-64" -supported_targets = 123 # Invalid - not string or array +default_target: "qemux86-64" +supported_targets: 123 # Invalid - not string or array -[sdk] -image = "test" +sdk: + image: "test" "#; let result = Config::load_from_str(invalid_supported_targets); @@ -5514,12 +5371,12 @@ image = "test" // Test missing required fields let missing_sdk_image = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64"] -[sdk] +sdk: # Missing image field -repo_url = "http://example.com" + repo_url: "http://example.com" "#; let config = Config::load_from_str(missing_sdk_image).unwrap(); @@ -5531,32 +5388,40 @@ repo_url = "http://example.com" assert!(result.default_target.is_none()); assert!(result.supported_targets.is_none()); assert!(result.sdk.is_none()); - assert!(result.runtime.is_none()); - assert!(result.provision.is_none()); + assert!(result.runtimes.is_none()); + assert!(result.provision_profiles.is_none()); } #[test] fn test_complex_nested_overrides() { let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64", "raspberrypi4"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64", "raspberrypi4"] # Complex nested structure with target-specific overrides -[ext.complex.level1.level2.level3] -base_value = "original" -shared_value = "base" - -[ext.complex.qemuarm64.level1.level2.level3] -override_value = 
"arm64-specific" -shared_value = "arm64-override" -nested_override = "arm64-nested" - -[ext.complex.raspberrypi4.level1.level2.level3] -rpi_specific = true -shared_value = "rpi-override" +extensions: + complex: + level1: + level2: + level3: + base_value: "original" + shared_value: "base" + qemuarm64: + level1: + level2: + level3: + override_value: "arm64-specific" + shared_value: "arm64-override" + nested_override: "arm64-nested" + raspberrypi4: + level1: + level2: + level3: + rpi_specific: true + shared_value: "rpi-override" "#; - let temp_file = std::env::temp_dir().join("complex_nested_test.toml"); + let temp_file = std::env::temp_dir().join("complex_nested_test.yaml"); std::fs::write(&temp_file, config_content).unwrap(); let config_path = temp_file.to_str().unwrap(); @@ -5565,7 +5430,7 @@ shared_value = "rpi-override" // Test x86-64 (base only) let x86_nested = config .get_merged_nested_section( - "ext.complex", + "extensions.complex", "level1.level2.level3", "qemux86-64", config_path, @@ -5588,7 +5453,7 @@ shared_value = "rpi-override" // Test ARM64 (has target-specific override) let arm64_nested = config .get_merged_nested_section( - "ext.complex", + "extensions.complex", "level1.level2.level3", "qemuarm64", config_path, @@ -5621,7 +5486,7 @@ shared_value = "rpi-override" // Test RaspberryPi4 (different target-specific override) let rpi_nested = config .get_merged_nested_section( - "ext.complex", + "extensions.complex", "level1.level2.level3", "raspberrypi4", config_path, @@ -5650,18 +5515,23 @@ shared_value = "rpi-override" fn test_edge_cases_and_error_conditions() { // Test configuration with only target-specific sections let target_only_config = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "qemuarm64"] +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "qemuarm64"] # Only target-specific sections, no base -[sdk.qemuarm64] -image = "arm64-only-sdk" +sdk: + qemuarm64: + image: "arm64-only-sdk" 
-[runtime.special.qemuarm64] -special_mode = true +runtimes: + special: + qemuarm64: + special_mode: true -[ext.arm-only.qemuarm64] -types = ["sysext"] +extensions: + arm-only: + qemuarm64: + types: ["sysext"] "#; let temp_file = std::env::temp_dir().join("target_only_edge_test.toml"); @@ -5720,29 +5590,30 @@ types = ["sysext"] fn test_nested_target_config_merging() { // Create a temporary config file with nested target-specific configuration let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64", "reterminal-dm"] - -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" - -[runtime.default] -target = "x86_64-unknown-linux-gnu" - -[ext.avocado-ext-webkit] -version = "1.0.0" -release = "r0" -vendor = "Avocado Linux " -summary = "WPE WebKit browser and display utilities" -description = "WPE WebKit browser and display utilities" -license = "Apache-2.0" -url = "https://github.com/avocadolinux/avocado-ext" -types = ["sysext", "confext"] -enable_services = ["cog.service"] -on_merge = ["systemctl restart --no-block cog.service"] - -[ext.avocado-ext-webkit.reterminal-dm] -overlay = "extensions/webkit/overlays/reterminal-dm" +default_target: "qemux86-64" +supported_targets: ["qemux86-64", "reterminal-dm"] + +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" + +runtimes: + default: + target: "x86_64-unknown-linux-gnu" + +extensions: + avocado-ext-webkit: + version: "1.0.0" + release: "r0" + vendor: "Avocado Linux " + summary: "WPE WebKit browser and display utilities" + description: "WPE WebKit browser and display utilities" + license: "Apache-2.0" + url: "https://github.com/avocadolinux/avocado-ext" + types: ["sysext", "confext"] + enable_services: ["cog.service"] + on_merge: ["systemctl restart --no-block cog.service"] + reterminal-dm: + overlay: "extensions/webkit/overlays/reterminal-dm" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5827,12 +5698,13 @@ overlay = "extensions/webkit/overlays/reterminal-dm" 
#[test] fn test_stone_include_paths_basic() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" -stone_include_paths = ["stone-qemux86-64"] +runtimes: + test-runtime: + target: "x86_64" + stone_include_paths: ["stone-qemux86-64"] "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5851,12 +5723,13 @@ stone_include_paths = ["stone-qemux86-64"] #[test] fn test_stone_include_paths_multiple() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" -stone_include_paths = ["stone-a", "stone-b", "stone-c"] +runtimes: + test-runtime: + target: "x86_64" + stone_include_paths: ["stone-a", "stone-b", "stone-c"] "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5875,11 +5748,12 @@ stone_include_paths = ["stone-a", "stone-b", "stone-c"] #[test] fn test_stone_include_paths_not_configured() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" +runtimes: + test-runtime: + target: "x86_64" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5896,15 +5770,15 @@ target = "x86_64" #[test] fn test_stone_include_paths_target_specific_override() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" - -[runtime.test-runtime] -target = "x86_64" -stone_include_paths = ["stone-default"] - -[runtime.test-runtime.aarch64] -stone_include_paths = ["stone-aarch64"] +sdk: + image: "docker.io/avocadolinux/sdk:latest" + +runtimes: + test-runtime: + target: "x86_64" + stone_include_paths: ["stone-default"] + aarch64: + stone_include_paths: ["stone-aarch64"] "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5931,14 +5805,14 @@ stone_include_paths = ["stone-aarch64"] fn 
test_stone_include_paths_user_example() { // Test the exact example from the user's request let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" - -[runtime.dev] -stone_include_paths = ["stone-common"] +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.dev.qemux86-64] -stone_include_paths = ["stone-qemux86-64"] +runtimes: + dev: + stone_include_paths: ["stone-common"] + qemux86-64: + stone_include_paths: ["stone-qemux86-64"] "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5971,12 +5845,13 @@ stone_include_paths = ["stone-qemux86-64"] #[test] fn test_stone_include_paths_empty_array() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" -stone_include_paths = [] +runtimes: + test-runtime: + target: "x86_64" + stone_include_paths: [] "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -5994,12 +5869,13 @@ stone_include_paths = [] #[test] fn test_stone_manifest_basic() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" -stone_manifest = "stone-manifest.json" +runtimes: + test-runtime: + target: "x86_64" + stone_manifest: "stone-manifest.json" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -6017,11 +5893,12 @@ stone_manifest = "stone-manifest.json" #[test] fn test_stone_manifest_not_configured() { let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.test-runtime] -target = "x86_64" +runtimes: + test-runtime: + target: "x86_64" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -6039,14 +5916,14 @@ target = "x86_64" fn test_stone_manifest_target_specific_override() { // Test the exact example from the user's request let config_content = r#" -[sdk] -image = 
"docker.io/avocadolinux/sdk:latest" - -[runtime.dev] -stone_manifest = "stone-common.json" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.dev.qemux86-64] -stone_manifest = "stone-qemux86-64.json" +runtimes: + dev: + stone_manifest: "stone-common.json" + qemux86-64: + stone_manifest: "stone-qemux86-64.json" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -6083,14 +5960,14 @@ stone_manifest = "stone-qemux86-64.json" fn test_stone_manifest_only_target_specific() { // Test when stone_manifest is only defined in target-specific section let config_content = r#" -[sdk] -image = "docker.io/avocadolinux/sdk:latest" - -[runtime.dev] -target = "x86_64" +sdk: + image: "docker.io/avocadolinux/sdk:latest" -[runtime.dev.qemux86-64] -stone_manifest = "stone-qemux86-64.json" +runtimes: + dev: + target: "x86_64" + qemux86-64: + stone_manifest: "stone-qemux86-64.json" "#; let mut temp_file = NamedTempFile::new().unwrap(); @@ -6207,7 +6084,7 @@ sdk: fn test_container_args_provision_as_string() { // Test that provision profile container_args also supports string format let config_content = r#" -provision: +provision_profiles: usb: container_args: "-v /dev:/dev -v /sys:/sys:ro --privileged" "#; @@ -6308,7 +6185,7 @@ signing_keys: - my-production-key: {production_keyid} - backup-key: {backup_keyid} -runtime: +runtimes: dev: signing: key: my-production-key @@ -6363,21 +6240,21 @@ runtime: assert_eq!(runtime_key, Some(production_keyid.to_string())); // Test runtime signing config - let runtime = config.runtime.as_ref().unwrap().get("dev").unwrap(); + let runtime = config.runtimes.as_ref().unwrap().get("dev").unwrap(); assert!(runtime.signing.is_some()); let signing = runtime.signing.as_ref().unwrap(); assert_eq!(signing.key, "my-production-key"); assert_eq!(signing.checksum_algorithm, "sha256"); // Test production runtime with blake3 - let production = config.runtime.as_ref().unwrap().get("production").unwrap(); + let production = 
config.runtimes.as_ref().unwrap().get("production").unwrap(); assert!(production.signing.is_some()); let prod_signing = production.signing.as_ref().unwrap(); assert_eq!(prod_signing.key, "backup-key"); assert_eq!(prod_signing.checksum_algorithm, "blake3"); // Test staging runtime with default checksum_algorithm - let staging = config.runtime.as_ref().unwrap().get("staging").unwrap(); + let staging = config.runtimes.as_ref().unwrap().get("staging").unwrap(); assert!(staging.signing.is_some()); let staging_signing = staging.signing.as_ref().unwrap(); assert_eq!(staging_signing.key, "my-production-key"); @@ -6413,7 +6290,7 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: dev: signing: key: my-key @@ -6421,7 +6298,7 @@ runtime: signing: key: production-key no-signing: - dependencies: + packages: some-package: '*' "#; @@ -6465,7 +6342,7 @@ sdk: signing_keys: - existing-key: {keyid} -runtime: +runtimes: dev: signing: key: missing-key @@ -6499,45 +6376,52 @@ runtime: #[test] fn test_discover_external_config_refs_from_runtime() { + // New format: external config refs are defined in extensions section with source: path let config_content = r#" -runtime: +runtimes: prod: target: qemux86-64 - dependencies: - peridio: - ext: avocado-ext-peridio - config: avocado-ext-peridio/avocado.yml - local-ext: - ext: local-extension + extensions: + - avocado-ext-peridio + - local-extension +extensions: + avocado-ext-peridio: + source: + type: path + path: avocado-ext-peridio + local-extension: + version: "1.0.0" "#; let parsed: serde_yaml::Value = serde_yaml::from_str(config_content).unwrap(); let refs = Config::discover_external_config_refs(&parsed); - assert_eq!(refs.len(), 1); - assert_eq!(refs[0].0, "avocado-ext-peridio"); - assert_eq!(refs[0].1, "avocado-ext-peridio/avocado.yml"); + // In the new format, source: path extensions are handled differently + // so this test verifies no deprecated refs are found + 
assert_eq!(refs.len(), 0); } #[test] fn test_discover_external_config_refs_from_ext() { + // New format: external config refs are no longer in packages, they use source: path let config_content = r#" -ext: +extensions: main-ext: types: - sysext - dependencies: - external-dep: - ext: external-extension - config: external/config.yaml + packages: + some-package: "*" + external-extension: + source: + type: path + path: external "#; let parsed: serde_yaml::Value = serde_yaml::from_str(config_content).unwrap(); let refs = Config::discover_external_config_refs(&parsed); - assert_eq!(refs.len(), 1); - assert_eq!(refs[0].0, "external-extension"); - assert_eq!(refs[0].1, "external/config.yaml"); + // In the new format, source: path extensions are handled differently + assert_eq!(refs.len(), 0); } #[test] @@ -6545,13 +6429,13 @@ ext: let main_config_content = r#" distro: version: "1.0.0" -ext: +extensions: local-ext: types: - sysext "#; let external_config_content = r#" -ext: +extensions: external-ext: types: - sysext @@ -6566,7 +6450,7 @@ ext: Config::merge_external_config(&mut main_config, &external_config, "external-ext", &[], &[]); // Check that both extensions are present - let ext_section = main_config.get("ext").unwrap().as_mapping().unwrap(); + let ext_section = main_config.get("extensions").unwrap().as_mapping().unwrap(); assert!(ext_section.contains_key(serde_yaml::Value::String("local-ext".to_string()))); assert!(ext_section.contains_key(serde_yaml::Value::String("external-ext".to_string()))); } @@ -6576,12 +6460,12 @@ ext: let main_config_content = r#" sdk: image: test-image - dependencies: + packages: main-package: "*" "#; let external_config_content = r#" sdk: - dependencies: + packages: external-package: "1.0.0" main-package: "2.0.0" # Should not override main config "#; @@ -6590,8 +6474,8 @@ sdk: let external_config: serde_yaml::Value = serde_yaml::from_str(external_config_content).unwrap(); - // Include sdk.dependencies.* to merge SDK dependencies - let 
include_patterns = vec!["sdk.dependencies.*".to_string()]; + // Include sdk.packages.* to merge SDK packages + let include_patterns = vec!["sdk.packages.*".to_string()]; Config::merge_external_config( &mut main_config, &external_config, @@ -6603,7 +6487,7 @@ sdk: let sdk_deps = main_config .get("sdk") .unwrap() - .get("dependencies") + .get("packages") .unwrap() .as_mapping() .unwrap(); @@ -6661,114 +6545,29 @@ distro: assert_eq!(distro.get("channel").unwrap().as_str(), Some("stable")); } - #[test] - fn test_load_composed_with_interpolation() { - use tempfile::TempDir; - - // Create a temp directory for our test configs - let temp_dir = TempDir::new().unwrap(); - - // Create main config - let main_config_content = r#" -distro: - version: "1.0.0" - channel: apollo-edge -default_target: qemux86-64 -sdk: - image: "docker.io/test:{{ config.distro.channel }}" - dependencies: - main-sdk-dep: "*" -runtime: - prod: - target: qemux86-64 - dependencies: - peridio: - ext: test-ext - config: external/avocado.yml -"#; - let main_config_path = temp_dir.path().join("avocado.yaml"); - std::fs::write(&main_config_path, main_config_content).unwrap(); - - // Create external config directory and file - let external_dir = temp_dir.path().join("external"); - std::fs::create_dir_all(&external_dir).unwrap(); - - let external_config_content = r#" -ext: - test-ext: - version: "{{ config.distro.version }}" - types: - - sysext -sdk: - dependencies: - external-sdk-dep: "*" -"#; - let external_config_path = external_dir.join("avocado.yml"); - std::fs::write(&external_config_path, external_config_content).unwrap(); - - // Load composed config - let composed = Config::load_composed(&main_config_path, Some("qemux86-64")).unwrap(); - - // Verify the SDK image was interpolated using main config's distro - assert_eq!( - composed - .config - .sdk - .as_ref() - .unwrap() - .image - .as_ref() - .unwrap(), - "docker.io/test:apollo-edge" - ); - - // Verify the external extension was merged - let 
ext_section = composed - .merged_value - .get("ext") - .unwrap() - .as_mapping() - .unwrap(); - assert!(ext_section.contains_key(serde_yaml::Value::String("test-ext".to_string()))); - - // Verify the external extension's version was interpolated from main config's distro - let test_ext = ext_section - .get(serde_yaml::Value::String("test-ext".to_string())) - .unwrap(); - assert_eq!(test_ext.get("version").unwrap().as_str(), Some("1.0.0")); - - // Verify SDK dependencies were merged - let sdk_deps = composed - .merged_value - .get("sdk") - .unwrap() - .get("dependencies") - .unwrap() - .as_mapping() - .unwrap(); - assert!(sdk_deps.contains_key(serde_yaml::Value::String("main-sdk-dep".to_string()))); - assert!(sdk_deps.contains_key(serde_yaml::Value::String("external-sdk-dep".to_string()))); - } + // NOTE: test_load_composed_with_interpolation was removed as it tested the deprecated + // config: path syntax for external extension loading. The new format uses source: path + // in the extensions section and this functionality is handled by ext fetch. 
#[test] fn test_extension_source_get_include_patterns() { // Test Repo variant with include patterns - let source = ExtensionSource::Repo { + let source = ExtensionSource::Package { version: "*".to_string(), package: None, repo_name: None, include: Some(vec![ - "provision.tegraflash".to_string(), + "provision_profiles.tegraflash".to_string(), "sdk.compile.*".to_string(), ]), }; let patterns = source.get_include_patterns(); assert_eq!(patterns.len(), 2); - assert_eq!(patterns[0], "provision.tegraflash"); + assert_eq!(patterns[0], "provision_profiles.tegraflash"); assert_eq!(patterns[1], "sdk.compile.*"); // Test Repo variant without include patterns - let source_no_include = ExtensionSource::Repo { + let source_no_include = ExtensionSource::Package { version: "*".to_string(), package: None, repo_name: None, @@ -6781,14 +6580,14 @@ sdk: url: "https://example.com/repo.git".to_string(), git_ref: Some("main".to_string()), sparse_checkout: None, - include: Some(vec!["provision.*".to_string()]), + include: Some(vec!["provision_profiles.*".to_string()]), }; assert_eq!(git_source.get_include_patterns().len(), 1); // Test Path variant with include patterns let path_source = ExtensionSource::Path { path: "./external".to_string(), - include: Some(vec!["sdk.dependencies.*".to_string()]), + include: Some(vec!["sdk.packages.*".to_string()]), }; assert_eq!(path_source.get_include_patterns().len(), 1); } @@ -6796,13 +6595,13 @@ sdk: #[test] fn test_matches_include_pattern_exact() { let patterns = vec![ - "provision.tegraflash".to_string(), + "provision_profiles.tegraflash".to_string(), "sdk.compile.nvidia-l4t".to_string(), ]; // Exact matches should return true assert!(ExtensionSource::matches_include_pattern( - "provision.tegraflash", + "provision_profiles.tegraflash", &patterns )); assert!(ExtensionSource::matches_include_pattern( @@ -6812,7 +6611,7 @@ sdk: // Non-matches should return false assert!(!ExtensionSource::matches_include_pattern( - "provision.usb", + 
"provision_profiles.usb", &patterns )); assert!(!ExtensionSource::matches_include_pattern( @@ -6827,15 +6626,18 @@ sdk: #[test] fn test_matches_include_pattern_wildcard() { - let patterns = vec!["provision.*".to_string(), "sdk.compile.*".to_string()]; + let patterns = vec![ + "provision_profiles.*".to_string(), + "sdk.compile.*".to_string(), + ]; // Wildcard matches should work assert!(ExtensionSource::matches_include_pattern( - "provision.tegraflash", + "provision_profiles.tegraflash", &patterns )); assert!(ExtensionSource::matches_include_pattern( - "provision.usb", + "provision_profiles.usb", &patterns )); assert!(ExtensionSource::matches_include_pattern( @@ -6849,11 +6651,11 @@ sdk: // Non-matches should return false assert!(!ExtensionSource::matches_include_pattern( - "sdk.dependencies.package1", + "sdk.packages.package1", &patterns )); assert!(!ExtensionSource::matches_include_pattern( - "runtime.prod", + "runtimes.prod", &patterns )); @@ -6870,7 +6672,7 @@ sdk: // Empty patterns should never match assert!(!ExtensionSource::matches_include_pattern( - "provision.tegraflash", + "provision_profiles.tegraflash", &empty_patterns )); assert!(!ExtensionSource::matches_include_pattern( @@ -6882,28 +6684,28 @@ sdk: #[test] fn test_merge_external_config_with_include_patterns() { let main_config_content = r#" -ext: +extensions: local-ext: types: - sysext -provision: +provision_profiles: existing-profile: script: provision.sh "#; let external_config_content = r#" -ext: +extensions: remote-ext: types: - sysext - dependencies: + packages: some-dep: "*" -provision: +provision_profiles: tegraflash: script: flash.sh usb: script: usb-provision.sh sdk: - dependencies: + packages: external-dep: "*" compile: nvidia-l4t: @@ -6914,8 +6716,8 @@ sdk: let external_config: serde_yaml::Value = serde_yaml::from_str(external_config_content).unwrap(); - // Only include provision.tegraflash (not provision.usb) - let include_patterns = vec!["provision.tegraflash".to_string()]; + // Only 
include provision_profiles.tegraflash (not provision_profiles.usb) + let include_patterns = vec!["provision_profiles.tegraflash".to_string()]; Config::merge_external_config( &mut main_config, &external_config, @@ -6925,35 +6727,39 @@ sdk: ); // Check that ext.remote-ext was merged (always happens) - let ext_section = main_config.get("ext").unwrap().as_mapping().unwrap(); + let ext_section = main_config.get("extensions").unwrap().as_mapping().unwrap(); assert!(ext_section.contains_key(serde_yaml::Value::String("remote-ext".to_string()))); - // Check that provision.tegraflash was included - let provision = main_config.get("provision").unwrap().as_mapping().unwrap(); + // Check that provision_profiles.tegraflash was included + let provision = main_config + .get("provision_profiles") + .unwrap() + .as_mapping() + .unwrap(); assert!(provision.contains_key(serde_yaml::Value::String("tegraflash".to_string()))); assert!(provision.contains_key(serde_yaml::Value::String("existing-profile".to_string()))); // Check that provision.usb was NOT included (not in patterns) assert!(!provision.contains_key(serde_yaml::Value::String("usb".to_string()))); - // Check that sdk.dependencies was NOT merged (not in patterns) + // Check that sdk.packages was NOT merged (not in patterns) assert!(main_config.get("sdk").is_none()); } #[test] fn test_merge_external_config_auto_include_compile() { let main_config_content = r#" -ext: +extensions: local-ext: types: - sysext "#; let external_config_content = r#" -ext: +extensions: remote-ext: types: - sysext - dependencies: + packages: nvidia-l4t: compile: nvidia-l4t sdk: @@ -6995,9 +6801,9 @@ sdk: #[test] fn test_find_compile_dependencies_in_ext() { let ext_config_content = r#" -ext: +extensions: my-extension: - dependencies: + packages: nvidia-l4t: compile: nvidia-l4t some-package: @@ -7016,19 +6822,19 @@ ext: #[test] fn test_extension_source_include_serialization() { - let source = ExtensionSource::Repo { + let source = ExtensionSource::Package { 
version: "*".to_string(), package: None, repo_name: None, include: Some(vec![ - "provision.tegraflash".to_string(), + "provision_profiles.tegraflash".to_string(), "sdk.compile.*".to_string(), ]), }; let serialized = serde_yaml::to_string(&source).unwrap(); assert!(serialized.contains("include:")); - assert!(serialized.contains("provision.tegraflash")); + assert!(serialized.contains("provision_profiles.tegraflash")); assert!(serialized.contains("sdk.compile.*")); // Test deserialization @@ -7036,16 +6842,16 @@ ext: type: repo version: "*" include: - - provision.tegraflash + - provision_profiles.tegraflash - sdk.compile.* "#; let deserialized: ExtensionSource = serde_yaml::from_str(yaml_content).unwrap(); match deserialized { - ExtensionSource::Repo { include, .. } => { + ExtensionSource::Package { include, .. } => { assert!(include.is_some()); let patterns = include.unwrap(); assert_eq!(patterns.len(), 2); - assert_eq!(patterns[0], "provision.tegraflash"); + assert_eq!(patterns[0], "provision_profiles.tegraflash"); } _ => panic!("Expected Repo variant"), } diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs index 682213a..2b2f6cb 100644 --- a/src/utils/ext_fetch.rs +++ b/src/utils/ext_fetch.rs @@ -88,7 +88,7 @@ impl ExtensionFetcher { let ext_install_path = install_dir.join(ext_name); match source { - ExtensionSource::Repo { + ExtensionSource::Package { version, package, repo_name, @@ -396,9 +396,7 @@ echo "Successfully copied extension '{ext_name}' from {resolved_source_str} to { let ext_path = install_dir.join(ext_name); // Check if the directory exists and has an avocado config file ext_path.exists() - && (ext_path.join("avocado.yaml").exists() - || ext_path.join("avocado.yml").exists() - || ext_path.join("avocado.toml").exists()) + && (ext_path.join("avocado.yaml").exists() || ext_path.join("avocado.yml").exists()) } } diff --git a/src/utils/interpolation/mod.rs b/src/utils/interpolation/mod.rs index 6b4bc1e..fc39f59 100644 --- 
a/src/utils/interpolation/mod.rs +++ b/src/utils/interpolation/mod.rs @@ -723,9 +723,9 @@ key: "{{ env.TRIMMED }}" let mut config = parse_yaml( r#" default_target: "x86_64" -runtime: +runtimes: dev: - dependencies: + packages: pkg1: "{{ env.PKG_VERSION }}" pkg2: "{{ config.default_target }}" array: @@ -736,9 +736,9 @@ runtime: interpolate_config(&mut config, None).unwrap(); - let runtime = config.get("runtime").unwrap(); + let runtime = config.get("runtimes").unwrap(); let dev = runtime.get("dev").unwrap(); - let deps = dev.get("dependencies").unwrap(); + let deps = dev.get("packages").unwrap(); assert_eq!(deps.get("pkg1").unwrap().as_str().unwrap(), "1.2.3"); assert_eq!(deps.get("pkg2").unwrap().as_str().unwrap(), "x86_64"); @@ -809,7 +809,7 @@ distro: version: 0.1.0 sdk: image: "docker.io/avocadolinux/sdk:{{ config.distro.channel }}" - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" "#, ); @@ -822,7 +822,7 @@ sdk: "docker.io/avocadolinux/sdk:apollo-edge" ); - let deps = sdk.get("dependencies").unwrap(); + let deps = sdk.get("packages").unwrap(); assert_eq!( deps.get("avocado-sdk-toolchain").unwrap().as_str().unwrap(), "0.1.0" @@ -837,7 +837,7 @@ sdk: r#" default_target: qemux86-64 sdk: - dependencies: + packages: packagegroup-rust-cross-canadian-{{ avocado.target }}: "*" regular-package: "1.0.0" "#, @@ -846,7 +846,7 @@ sdk: interpolate_config(&mut config, Some("qemux86-64")).unwrap(); let sdk = config.get("sdk").unwrap(); - let deps = sdk.get("dependencies").unwrap(); + let deps = sdk.get("packages").unwrap(); // The key should be interpolated with the target assert!(deps @@ -879,14 +879,14 @@ sdk: let mut config = parse_yaml( r#" -dependencies: +packages: package-{{ env.MY_SUFFIX }}: "1.0.0" "#, ); interpolate_config(&mut config, None).unwrap(); - let deps = config.get("dependencies").unwrap(); + let deps = config.get("packages").unwrap(); assert!(deps.get("package-custom").is_some()); assert_eq!( 
deps.get("package-custom").unwrap().as_str().unwrap(), diff --git a/src/utils/stamps.rs b/src/utils/stamps.rs index 8041f80..f91a814 100644 --- a/src/utils/stamps.rs +++ b/src/utils/stamps.rs @@ -660,7 +660,7 @@ pub fn compute_sdk_input_hash(config: &serde_yaml::Value) -> Result // Include sdk.dependencies if let Some(sdk) = config.get("sdk") { - if let Some(deps) = sdk.get("dependencies") { + if let Some(deps) = sdk.get("packages") { hash_data.insert( serde_yaml::Value::String("sdk.dependencies".to_string()), deps.clone(), @@ -696,8 +696,8 @@ pub fn compute_ext_input_hash(config: &serde_yaml::Value, ext_name: &str) -> Res let mut hash_data = serde_yaml::Mapping::new(); // Include ext..dependencies - if let Some(ext) = config.get("ext").and_then(|e| e.get(ext_name)) { - if let Some(deps) = ext.get("dependencies") { + if let Some(ext) = config.get("extensions").and_then(|e| e.get(ext_name)) { + if let Some(deps) = ext.get("packages") { hash_data.insert( serde_yaml::Value::String(format!("ext.{ext_name}.dependencies")), deps.clone(), @@ -725,7 +725,7 @@ pub fn compute_runtime_input_hash( let mut hash_data = serde_yaml::Mapping::new(); // Include the merged dependencies section - if let Some(deps) = merged_runtime.get("dependencies") { + if let Some(deps) = merged_runtime.get("packages") { hash_data.insert( serde_yaml::Value::String(format!("runtime.{runtime_name}.dependencies")), deps.clone(), @@ -1538,22 +1538,6 @@ mod tests { // Test the Local variant (the primary way to specify extensions) let local = RuntimeExtDep::Local("my-local-ext".to_string()); assert_eq!(local.name(), "my-local-ext"); - - // Test deprecated variants for backward compatibility - #[allow(deprecated)] - { - let external = RuntimeExtDep::External { - name: "my-external-ext".to_string(), - config_path: "path/to/config.yaml".to_string(), - }; - assert_eq!(external.name(), "my-external-ext"); - - let versioned = RuntimeExtDep::Versioned { - name: "my-versioned-ext".to_string(), - version: 
"1.2.3".to_string(), - }; - assert_eq!(versioned.name(), "my-versioned-ext"); - } } #[test] diff --git a/src/utils/target.rs b/src/utils/target.rs index 13109d0..72edba5 100644 --- a/src/utils/target.rs +++ b/src/utils/target.rs @@ -235,9 +235,9 @@ mod tests { supported_targets: None, src_dir: None, distro: None, - runtime: None, + runtimes: None, sdk: None, - provision: None, + provision_profiles: None, signing_keys: None, } } @@ -249,9 +249,9 @@ mod tests { supported_targets: Some(SupportedTargets::List(targets)), src_dir: None, distro: None, - runtime: None, + runtimes: None, sdk: None, - provision: None, + provision_profiles: None, signing_keys: None, } } @@ -263,9 +263,9 @@ mod tests { supported_targets: Some(SupportedTargets::All("*".to_string())), src_dir: None, distro: None, - runtime: None, + runtimes: None, sdk: None, - provision: None, + provision_profiles: None, signing_keys: None, } } diff --git a/tests/fixtures/configs/complex.yaml b/tests/fixtures/configs/complex.yaml index 6a5bd07..ec5c131 100644 --- a/tests/fixtures/configs/complex.yaml +++ b/tests/fixtures/configs/complex.yaml @@ -5,11 +5,11 @@ distro: sdk: image: ghcr.io/avocado-framework/avocado-sdk:v1.0.0 version: 1.0.0 -runtime: +runtimes: default: target: aarch64-unknown-linux-gnu board: raspberry-pi-4 -ext: +extensions: web-server: types: - sysext diff --git a/tests/fixtures/configs/external-config.yaml b/tests/fixtures/configs/external-config.yaml index e8d20f6..6c87ccf 100644 --- a/tests/fixtures/configs/external-config.yaml +++ b/tests/fixtures/configs/external-config.yaml @@ -1,11 +1,11 @@ src_dir: . 
-ext: +extensions: external-extension: types: - sysext packages: - curl - dependencies: + packages: nested-dep: ext: nested-extension config: nested-config.yaml diff --git a/tests/fixtures/configs/minimal.yaml b/tests/fixtures/configs/minimal.yaml index df5f59e..a80cdd7 100644 --- a/tests/fixtures/configs/minimal.yaml +++ b/tests/fixtures/configs/minimal.yaml @@ -4,7 +4,7 @@ distro: version: 0.1.0 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: {} +extensions: {} diff --git a/tests/fixtures/configs/nested-config.yaml b/tests/fixtures/configs/nested-config.yaml index 90c69cd..391593a 100644 --- a/tests/fixtures/configs/nested-config.yaml +++ b/tests/fixtures/configs/nested-config.yaml @@ -1,5 +1,5 @@ src_dir: . -ext: +extensions: nested-extension: types: - sysext diff --git a/tests/fixtures/configs/with-both-extensions.yaml b/tests/fixtures/configs/with-both-extensions.yaml index a548fcf..f45c7c3 100644 --- a/tests/fixtures/configs/with-both-extensions.yaml +++ b/tests/fixtures/configs/with-both-extensions.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: test-both: sysext: true confext: true diff --git a/tests/fixtures/configs/with-confext.yaml b/tests/fixtures/configs/with-confext.yaml index 7fc5b69..20c548d 100644 --- a/tests/fixtures/configs/with-confext.yaml +++ b/tests/fixtures/configs/with-confext.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: test-confext: types: - confext diff --git a/tests/fixtures/configs/with-external-extensions.yaml b/tests/fixtures/configs/with-external-extensions.yaml index f079526..8dd4bd5 100644 --- a/tests/fixtures/configs/with-external-extensions.yaml +++ 
b/tests/fixtures/configs/with-external-extensions.yaml @@ -2,13 +2,13 @@ sdk: image: registry.fedoraproject.org/fedora-toolbox:40 repo_url: https://mirrors.fedoraproject.org/metalink?repo=fedora-40&arch=x86_64 repo_release: '40' -ext: +extensions: main-extension: types: - sysext packages: - nginx - dependencies: + packages: external-dep: ext: external-extension config: external-config.yaml diff --git a/tests/fixtures/configs/with-interpolation.yaml b/tests/fixtures/configs/with-interpolation.yaml index 4e90a8c..61c15f3 100644 --- a/tests/fixtures/configs/with-interpolation.yaml +++ b/tests/fixtures/configs/with-interpolation.yaml @@ -15,29 +15,29 @@ nested: reference_nested: "{{ config.nested.value }}" -runtime: +runtimes: dev: # Test avocado.target interpolation target: "{{ avocado.target }}" - dependencies: + packages: # Test multiple interpolation types base-pkg: "{{ config.base_image }}" target-pkg: "avocado-os-{{ avocado.target }}" env-pkg: "{{ env.TEST_PKG_ENV_VAR_INTERP }}" prod: - dependencies: + packages: # Test combined interpolation combined: "{{ config.base_image }}-{{ avocado.target }}" sdk: # Test distro interpolation via config context image: "docker.io/avocadolinux/sdk:{{ config.distro.channel }}" - dependencies: + packages: avocado-sdk-toolchain: "{{ config.distro.version }}" nativesdk-avocado-ext-dev: "*" -ext: +extensions: test-ext: types: - sysext diff --git a/tests/fixtures/configs/with-nested-target-config.yaml b/tests/fixtures/configs/with-nested-target-config.yaml index 9d3f71d..2e638fc 100644 --- a/tests/fixtures/configs/with-nested-target-config.yaml +++ b/tests/fixtures/configs/with-nested-target-config.yaml @@ -7,10 +7,10 @@ distro: version: 0.1.0 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: avocado-ext-webkit: version: 1.0.0 release: r0 diff --git a/tests/fixtures/configs/with-overlay-merge.yaml b/tests/fixtures/configs/with-overlay-merge.yaml 
index baa6bd1..0eda6a2 100644 --- a/tests/fixtures/configs/with-overlay-merge.yaml +++ b/tests/fixtures/configs/with-overlay-merge.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: peridio: types: - sysext diff --git a/tests/fixtures/configs/with-overlay-opaque.yaml b/tests/fixtures/configs/with-overlay-opaque.yaml index e05db37..6835780 100644 --- a/tests/fixtures/configs/with-overlay-opaque.yaml +++ b/tests/fixtures/configs/with-overlay-opaque.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: peridio: types: - sysext diff --git a/tests/fixtures/configs/with-overlay.yaml b/tests/fixtures/configs/with-overlay.yaml index 35a97a1..fcea122 100644 --- a/tests/fixtures/configs/with-overlay.yaml +++ b/tests/fixtures/configs/with-overlay.yaml @@ -1,14 +1,14 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: peridio: types: - sysext - confext overlay: peridio - dependencies: + packages: curl: '*' diff --git a/tests/fixtures/configs/with-signing-keys.yaml b/tests/fixtures/configs/with-signing-keys.yaml index 2581319..f29ebaa 100644 --- a/tests/fixtures/configs/with-signing-keys.yaml +++ b/tests/fixtures/configs/with-signing-keys.yaml @@ -8,7 +8,7 @@ signing_keys: - my-production-key: abc123def456abc123def456abc123def456abc123def456abc123def456abc1 - backup-key: 789012fedcba789012fedcba789012fedcba789012fedcba789012fedcba7890 -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu dev: diff --git a/tests/fixtures/configs/with-sysext.yaml b/tests/fixtures/configs/with-sysext.yaml index a1e29d1..f6c951f 100644 --- a/tests/fixtures/configs/with-sysext.yaml +++ 
b/tests/fixtures/configs/with-sysext.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: test-sysext: types: - sysext diff --git a/tests/fixtures/configs/with-users.yaml b/tests/fixtures/configs/with-users.yaml index d046f77..c99a213 100644 --- a/tests/fixtures/configs/with-users.yaml +++ b/tests/fixtures/configs/with-users.yaml @@ -1,10 +1,10 @@ default_target: qemux86-64 sdk: image: ghcr.io/avocado-framework/avocado-sdk:latest -runtime: +runtimes: default: target: x86_64-unknown-linux-gnu -ext: +extensions: avocado-dev: types: - sysext diff --git a/tests/interpolation.rs b/tests/interpolation.rs index 9dfea1f..b6caff4 100644 --- a/tests/interpolation.rs +++ b/tests/interpolation.rs @@ -25,7 +25,7 @@ fn test_env_var_interpolation() { let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); // Verify that runtime dev dependencies include interpolated env var - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let dev = runtime.get("dev").unwrap(); if let Some(deps) = &dev.dependencies { @@ -53,7 +53,7 @@ fn test_missing_env_var_warning() { // Should succeed but replace with empty string let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let dev = runtime.get("dev").unwrap(); if let Some(deps) = &dev.dependencies { @@ -102,7 +102,7 @@ fn test_avocado_target_from_env() { let config_path = get_interpolation_test_config(); let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let dev = runtime.get("dev").unwrap(); assert_eq!(dev.target.as_ref().unwrap(), "aarch64-unknown-linux-gnu"); @@ -118,7 +118,7 @@ fn 
test_avocado_target_from_config() { let config_path = get_interpolation_test_config(); let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let dev = runtime.get("dev").unwrap(); // Should use default_target from config @@ -132,7 +132,7 @@ fn test_avocado_target_unavailable() { // Create a test config without default_target let test_yaml = r#" -runtime: +runtimes: dev: target: "{{ avocado.target }}" "#; @@ -141,7 +141,7 @@ runtime: avocado_cli::utils::interpolation::interpolate_config(&mut parsed, None).unwrap(); // Should leave template as-is - let runtime = parsed.get("runtime").unwrap(); + let runtime = parsed.get("runtimes").unwrap(); let dev = runtime.get("dev").unwrap(); let target = dev.get("target").unwrap().as_str().unwrap(); @@ -189,7 +189,7 @@ fn test_multiple_interpolation_types() { let config_path = get_interpolation_test_config(); let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let dev = runtime.get("dev").unwrap(); if let Some(deps) = &dev.dependencies { @@ -224,7 +224,7 @@ fn test_combined_interpolation() { let config_path = get_interpolation_test_config(); let config = avocado_cli::utils::config::Config::load(&config_path).unwrap(); - let runtime = config.runtime.as_ref().unwrap(); + let runtime = config.runtimes.as_ref().unwrap(); let prod = runtime.get("prod").unwrap(); if let Some(deps) = &prod.dependencies { @@ -276,7 +276,7 @@ fn test_config_distro_interpolation_in_sdk() { ); // SDK dependencies should use config.distro.version interpolation - let deps = sdk.dependencies.as_ref().unwrap(); + let deps = sdk.packages.as_ref().unwrap(); let toolchain_version = deps.get("avocado-sdk-toolchain").unwrap(); assert_eq!(toolchain_version.as_str().unwrap(), "0.1.0"); } diff --git 
a/tests/target_precedence.rs b/tests/target_precedence.rs index b5fe6bb..09721a7 100644 --- a/tests/target_precedence.rs +++ b/tests/target_precedence.rs @@ -16,14 +16,18 @@ fn test_target_precedence_order() { env::remove_var("AVOCADO_TARGET"); let config_content = r#" -default_target = "config-target" -supported_targets = ["cli-target", "env-target", "config-target"] - -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" - -[runtime.dev] -target = "qemux86-64" +default_target: "config-target" +supported_targets: + - cli-target + - env-target + - config-target + +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" + +runtimes: + dev: + target: "qemux86-64" "#; let mut config_file = NamedTempFile::new().unwrap(); @@ -85,11 +89,12 @@ target = "qemux86-64" #[serial] fn test_target_error_when_none_specified() { let config_content = r#" -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" -[runtime.dev] -target = "qemux86-64" +runtimes: + dev: + target: "qemux86-64" "#; let mut config_file = NamedTempFile::new().unwrap(); @@ -124,13 +129,15 @@ fn test_avocado_target_environment_variable() { env::remove_var("AVOCADO_TARGET"); let config_content = r#" -supported_targets = ["test-env-target"] +supported_targets: + - test-env-target -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" -[runtime.dev] -target = "qemux86-64" +runtimes: + dev: + target: "qemux86-64" "#; let mut config_file = NamedTempFile::new().unwrap(); @@ -206,17 +213,21 @@ fn test_all_commands_accept_target_flag() { // Test that major commands accept --target flag without error let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["test", "qemux86-64"] +default_target: "qemux86-64" +supported_targets: + - test + - qemux86-64 -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" +sdk: + image: 
"ghcr.io/avocado-framework/avocado-sdk:latest" -[runtime.default] -target = "x86_64-unknown-linux-gnu" +runtimes: + default: + target: "x86_64-unknown-linux-gnu" -[ext.test-ext] -sysext = true +extensions: + test-ext: + sysext: true "#; let mut config_file = NamedTempFile::new().unwrap(); @@ -274,14 +285,16 @@ fn test_sdk_target_validation_supported() { env::remove_var("AVOCADO_TARGET"); let config_content = r#" -default_target = "qemux86-64" -supported_targets = ["qemux86-64"] +default_target: "qemux86-64" +supported_targets: + - qemux86-64 -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" -[runtime.dev] -target = "qemux86-64" +runtimes: + dev: + target: "qemux86-64" "#; let mut config_file = NamedTempFile::new().unwrap(); @@ -313,14 +326,16 @@ fn test_sdk_target_validation_unsupported() { env::remove_var("AVOCADO_TARGET"); let config_content = r#" -default_target = "unsupported-target" -supported_targets = ["qemux86-64"] +default_target: "unsupported-target" +supported_targets: + - qemux86-64 -[sdk] -image = "ghcr.io/avocado-framework/avocado-sdk:latest" +sdk: + image: "ghcr.io/avocado-framework/avocado-sdk:latest" -[runtime.dev] -target = "qemux86-64" +runtimes: + dev: + target: "qemux86-64" "#; let mut config_file = NamedTempFile::new().unwrap(); From 5665714c5699540356e37f726bf489e226fec804 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 18:23:52 -0500 Subject: [PATCH 16/23] add avocado.distro.{version|channel} to interpolation context --- src/utils/interpolation/avocado.rs | 306 +++++++++++++++++++++++++---- src/utils/interpolation/mod.rs | 207 +++++++++++++++++-- 2 files changed, 460 insertions(+), 53 deletions(-) diff --git a/src/utils/interpolation/avocado.rs b/src/utils/interpolation/avocado.rs index 335e8b0..20587f9 100644 --- a/src/utils/interpolation/avocado.rs +++ b/src/utils/interpolation/avocado.rs @@ -4,42 +4,178 @@ //! //! **Available values:** //! 
- `{{ avocado.target }}` - Resolved target architecture +//! - `{{ avocado.distro.version }}` - Distro version from main config +//! - `{{ avocado.distro.channel }}` - Distro channel from main config //! //! **Behavior:** //! - Returns None if value is not available (leaves template as-is) //! - Never produces errors - CLI will handle validation later //! - Follows the same precedence as CLI: CLI args > env vars > config +//! - distro values come from the main config context, not the current config use anyhow::Result; use serde_yaml::Value; use std::env; -/// Resolve an avocado computed value. +/// Context for avocado interpolation values. +/// +/// This struct holds values that are set by the main config and should be +/// available to all subsequent configs during interpolation. This ensures +/// that `avocado.*` values always reference the main config's values, +/// while `config.*` values reference the current config being interpolated. +#[derive(Debug, Clone, Default)] +pub struct AvocadoContext { + /// Target architecture (CLI > env > config precedence) + pub target: Option, + /// Distro version from the main config + pub distro_version: Option, + /// Distro channel from the main config + pub distro_channel: Option, +} + +impl AvocadoContext { + /// Create a new empty context. + #[allow(dead_code)] + pub fn new() -> Self { + Self::default() + } + + /// Create a context with just the target value. + /// + /// This is useful for simple interpolation cases where only target is needed. + #[allow(dead_code)] + pub fn with_target(target: Option<&str>) -> Self { + Self { + target: target.map(|s| s.to_string()), + distro_version: None, + distro_channel: None, + } + } + + /// Create a context from a main config YAML value. + /// + /// Extracts target (with CLI override and env precedence) and distro values + /// from the config to be used for interpolation in all subsequent configs. 
+ /// + /// # Arguments + /// * `root` - The main config YAML value + /// * `cli_target` - Optional CLI target override (highest priority) + pub fn from_main_config(root: &Value, cli_target: Option<&str>) -> Self { + // Resolve target with precedence: CLI > env > config + let target = Self::resolve_target_value(root, cli_target); + + // Extract distro values from the main config + let (distro_version, distro_channel) = Self::extract_distro_values(root); + + Self { + target, + distro_version, + distro_channel, + } + } + + /// Resolve the target value with standard precedence. + fn resolve_target_value(root: &Value, cli_target: Option<&str>) -> Option { + // 1. CLI target (highest priority) + if let Some(target) = cli_target { + return Some(target.to_string()); + } + + // 2. Environment variable + if let Ok(target) = env::var("AVOCADO_TARGET") { + return Some(target); + } + + // 3. Config default_target + if let Some(default_target) = root.get("default_target") { + if let Some(target_str) = default_target.as_str() { + return Some(target_str.to_string()); + } + } + + None + } + + /// Extract distro version and channel from the config. + fn extract_distro_values(root: &Value) -> (Option, Option) { + let distro = match root.get("distro") { + Some(d) => d, + None => return (None, None), + }; + + let version = distro + .get("version") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + let channel = distro + .get("channel") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + (version, channel) + } + + /// Create a context with all values explicitly provided. + /// + /// This is useful when constructing from a deserialized Config struct. 
+ /// + /// # Arguments + /// * `target` - The resolved target (CLI > env > config precedence should be applied by caller) + /// * `distro_version` - The distro version from the main config + /// * `distro_channel` - The distro channel from the main config + #[allow(dead_code)] + pub fn with_values( + target: Option, + distro_version: Option, + distro_channel: Option, + ) -> Self { + Self { + target, + distro_version, + distro_channel, + } + } +} + +/// Resolve an avocado computed value using path segments. /// /// # Arguments -/// * `key` - The avocado key (e.g., "target") -/// * `root` - The root YAML value for fallback lookups -/// * `cli_target` - Optional CLI target value (highest priority) +/// * `path` - The avocado path segments (e.g., ["target"] or ["distro", "version"]) +/// * `root` - The root YAML value for fallback lookups (used for target resolution) +/// * `context` - Optional avocado context with pre-resolved values from main config /// /// # Returns /// Result with Option - Some(value) if available, None to leave template as-is /// /// # Examples /// ``` -/// # use avocado_cli::utils::interpolation::avocado::resolve; +/// # use avocado_cli::utils::interpolation::avocado::{resolve, AvocadoContext}; /// let yaml = serde_yaml::from_str("default_target: x86_64-unknown-linux-gnu").unwrap(); /// -/// // With CLI target (highest priority) -/// let result = resolve("target", &yaml, Some("cli-target")).unwrap(); +/// // With context containing target +/// let ctx = AvocadoContext::with_target(Some("cli-target")); +/// let result = resolve(&["target"], &yaml, Some(&ctx)).unwrap(); /// assert_eq!(result, Some("cli-target".to_string())); /// -/// // From config -/// let result = resolve("target", &yaml, None).unwrap(); -/// assert_eq!(result, Some("x86_64-unknown-linux-gnu".to_string())); +/// // With distro context +/// let ctx = AvocadoContext { +/// target: None, +/// distro_version: Some("1.0.0".to_string()), +/// distro_channel: 
Some("stable".to_string()), +/// }; +/// let result = resolve(&["distro", "version"], &yaml, Some(&ctx)).unwrap(); +/// assert_eq!(result, Some("1.0.0".to_string())); /// ``` -pub fn resolve(key: &str, root: &Value, cli_target: Option<&str>) -> Result> { - match key { - "target" => resolve_target(root, cli_target), +pub fn resolve( + path: &[&str], + root: &Value, + context: Option<&AvocadoContext>, +) -> Result> { + match path { + ["target"] => resolve_target(root, context), + ["distro", "version"] => resolve_distro_version(context), + ["distro", "channel"] => resolve_distro_channel(context), _ => { // Other avocado keys are not yet supported, but don't error // Just leave the template as-is for future extension @@ -50,21 +186,16 @@ pub fn resolve(key: &str, root: &Value, cli_target: Option<&str>) -> Result - Some(target) if available, None if not available -fn resolve_target(root: &Value, cli_target: Option<&str>) -> Result> { - // 1. CLI target (highest priority) - if let Some(target) = cli_target { - return Ok(Some(target.to_string())); +/// 3. Config default_target (from root - the current config) +fn resolve_target(root: &Value, context: Option<&AvocadoContext>) -> Result> { + // 1. Context target (highest priority - from CLI or pre-resolved) + if let Some(ctx) = context { + if let Some(ref target) = ctx.target { + return Ok(Some(target.clone())); + } } // 2. Environment variable @@ -72,7 +203,7 @@ fn resolve_target(root: &Value, cli_target: Option<&str>) -> Result) -> Result) -> Result> { + if let Some(ctx) = context { + if let Some(ref version) = ctx.distro_version { + return Ok(Some(version.clone())); + } + } + // Not available - leave template as-is + Ok(None) +} + +/// Resolve the distro channel from the avocado context. +/// +/// This value comes from the main config and is passed through the context, +/// ensuring all configs use the same distro channel. 
+fn resolve_distro_channel(context: Option<&AvocadoContext>) -> Result> { + if let Some(ctx) = context { + if let Some(ref channel) = ctx.distro_channel { + return Ok(Some(channel.clone())); + } + } + // Not available - leave template as-is + Ok(None) +} + #[cfg(test)] mod tests { use super::*; @@ -95,9 +254,10 @@ mod tests { #[test] #[serial] - fn test_resolve_target_from_cli() { + fn test_resolve_target_from_context() { let config = parse_yaml("default_target: config-target"); - let result = resolve("target", &config, Some("cli-target")).unwrap(); + let ctx = AvocadoContext::with_target(Some("cli-target")); + let result = resolve(&["target"], &config, Some(&ctx)).unwrap(); assert_eq!(result, Some("cli-target".to_string())); } @@ -106,7 +266,7 @@ mod tests { fn test_resolve_target_from_env() { env::set_var("AVOCADO_TARGET", "env-target"); let config = parse_yaml("default_target: config-target"); - let result = resolve("target", &config, None).unwrap(); + let result = resolve(&["target"], &config, None).unwrap(); assert_eq!(result, Some("env-target".to_string())); env::remove_var("AVOCADO_TARGET"); } @@ -116,7 +276,7 @@ mod tests { fn test_resolve_target_from_config() { env::remove_var("AVOCADO_TARGET"); let config = parse_yaml("default_target: config-target"); - let result = resolve("target", &config, None).unwrap(); + let result = resolve(&["target"], &config, None).unwrap(); assert_eq!(result, Some("config-target".to_string())); } @@ -125,16 +285,92 @@ mod tests { fn test_resolve_target_unavailable() { env::remove_var("AVOCADO_TARGET"); let config = parse_yaml("{}"); - let result = resolve("target", &config, None).unwrap(); + let result = resolve(&["target"], &config, None).unwrap(); // Should return None (leave template as-is) assert_eq!(result, None); } #[test] - fn test_resolve_unknown_key() { + fn test_resolve_unknown_path() { let config = parse_yaml("{}"); - let result = resolve("unknown", &config, None).unwrap(); + let result = resolve(&["unknown"], 
&config, None).unwrap(); // Should return None (not supported yet, but no error) assert_eq!(result, None); } + + #[test] + fn test_resolve_distro_version_from_context() { + let config = parse_yaml("{}"); + let ctx = AvocadoContext { + target: None, + distro_version: Some("1.2.3".to_string()), + distro_channel: None, + }; + let result = resolve(&["distro", "version"], &config, Some(&ctx)).unwrap(); + assert_eq!(result, Some("1.2.3".to_string())); + } + + #[test] + fn test_resolve_distro_channel_from_context() { + let config = parse_yaml("{}"); + let ctx = AvocadoContext { + target: None, + distro_version: None, + distro_channel: Some("apollo-edge".to_string()), + }; + let result = resolve(&["distro", "channel"], &config, Some(&ctx)).unwrap(); + assert_eq!(result, Some("apollo-edge".to_string())); + } + + #[test] + fn test_resolve_distro_without_context() { + let config = parse_yaml("{}"); + // Without context, distro values should return None + let result = resolve(&["distro", "version"], &config, None).unwrap(); + assert_eq!(result, None); + + let result = resolve(&["distro", "channel"], &config, None).unwrap(); + assert_eq!(result, None); + } + + #[test] + fn test_avocado_context_from_main_config() { + let config = parse_yaml( + r#" +default_target: x86_64-unknown-linux-gnu +distro: + version: 0.1.0 + channel: apollo-edge +"#, + ); + let ctx = AvocadoContext::from_main_config(&config, None); + assert_eq!(ctx.target, Some("x86_64-unknown-linux-gnu".to_string())); + assert_eq!(ctx.distro_version, Some("0.1.0".to_string())); + assert_eq!(ctx.distro_channel, Some("apollo-edge".to_string())); + } + + #[test] + fn test_avocado_context_cli_overrides_config() { + let config = parse_yaml( + r#" +default_target: config-target +distro: + version: 0.1.0 + channel: apollo-edge +"#, + ); + let ctx = AvocadoContext::from_main_config(&config, Some("cli-target")); + assert_eq!(ctx.target, Some("cli-target".to_string())); + assert_eq!(ctx.distro_version, Some("0.1.0".to_string())); 
+ assert_eq!(ctx.distro_channel, Some("apollo-edge".to_string())); + } + + #[test] + fn test_avocado_context_missing_distro() { + let config = parse_yaml("default_target: x86_64"); + let ctx = AvocadoContext::from_main_config(&config, None); + assert_eq!(ctx.target, Some("x86_64".to_string())); + assert_eq!(ctx.distro_version, None); + assert_eq!(ctx.distro_channel, None); + } } diff --git a/src/utils/interpolation/mod.rs b/src/utils/interpolation/mod.rs index fc39f59..1c3a16d 100644 --- a/src/utils/interpolation/mod.rs +++ b/src/utils/interpolation/mod.rs @@ -27,14 +27,21 @@ //! - Navigates the YAML tree using dot notation //! - Returns an error if path doesn't exist (fatal) //! - Converts non-string values to strings +//! - References are scoped to the current config being interpolated //! //! ## [`avocado`] - Computed Internal Values //! ```yaml //! target_pkg: "pkg-{{ avocado.target }}" +//! distro_image: "sdk:{{ avocado.distro.channel }}" +//! version_ref: "{{ avocado.distro.version }}" //! ``` -//! - Provides access to computed values like target architecture +//! - Provides access to computed values from the main config +//! - `avocado.target` - Target architecture (CLI > env > config precedence) +//! - `avocado.distro.version` - Distro version from the main config +//! - `avocado.distro.channel` - Distro channel from the main config //! - Leaves template as-is if value unavailable //! - Never produces errors (CLI handles validation) +//! - Ensures all configs use the same distro values from the main config //! //! # Features //! @@ -52,6 +59,9 @@ pub mod avocado; pub mod config; pub mod env; +// Re-export AvocadoContext for convenience +pub use avocado::AvocadoContext; + const MAX_ITERATIONS: usize = 100; /// Interpolate a simple string with the target value. 
@@ -102,6 +112,47 @@ pub fn interpolate_name(input: &str, target: &str) -> String { /// assert_eq!(config.get("derived").unwrap().as_str().unwrap(), "value"); /// ``` pub fn interpolate_config(yaml_value: &mut Value, cli_target: Option<&str>) -> Result<()> { + // Create a context with just the target for backward compatibility + let context = AvocadoContext::from_main_config(yaml_value, cli_target); + interpolate_config_with_context(yaml_value, &context) +} + +/// Interpolate configuration values using a pre-built avocado context. +/// +/// This is the preferred method when interpolating multiple configs that should +/// share the same avocado context (e.g., main config + extension configs). +/// +/// # Arguments +/// * `yaml_value` - The YAML value to interpolate (modified in place) +/// * `context` - The avocado context with pre-resolved values from the main config +/// +/// # Returns +/// Result indicating success or error if config references cannot be resolved +/// +/// # Examples +/// ``` +/// # use avocado_cli::utils::interpolation::{interpolate_config_with_context, AvocadoContext}; +/// let main_config = serde_yaml::from_str(r#" +/// distro: +/// version: "1.0.0" +/// channel: "stable" +/// "#).unwrap(); +/// +/// // Create context from main config +/// let context = AvocadoContext::from_main_config(&main_config, Some("x86_64")); +/// +/// // Use context to interpolate an extension config +/// let mut ext_config = serde_yaml::from_str(r#" +/// image: "sdk:{{ avocado.distro.channel }}" +/// "#).unwrap(); +/// +/// interpolate_config_with_context(&mut ext_config, &context).unwrap(); +/// assert_eq!(ext_config.get("image").unwrap().as_str().unwrap(), "sdk:stable"); +/// ``` +pub fn interpolate_config_with_context( + yaml_value: &mut Value, + context: &AvocadoContext, +) -> Result<()> { let mut iteration = 0; let mut changed = true; let mut previous_states: Vec = Vec::new(); @@ -128,7 +179,7 @@ pub fn interpolate_config(yaml_value: &mut Value, cli_target: 
Option<&str>) -> R let mut resolving_stack = HashSet::new(); // Start with empty path at root level let path: Vec = Vec::new(); - changed = interpolate_value(yaml_value, &root, cli_target, &mut resolving_stack, &path)?; + changed = interpolate_value(yaml_value, &root, context, &mut resolving_stack, &path)?; iteration += 1; } @@ -171,7 +222,7 @@ fn format_yaml_path(path: &[String], location: &YamlLocation) -> String { /// # Arguments /// * `value` - The current value to interpolate /// * `root` - The root YAML value for config references -/// * `cli_target` - Optional CLI target value +/// * `context` - The avocado context /// * `resolving_stack` - Set of templates currently being resolved (for cycle detection) /// * `path` - The current YAML path for error messages /// @@ -180,7 +231,7 @@ fn format_yaml_path(path: &[String], location: &YamlLocation) -> String { fn interpolate_value( value: &mut Value, root: &Value, - cli_target: Option<&str>, + context: &AvocadoContext, resolving_stack: &mut HashSet, path: &[String], ) -> Result { @@ -190,7 +241,7 @@ fn interpolate_value( Value::String(s) => { let location = YamlLocation::Value; if let Some(new_value) = - interpolate_string(s, root, cli_target, resolving_stack, path, &location)? + interpolate_string(s, root, context, resolving_stack, path, &location)? { *s = new_value; changed = true; @@ -207,7 +258,7 @@ fn interpolate_value( if let Some(new_key) = interpolate_string( key_str, root, - cli_target, + context, resolving_stack, path, &location, @@ -232,7 +283,7 @@ fn interpolate_value( }; let mut child_path = path.to_vec(); child_path.push(key_str); - if interpolate_value(v, root, cli_target, resolving_stack, &child_path)? { + if interpolate_value(v, root, context, resolving_stack, &child_path)? 
{ changed = true; } } @@ -241,7 +292,7 @@ fn interpolate_value( for (idx, item) in seq.iter_mut().enumerate() { let mut child_path = path.to_vec(); child_path.push(format!("[{idx}]")); - if interpolate_value(item, root, cli_target, resolving_stack, &child_path)? { + if interpolate_value(item, root, context, resolving_stack, &child_path)? { changed = true; } } @@ -259,7 +310,7 @@ fn interpolate_value( /// # Arguments /// * `input` - The input string that may contain templates /// * `root` - The root YAML value for config references -/// * `cli_target` - Optional CLI target value +/// * `context` - The avocado context /// * `resolving_stack` - Set of templates currently being resolved (for cycle detection) /// * `path` - The current YAML path for error messages /// * `location` - Whether this is a key or value @@ -269,7 +320,7 @@ fn interpolate_value( fn interpolate_string( input: &str, root: &Value, - cli_target: Option<&str>, + context: &AvocadoContext, resolving_stack: &mut HashSet, path: &[String], location: &YamlLocation, @@ -289,7 +340,7 @@ fn interpolate_string( let full_match = capture.get(0).unwrap().as_str(); let template = capture.get(1).unwrap().as_str().trim(); - match resolve_template(template, root, cli_target, resolving_stack) { + match resolve_template(template, root, context, resolving_stack) { Ok(Some(replacement)) => { result = result.replace(full_match, &replacement); any_replaced = true; @@ -317,7 +368,7 @@ fn interpolate_string( /// # Arguments /// * `template` - The template expression (e.g., "env.VAR" or "config.key") /// * `root` - The root YAML value for config references -/// * `cli_target` - Optional CLI target value +/// * `context` - The avocado context /// * `resolving_stack` - Set of templates currently being resolved (for cycle detection) /// /// # Returns @@ -325,7 +376,7 @@ fn interpolate_string( fn resolve_template( template: &str, root: &Value, - cli_target: Option<&str>, + context: &AvocadoContext, resolving_stack: &mut HashSet, 
) -> Result> { // Check for circular reference @@ -351,9 +402,9 @@ fn resolve_template( anyhow::bail!("Invalid template syntax: empty template"); } - let context = parts[0]; + let context_name = parts[0]; - let result = match context { + let result = match context_name { "env" => { if parts.len() < 2 { anyhow::bail!("Invalid env template: {template}"); @@ -372,12 +423,13 @@ fn resolve_template( if parts.len() < 2 { anyhow::bail!("Invalid avocado template: {template}"); } - let key = parts[1]; - avocado::resolve(key, root, cli_target) + // Pass the full path (excluding "avocado" prefix) + let path = &parts[1..]; + avocado::resolve(path, root, Some(context)) } _ => { anyhow::bail!( - "Unknown template context: {context}. Expected 'env', 'config', or 'avocado'" + "Unknown template context: {context_name}. Expected 'env', 'config', or 'avocado'" ); } }; @@ -915,4 +967,123 @@ mapping: "value" ); } + + #[test] + fn test_avocado_distro_version_interpolation() { + // Create main config with distro values + let main_config = parse_yaml( + r#" +distro: + version: "1.0.0" + channel: "apollo-edge" +"#, + ); + + // Create context from main config + let context = AvocadoContext::from_main_config(&main_config, Some("x86_64")); + + // Test interpolating an extension config + let mut ext_config = parse_yaml( + r#" +packages: + avocado-runtime: "{{ avocado.distro.version }}" + avocado-sdk: "{{ avocado.distro.channel }}" +"#, + ); + + interpolate_config_with_context(&mut ext_config, &context).unwrap(); + + let packages = ext_config.get("packages").unwrap(); + assert_eq!( + packages.get("avocado-runtime").unwrap().as_str().unwrap(), + "1.0.0" + ); + assert_eq!( + packages.get("avocado-sdk").unwrap().as_str().unwrap(), + "apollo-edge" + ); + } + + #[test] + fn test_avocado_distro_in_same_config() { + // When interpolating main config itself, avocado.distro should work + let mut config = parse_yaml( + r#" +distro: + version: "1.0.0" + channel: "apollo-edge" +sdk: + image: 
"docker.io/sdk:{{ avocado.distro.channel }}" + packages: + runtime: "{{ avocado.distro.version }}" +"#, + ); + + interpolate_config(&mut config, None).unwrap(); + + let sdk = config.get("sdk").unwrap(); + assert_eq!( + sdk.get("image").unwrap().as_str().unwrap(), + "docker.io/sdk:apollo-edge" + ); + + let packages = sdk.get("packages").unwrap(); + assert_eq!(packages.get("runtime").unwrap().as_str().unwrap(), "1.0.0"); + } + + #[test] + fn test_avocado_distro_unavailable() { + // When distro values are not set, template should be left as-is + let mut config = parse_yaml( + r#" +reference: "{{ avocado.distro.version }}" +"#, + ); + + // No distro in config, so it should remain unresolved + interpolate_config(&mut config, None).unwrap(); + + assert_eq!( + config.get("reference").unwrap().as_str().unwrap(), + "{{ avocado.distro.version }}" + ); + } + + #[test] + fn test_extension_uses_main_config_distro() { + // Main config has distro values + let main_config = parse_yaml( + r#" +distro: + version: "2.0.0" + channel: "stable" +"#, + ); + + let context = AvocadoContext::from_main_config(&main_config, Some("aarch64")); + + // Extension config has its OWN distro values, but avocado.distro should use main config + let mut ext_config = parse_yaml( + r#" +distro: + version: "1.0.0" + channel: "ext-channel" +avocado_version: "{{ avocado.distro.version }}" +config_version: "{{ config.distro.version }}" +"#, + ); + + interpolate_config_with_context(&mut ext_config, &context).unwrap(); + + // avocado.distro should use main config values + assert_eq!( + ext_config.get("avocado_version").unwrap().as_str().unwrap(), + "2.0.0" + ); + // config.distro should use extension's own values + assert_eq!( + ext_config.get("config_version").unwrap().as_str().unwrap(), + "1.0.0" + ); + } } From 51ee629079866f1d6ee5de7d03717ee9236542a6 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 19:14:55 -0500 Subject: [PATCH 17/23] update ext build sdk compile to support remote 
extensions --- src/commands/ext/build.rs | 70 ++++++++++++++++++++++++++--------- src/commands/ext/package.rs | 73 ++++++++++++++++++++++++++++++------- src/commands/sdk/compile.rs | 29 +++++++++++++-- 3 files changed, 138 insertions(+), 34 deletions(-) diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index d2cd20d..9c4bc0a 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -155,17 +155,18 @@ impl ExtBuildCommand { })?; // Get the config path where this extension is actually defined + // Note: For SDK compile operations, we need a path that's accessible from the host. + // For remote extensions, the sdk.compile sections are already merged into the main + // config via load_composed(), so we use the main config path for SDK compile. + // The ext_src_path (for overlay/scripts) is computed separately below. let ext_config_path = match &extension_location { ExtensionLocation::Local { config_path, .. } => config_path.clone(), ExtensionLocation::External { config_path, .. } => config_path.clone(), - ExtensionLocation::Remote { name, .. } => { - // Remote extensions are installed to $AVOCADO_PREFIX/includes// - let ext_install_path = - config.get_extension_install_path(&self.config_path, name, &target); - ext_install_path - .join("avocado.yaml") - .to_string_lossy() - .to_string() + ExtensionLocation::Remote { .. } => { + // For remote extensions, use the main config path because: + // 1. Remote extension sdk.compile sections are merged into main config via load_composed + // 2. 
The Docker volume path is not accessible from the host for SDK compile operations + self.config_path.clone() } }; @@ -230,10 +231,26 @@ impl ExtBuildCommand { anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) })?; + // Determine the extension source path for compile/install scripts + // For remote extensions, scripts are in $AVOCADO_PREFIX/includes// + // For local extensions, scripts are in /opt/src (the mounted src_dir) + let ext_script_workdir = match &extension_location { + ExtensionLocation::Remote { name, .. } => { + Some(format!("$AVOCADO_PREFIX/includes/{name}")) + } + ExtensionLocation::Local { .. } | ExtensionLocation::External { .. } => None, + }; + // Handle compile dependencies with install scripts before building the extension // Pass the ext_config_path so SDK compile sections are loaded from the correct config - self.handle_compile_dependencies(config, &ext_config, &target, &ext_config_path) - .await?; + self.handle_compile_dependencies( + config, + &ext_config, + &target, + &ext_config_path, + ext_script_workdir.as_deref(), + ) + .await?; // Get extension types from the types array (defaults to ["sysext", "confext"]) let ext_types = ext_config @@ -1502,12 +1519,16 @@ echo "Set proper permissions on authentication files""#, /// /// `sdk_config_path` is the path to the config file that contains the sdk.compile sections. /// For external extensions, this should be the external config path, not the main config. + /// + /// `ext_script_workdir` is the optional working directory for compile/install scripts + /// (container path). For remote extensions, this is `$AVOCADO_PREFIX/includes//`. 
async fn handle_compile_dependencies( &self, config: &Config, ext_config: &serde_yaml::Value, target: &str, sdk_config_path: &str, + ext_script_workdir: Option<&str>, ) -> Result<()> { // Get dependencies from extension configuration let dependencies = ext_config.get("packages").and_then(|v| v.as_mapping()); @@ -1577,6 +1598,12 @@ echo "Set proper permissions on authentication files""#, &format!("Using config path for SDK compile: {sdk_config_path}"), OutputLevel::Normal, ); + if let Some(workdir) = ext_script_workdir { + print_info( + &format!("Using script workdir: {workdir}"), + OutputLevel::Normal, + ); + } } let compile_command = SdkCompileCommand::new( sdk_config_path.to_string(), @@ -1585,7 +1612,8 @@ echo "Set proper permissions on authentication files""#, Some(target.to_string()), self.container_args.clone(), self.dnf_args.clone(), - ); + ) + .with_workdir(ext_script_workdir.map(|s| s.to_string())); compile_command.execute().await.with_context(|| { format!( @@ -1594,12 +1622,20 @@ echo "Set proper permissions on authentication files""#, })?; // Then, run the install script - // Note: install_script is already relative to /opt/src (the mounted src_dir in the container) - // so we don't need to prepend src_dir here - just use it directly like compile scripts do - let install_command = format!( - r#"if [ -f '{install_script}' ]; then echo 'Running install script: {install_script}'; export AVOCADO_BUILD_EXT_SYSROOT="$AVOCADO_EXT_SYSROOTS/{extension_name}"; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{install_script}'; else echo 'Install script {install_script} not found.'; ls -la; exit 1; fi"#, - extension_name = self.extension - ); + // For remote extensions, use ext_script_workdir to find the script + // For local extensions, scripts are relative to /opt/src (the mounted src_dir) + // Note: Use double quotes for workdir so $AVOCADO_PREFIX gets expanded by the shell + let install_command = if let Some(workdir) = ext_script_workdir { + format!( + r#"cd 
"{workdir}" && if [ -f '{install_script}' ]; then echo 'Running install script: {install_script}'; export AVOCADO_BUILD_EXT_SYSROOT="$AVOCADO_EXT_SYSROOTS/{extension_name}"; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{install_script}'; else echo 'Install script {install_script} not found.'; ls -la; exit 1; fi"#, + extension_name = self.extension + ) + } else { + format!( + r#"if [ -f '{install_script}' ]; then echo 'Running install script: {install_script}'; export AVOCADO_BUILD_EXT_SYSROOT="$AVOCADO_EXT_SYSROOTS/{extension_name}"; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{install_script}'; else echo 'Install script {install_script} not found.'; ls -la; exit 1; fi"#, + extension_name = self.extension + ) + }; if self.verbose { print_info( diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index d832513..c7e9d4e 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -186,8 +186,9 @@ impl ExtPackageCommand { let rpm_metadata = self.extract_rpm_metadata(&ext_config, &target)?; // Determine which files to package - // Pass both merged config (for package_files) and raw config (for all target overlays) - let package_files = self.get_package_files(&ext_config, raw_ext_config.as_ref()); + // Pass both merged config (for package_files), raw config (for all target overlays), + // and full parsed config (for sdk.compile scripts) + let package_files = self.get_package_files(&ext_config, raw_ext_config.as_ref(), parsed); if self.verbose { print_info( @@ -274,14 +275,18 @@ impl ExtPackageCommand { /// Otherwise, default to: /// - The avocado config file (avocado.yaml or avocado.yml) /// - All overlay directories (base level and target-specific) + /// - Compile scripts from sdk.compile sections + /// - Install scripts from extension package dependencies /// /// # Arguments /// * `ext_config` - The merged extension config (for package_files check) /// * `raw_ext_config` - The raw unmerged extension config (to find all 
target-specific overlays) + /// * `full_parsed_config` - The full parsed config (to find sdk.compile scripts) fn get_package_files( &self, ext_config: &serde_yaml::Value, raw_ext_config: Option<&serde_yaml::Value>, + full_parsed_config: &serde_yaml::Value, ) -> Vec { // Check if package_files is explicitly defined if let Some(package_files) = ext_config.get("package_files") { @@ -296,9 +301,9 @@ impl ExtPackageCommand { } } - // Default behavior: avocado.yaml + all overlay directories + // Default behavior: avocado.yaml + overlays + compile scripts + install scripts let mut default_files = vec!["avocado.yaml".to_string()]; - let mut seen_overlays = std::collections::HashSet::new(); + let mut seen_files = std::collections::HashSet::new(); // If we have the raw extension config, scan for all overlays if let Some(raw_config) = raw_ext_config { @@ -307,7 +312,7 @@ impl ExtPackageCommand { // Check if this is the base-level overlay if key.as_str() == Some("overlay") { if let Some(overlay_dir) = Self::extract_overlay_dir(value) { - if seen_overlays.insert(overlay_dir.clone()) { + if seen_files.insert(overlay_dir.clone()) { default_files.push(overlay_dir); } } @@ -316,7 +321,7 @@ impl ExtPackageCommand { else if let Some(target_config) = value.as_mapping() { if let Some(overlay_value) = target_config.get("overlay") { if let Some(overlay_dir) = Self::extract_overlay_dir(overlay_value) { - if seen_overlays.insert(overlay_dir.clone()) { + if seen_files.insert(overlay_dir.clone()) { default_files.push(overlay_dir); } } @@ -328,7 +333,37 @@ impl ExtPackageCommand { // Fallback: just check the merged config for overlay (current target only) if let Some(overlay) = ext_config.get("overlay") { if let Some(overlay_dir) = Self::extract_overlay_dir(overlay) { - default_files.push(overlay_dir); + if seen_files.insert(overlay_dir.clone()) { + default_files.push(overlay_dir); + } + } + } + } + + // Collect compile scripts from sdk.compile sections + if let Some(sdk_compile) = 
full_parsed_config + .get("sdk") + .and_then(|s| s.get("compile")) + .and_then(|c| c.as_mapping()) + { + for (_section_name, section_config) in sdk_compile { + if let Some(compile_script) = section_config.get("compile").and_then(|c| c.as_str()) + { + if seen_files.insert(compile_script.to_string()) { + default_files.push(compile_script.to_string()); + } + } + } + } + + // Collect install scripts from extension package dependencies + // Format: extensions..packages..install = "script.sh" + if let Some(packages) = ext_config.get("packages").and_then(|p| p.as_mapping()) { + for (_dep_name, dep_spec) in packages { + if let Some(install_script) = dep_spec.get("install").and_then(|i| i.as_str()) { + if seen_files.insert(install_script.to_string()) { + default_files.push(install_script.to_string()); + } } } } @@ -1123,7 +1158,9 @@ mod tests { serde_yaml::Value::String("1.0.0".to_string()), ); - let files = cmd.get_package_files(&ext_config, None); + // Pass empty full config since we're not testing compile script extraction + let empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&ext_config, None, &empty_full_config); assert_eq!(files, vec!["avocado.yaml".to_string()]); } @@ -1152,7 +1189,9 @@ mod tests { ); // Use the same config as raw config to test overlay extraction - let files = cmd.get_package_files(&ext_config, Some(&ext_config)); + // Pass empty full config since we're not testing compile script extraction + let empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&ext_config, Some(&ext_config), &empty_full_config); assert_eq!( files, vec!["avocado.yaml".to_string(), "my-overlay".to_string()] @@ -1194,7 +1233,9 @@ mod tests { ); // Use the same config as raw config to test overlay extraction - let files = cmd.get_package_files(&ext_config, Some(&ext_config)); + // Pass empty full config since we're not testing compile script extraction + let 
empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&ext_config, Some(&ext_config), &empty_full_config); assert_eq!( files, vec!["avocado.yaml".to_string(), "overlays/prod".to_string()] @@ -1238,7 +1279,9 @@ mod tests { serde_yaml::Value::String("my-overlay".to_string()), ); - let files = cmd.get_package_files(&ext_config, Some(&ext_config)); + // Pass empty full config since we're not testing compile script extraction + let empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&ext_config, Some(&ext_config), &empty_full_config); assert_eq!( files, vec![ @@ -1274,7 +1317,9 @@ mod tests { serde_yaml::Value::Sequence(vec![]), ); - let files = cmd.get_package_files(&ext_config, None); + // Pass empty full config since we're not testing compile script extraction + let empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&ext_config, None, &empty_full_config); assert_eq!(files, vec!["avocado.yaml".to_string()]); } @@ -1336,7 +1381,9 @@ mod tests { // Merged config (for a specific target, but package_files not set) let merged_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); - let files = cmd.get_package_files(&merged_config, Some(&raw_config)); + // Pass empty full config since we're not testing compile script extraction + let empty_full_config = serde_yaml::Value::Mapping(serde_yaml::Mapping::new()); + let files = cmd.get_package_files(&merged_config, Some(&raw_config), &empty_full_config); // Should include avocado.yaml and both target-specific overlays assert!(files.contains(&"avocado.yaml".to_string())); diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index f23bc6e..5b8654b 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -35,6 +35,10 @@ pub struct SdkCompileCommand { pub no_stamps: bool, /// SDK container architecture for 
cross-arch emulation pub sdk_arch: Option, + /// Working directory for compile scripts (container path). + /// If set, scripts are executed from this directory instead of /opt/src. + /// Used for remote extensions where scripts are in $AVOCADO_PREFIX/includes// + pub workdir: Option, } impl SdkCompileCommand { @@ -56,9 +60,16 @@ impl SdkCompileCommand { dnf_args, no_stamps: false, sdk_arch: None, + workdir: None, } } + /// Set the working directory for compile scripts (container path) + pub fn with_workdir(mut self, workdir: Option) -> Self { + self.workdir = workdir; + self + } + /// Set the no_stamps flag pub fn with_no_stamps(mut self, no_stamps: bool) -> Self { self.no_stamps = no_stamps; @@ -237,10 +248,20 @@ impl SdkCompileCommand { let container_helper = SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); - let compile_command = format!( - r#"if [ -f '{}' ]; then echo 'Running compile script: {}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{}'; else echo 'Compile script {} not found.' && ls -la; exit 1; fi"#, - section.script, section.script, section.script, section.script - ); + // Build compile command with optional workdir prefix + // For remote extensions, scripts are in $AVOCADO_PREFIX/includes// instead of /opt/src + // Note: Use double quotes for workdir so $AVOCADO_PREFIX gets expanded by the shell + let compile_command = if let Some(ref workdir) = self.workdir { + format!( + r#"cd "{workdir}" && if [ -f '{}' ]; then echo 'Running compile script: {}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{}'; else echo 'Compile script {} not found.' && ls -la; exit 1; fi"#, + section.script, section.script, section.script, section.script + ) + } else { + format!( + r#"if [ -f '{}' ]; then echo 'Running compile script: {}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{}'; else echo 'Compile script {} not found.' 
&& ls -la; exit 1; fi"#, + section.script, section.script, section.script, section.script + ) + }; let config = RunConfig { container_image: container_image.to_string(), From 3fa39dadc2c0d5a656aa0104b4c46b915fa683b7 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 20:55:52 -0500 Subject: [PATCH 18/23] fix up remote extensions using path source --- src/commands/ext/fetch.rs | 16 +-- src/utils/config.rs | 113 +++++++++++++++++++++- src/utils/container.rs | 162 ++++++++++++++++++++++++++++++- src/utils/ext_fetch.rs | 198 +++++++++++++++++++++++++++++--------- 4 files changed, 431 insertions(+), 58 deletions(-) diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs index ff5fdd6..64d4941 100644 --- a/src/commands/ext/fetch.rs +++ b/src/commands/ext/fetch.rs @@ -7,7 +7,7 @@ use anyhow::{Context, Result}; use crate::utils::config::{Config, ExtensionSource}; use crate::utils::ext_fetch::ExtensionFetcher; -use crate::utils::output::{print_info, print_success, print_warning, OutputLevel}; +use crate::utils::output::{print_info, print_success, OutputLevel}; use crate::utils::target::resolve_target_required; /// Command to fetch remote extensions @@ -138,6 +138,9 @@ impl ExtFetchCommand { config.merge_sdk_container_args(None) }; + // Get the resolved src_dir for resolving relative extension paths + let src_dir = config.get_resolved_src_dir(&self.config_path); + let fetcher = ExtensionFetcher::new( self.config_path.clone(), target.clone(), @@ -147,7 +150,8 @@ impl ExtFetchCommand { .with_repo_url(config.get_sdk_repo_url()) .with_repo_release(config.get_sdk_repo_release()) .with_container_args(effective_container_args) - .with_sdk_arch(self.sdk_arch.clone()); + .with_sdk_arch(self.sdk_arch.clone()) + .with_src_dir(src_dir); // Fetch each extension let mut fetched_count = 0; @@ -183,11 +187,9 @@ impl ExtFetchCommand { fetched_count += 1; } Err(e) => { - print_warning( - &format!("Failed to fetch extension '{ext_name}': {e}"), - OutputLevel::Normal, 
- ); - // Continue with other extensions instead of failing entirely + return Err(anyhow::anyhow!( + "Failed to fetch extension '{ext_name}': {e}" + )); } } } diff --git a/src/utils/config.rs b/src/utils/config.rs index bddf3f9..dce8dc0 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -747,14 +747,94 @@ impl Config { ); } + // Load extension path state for path-based extensions + let ext_path_state = crate::utils::ext_fetch::ExtensionPathState::load_from_dir(&src_dir) + .ok() + .flatten(); + // For each remote extension, try to read its config for (ext_name, source) in remote_extensions { // Try multiple methods to read the extension config: + // 0. Path-based extension: read directly from source path (for source: { type: path }) // 1. Direct container path (when running inside a container) // 2. Via container command (when running on host) // 3. Local fallback path (for development) let ext_content = { + // Method 0: Check if this is a path-based extension (source: { type: path }) + // For path-based extensions, read from the registered source path on the host + if let Some(ref state) = ext_path_state { + if let Some(source_path) = state.path_mounts.get(&ext_name) { + let config_path_yaml = source_path.join("avocado.yaml"); + let config_path_yml = source_path.join("avocado.yml"); + + if verbose { + eprintln!( + "[DEBUG] Extension '{}' is path-based, checking: {}", + ext_name, + config_path_yaml.display() + ); + } + + if config_path_yaml.exists() { + match fs::read_to_string(&config_path_yaml) { + Ok(content) => { + if verbose { + eprintln!( + "[DEBUG] Read {} bytes from path-based source", + content.len() + ); + } + content + } + Err(e) => { + if verbose { + eprintln!("[DEBUG] Failed to read: {e}"); + } + continue; + } + } + } else if config_path_yml.exists() { + match fs::read_to_string(&config_path_yml) { + Ok(content) => { + if verbose { + eprintln!( + "[DEBUG] Read {} bytes from path-based source (.yml)", + content.len() + ); + } + content + } + 
Err(e) => { + if verbose { + eprintln!("[DEBUG] Failed to read: {e}"); + } + continue; + } + } + } else { + if verbose { + eprintln!( + "[DEBUG] Path-based source path has no avocado.yaml/yml: {}", + source_path.display() + ); + } + continue; + } + } else { + // Not a path-based extension, fall through to other methods + "".to_string() + } + } else { + // No path state, fall through to other methods + "".to_string() + } + }; + + // If we got content from path-based source, skip other methods + let ext_content = if !ext_content.is_empty() { + ext_content + } else { // Method 1: Check if we're inside a container and can read directly // The standard container path is /opt/_avocado//includes//avocado.yaml let container_direct_path = @@ -881,9 +961,21 @@ impl Config { } }; - // Record this extension's source (container path for reference) - let ext_config_path_str = - format!("/opt/_avocado/{resolved_target}/includes/{ext_name}/avocado.yaml"); + // Record this extension's source path + // For path-based extensions, use the actual host path + // For other remote extensions, use the container path + let ext_config_path_str = if let Some(ref state) = ext_path_state { + if let Some(source_path) = state.path_mounts.get(&ext_name) { + source_path + .join("avocado.yaml") + .to_string_lossy() + .to_string() + } else { + format!("/opt/_avocado/{resolved_target}/includes/{ext_name}/avocado.yaml") + } + } else { + format!("/opt/_avocado/{resolved_target}/includes/{ext_name}/avocado.yaml") + }; extension_sources.insert(ext_name.clone(), ext_config_path_str.clone()); // Also record any extensions defined within this remote extension's config @@ -899,7 +991,20 @@ impl Config { } // Get include patterns from the extension source - let include_patterns = source.get_include_patterns(); + // For path-based extensions (type: path), use permissive patterns similar to legacy + // external configs to ensure sdk.compile sections are included + let include_patterns: Vec = match &source { + 
ExtensionSource::Path { include, .. } => { + if let Some(patterns) = include { + patterns.clone() + } else { + // Default: include all sdk sections for path-based extensions + vec!["sdk.packages.*".to_string(), "sdk.compile.*".to_string()] + } + } + _ => source.get_include_patterns().to_vec(), + }; + let include_patterns = include_patterns.as_slice(); // Find compile dependencies to auto-include from the extension's own section let auto_include_compile = diff --git a/src/utils/container.rs b/src/utils/container.rs index 5aa2be4..dc77928 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -146,6 +146,9 @@ pub struct RunConfig { pub nfs_port: Option, /// SDK container architecture for cross-arch emulation (e.g., "aarch64", "x86-64") pub sdk_arch: Option, + /// Extension source paths to mount via bindfs (extension name -> host path) + /// These are mounted at /mnt/ext/ and bindfs'd to $AVOCADO_PREFIX/includes/ + pub ext_path_mounts: Option>, } impl Default for RunConfig { @@ -177,6 +180,7 @@ impl Default for RunConfig { runs_on: None, nfs_port: None, sdk_arch: None, + ext_path_mounts: None, } } } @@ -235,6 +239,44 @@ impl SdkContainer { Ok(Self::new().with_src_dir(src_dir)) } + /// Load extension path mounts from the state file + /// + /// Returns a HashMap of extension name -> host path for extensions that use + /// `source: { type: path }` and were registered via `avocado ext fetch`. + /// + /// These paths should be added to `RunConfig.ext_path_mounts` so they get + /// mounted via bindfs at container runtime. 
+ pub fn load_ext_path_mounts(&self) -> Option> { + use crate::utils::ext_fetch::ExtensionPathState; + + let src_dir = self.src_dir.as_ref().unwrap_or(&self.cwd); + + match ExtensionPathState::load_from_dir(src_dir) { + Ok(Some(state)) if !state.path_mounts.is_empty() => { + if self.verbose { + print_info( + &format!( + "Loaded {} extension path mount(s) from state file", + state.path_mounts.len() + ), + OutputLevel::Normal, + ); + } + Some(state.path_mounts) + } + Ok(_) => None, + Err(e) => { + if self.verbose { + print_info( + &format!("Warning: Failed to load extension path state: {e}"), + OutputLevel::Normal, + ); + } + None + } + } + } + /// Create a shared RunsOnContext for running multiple commands on a remote host /// /// This sets up the NFS server and remote volumes once, which can then be reused @@ -359,9 +401,17 @@ impl SdkContainer { config.container_name.clone() }; - // Create a modified config with the container name + // Auto-populate ext_path_mounts from state file if not already set + let effective_ext_path_mounts = if config.ext_path_mounts.is_some() { + config.ext_path_mounts.clone() + } else { + self.load_ext_path_mounts() + }; + + // Create a modified config with the container name and ext_path_mounts let effective_config = RunConfig { container_name: effective_container_name.clone(), + ext_path_mounts: effective_ext_path_mounts, ..config }; @@ -662,6 +712,30 @@ impl SdkContainer { None }; + // Mount extension source paths for bindfs + // Each extension path is mounted at /mnt/ext/ and will be bindfs'd + // to $AVOCADO_PREFIX/includes/ in the entrypoint script + let mut ext_path_names: Vec = Vec::new(); + if let Some(ref ext_mounts) = config.ext_path_mounts { + for (ext_name, host_path) in ext_mounts { + container_cmd.push("-v".to_string()); + container_cmd.push(format!("{}:/mnt/ext/{}:rw", host_path.display(), ext_name)); + ext_path_names.push(ext_name.clone()); + + if self.verbose { + print_info( + &format!( + "Mounting extension '{}' 
source: {} -> /mnt/ext/{}", + ext_name, + host_path.display(), + ext_name + ), + OutputLevel::Normal, + ); + } + } + } + // Note: Working directory is handled in the entrypoint script based on sysroot parameters // Add environment variables @@ -703,6 +777,16 @@ impl SdkContainer { container_cmd.push(format!("AVOCADO_SIGNING_KEYS_DIR={keys_dir}")); } + // Add extension path mounts env var for entrypoint script + // This is a space-separated list of extension names that have bindfs mounts + if !ext_path_names.is_empty() { + container_cmd.push("-e".to_string()); + container_cmd.push(format!( + "AVOCADO_EXT_PATH_MOUNTS={}", + ext_path_names.join(" ") + )); + } + for (key, value) in env_vars { container_cmd.push("-e".to_string()); container_cmd.push(format!("{key}={value}")); @@ -786,11 +870,24 @@ impl SdkContainer { let bash_cmd = vec!["bash".to_string(), "-c".to_string(), full_command]; + // Auto-populate ext_path_mounts from state file if not already set + let effective_ext_path_mounts = if config.ext_path_mounts.is_some() { + config.ext_path_mounts.clone() + } else { + self.load_ext_path_mounts() + }; + + // Create effective config with ext_path_mounts + let effective_config = RunConfig { + ext_path_mounts: effective_ext_path_mounts, + ..config + }; + // Build container command with volume state let container_cmd = - self.build_container_command(&config, &bash_cmd, &env_vars, &volume_state)?; + self.build_container_command(&effective_config, &bash_cmd, &env_vars, &volume_state)?; - if config.verbose || self.verbose { + if effective_config.verbose || self.verbose { print_info( &format!( "Mounting source directory: {} -> /mnt/src (bindfs -> /opt/src)", @@ -1340,6 +1437,35 @@ else if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted /mnt/src -> /opt/src (no UID/GID mapping)"; fi fi +# Mount extension source paths with bindfs (for path-based remote extensions) +# These are mounted at /mnt/ext/ and need to be bindfs'd to $AVOCADO_PREFIX/includes/ +if [ -n 
"$AVOCADO_EXT_PATH_MOUNTS" ]; then + # AVOCADO_PREFIX must be set before this - use the target from environment + EXT_PREFIX="/opt/_avocado/${{AVOCADO_TARGET}}/includes" + for ext_name in $AVOCADO_EXT_PATH_MOUNTS; do + mnt_path="/mnt/ext/$ext_name" + target_path="$EXT_PREFIX/$ext_name" + + if [ -d "$mnt_path" ]; then + mkdir -p "$target_path" + if [ -n "$AVOCADO_HOST_UID" ] && [ -n "$AVOCADO_HOST_GID" ]; then + if [ "$AVOCADO_HOST_UID" = "0" ] && [ "$AVOCADO_HOST_GID" = "0" ]; then + mount --bind "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path (host is root)"; fi + else + bindfs --map=$AVOCADO_HOST_UID/0:@$AVOCADO_HOST_GID/@0 "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path with UID/GID mapping"; fi + fi + else + mount --bind "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path (no UID/GID mapping)"; fi + fi + else + echo "[WARNING] Extension mount path not found: $mnt_path" + fi + done +fi + # Get repo url from environment or default to prod if [ -n "$AVOCADO_SDK_REPO_URL" ]; then REPO_URL="$AVOCADO_SDK_REPO_URL" @@ -1532,6 +1658,35 @@ else if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted /mnt/src -> /opt/src (no UID/GID mapping)"; fi fi +# Mount extension source paths with bindfs (for path-based remote extensions) +# These are mounted at /mnt/ext/ and need to be bindfs'd to $AVOCADO_PREFIX/includes/ +if [ -n "$AVOCADO_EXT_PATH_MOUNTS" ]; then + # AVOCADO_PREFIX must be set before this - use the target from environment + EXT_PREFIX="/opt/_avocado/${{AVOCADO_TARGET}}/includes" + for ext_name in $AVOCADO_EXT_PATH_MOUNTS; do + mnt_path="/mnt/ext/$ext_name" + target_path="$EXT_PREFIX/$ext_name" + + if [ -d "$mnt_path" ]; then + mkdir -p "$target_path" + if [ -n "$AVOCADO_HOST_UID" ] && [ -n "$AVOCADO_HOST_GID" ]; 
then + if [ "$AVOCADO_HOST_UID" = "0" ] && [ "$AVOCADO_HOST_GID" = "0" ]; then + mount --bind "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path (host is root)"; fi + else + bindfs --map=$AVOCADO_HOST_UID/0:@$AVOCADO_HOST_GID/@0 "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path with UID/GID mapping"; fi + fi + else + mount --bind "$mnt_path" "$target_path" + if [ -n "$AVOCADO_VERBOSE" ]; then echo "[INFO] Mounted extension '$ext_name': $mnt_path -> $target_path (no UID/GID mapping)"; fi + fi + else + echo "[WARNING] Extension mount path not found: $mnt_path" + fi + done +fi + # Get repo url from environment or default to prod if [ -n "$AVOCADO_SDK_REPO_URL" ]; then REPO_URL="$AVOCADO_SDK_REPO_URL" @@ -1907,6 +2062,7 @@ mod tests { runs_on: None, nfs_port: None, sdk_arch: None, + ext_path_mounts: None, }; let result = container.build_container_command(&config, &command, &env_vars, &volume_state); diff --git a/src/utils/ext_fetch.rs b/src/utils/ext_fetch.rs index 2b2f6cb..fbc85b7 100644 --- a/src/utils/ext_fetch.rs +++ b/src/utils/ext_fetch.rs @@ -3,15 +3,93 @@ //! This module provides functionality to fetch extensions from various sources: //! - Package repository (avocado extension repo) //! - Git repositories (with optional sparse checkout) -//! - Local filesystem paths +//! 
- Local filesystem paths (mounted via bindfs at runtime) -use anyhow::Result; +use anyhow::{Context, Result}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fs; use std::path::{Path, PathBuf}; use crate::utils::config::ExtensionSource; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_info, OutputLevel}; +/// State for extension path mounts stored in .avocado/ext-paths.json +#[derive(Debug, Clone, Default, Deserialize, Serialize)] +pub struct ExtensionPathState { + /// Map of extension name to host path for bindfs mounting + pub path_mounts: HashMap, +} + +impl ExtensionPathState { + /// Load extension path state from .avocado/ext-paths.json in the given directory + pub fn load_from_dir(dir_path: &Path) -> Result> { + let state_file = dir_path.join(".avocado").join("ext-paths.json"); + + if !state_file.exists() { + return Ok(None); + } + + let content = fs::read_to_string(&state_file).with_context(|| { + format!( + "Failed to read extension path state file: {}", + state_file.display() + ) + })?; + + let state: Self = serde_json::from_str(&content).with_context(|| { + format!( + "Failed to parse extension path state file: {}", + state_file.display() + ) + })?; + + Ok(Some(state)) + } + + /// Save extension path state to .avocado/ext-paths.json in the given directory + pub fn save_to_dir(&self, dir_path: &Path) -> Result<()> { + let state_dir = dir_path.join(".avocado"); + fs::create_dir_all(&state_dir).with_context(|| { + format!( + "Failed to create .avocado directory: {}", + state_dir.display() + ) + })?; + + let state_file = state_dir.join("ext-paths.json"); + let content = serde_json::to_string_pretty(self) + .with_context(|| "Failed to serialize extension path state".to_string())?; + + fs::write(&state_file, content).with_context(|| { + format!( + "Failed to write extension path state file: {}", + state_file.display() + ) + })?; + + Ok(()) + } + + /// Add a path mount for an extension + pub 
fn add_path_mount(&mut self, ext_name: String, host_path: PathBuf) { + self.path_mounts.insert(ext_name, host_path); + } + + /// Remove a path mount for an extension + #[allow(dead_code)] + pub fn remove_path_mount(&mut self, ext_name: &str) { + self.path_mounts.remove(ext_name); + } + + /// Get the path mount for an extension + #[allow(dead_code)] + pub fn get_path_mount(&self, ext_name: &str) -> Option<&PathBuf> { + self.path_mounts.get(ext_name) + } +} + /// Extension fetcher for downloading and installing remote extensions pub struct ExtensionFetcher { /// Path to the main configuration file @@ -30,6 +108,8 @@ pub struct ExtensionFetcher { container_args: Option>, /// SDK container architecture for cross-arch emulation sdk_arch: Option, + /// Source directory for resolving relative extension paths + src_dir: Option, } impl ExtensionFetcher { @@ -49,6 +129,7 @@ impl ExtensionFetcher { repo_release: None, container_args: None, sdk_arch: None, + src_dir: None, } } @@ -76,6 +157,12 @@ impl ExtensionFetcher { self } + /// Set source directory for resolving relative extension paths + pub fn with_src_dir(mut self, src_dir: Option) -> Self { + self.src_dir = src_dir; + self + } + /// Fetch an extension based on its source configuration /// /// Returns the path where the extension was installed @@ -318,76 +405,99 @@ echo "Successfully fetched extension '{ext_name}' from git" } /// Fetch an extension from a local filesystem path + /// + /// Instead of copying files, this validates the path exists and stores the + /// mapping for bindfs mounting at container runtime. The extension source + /// will be mounted at `/mnt/ext/` and bindfs'd to + /// `$AVOCADO_PREFIX/includes/`. 
async fn fetch_from_path( &self, ext_name: &str, source_path: &str, - _install_path: &Path, // Host path - not used, we use container path instead + _install_path: &Path, // Host path - not used, we use bindfs mounting instead ) -> Result<()> { if self.verbose { print_info( - &format!("Fetching extension '{ext_name}' from path: {source_path}"), + &format!("Registering extension '{ext_name}' from path: {source_path}"), OutputLevel::Normal, ); } - // Resolve the source path relative to the config file - let config_dir = Path::new(&self.config_path) - .parent() - .unwrap_or(Path::new(".")); + // Resolve the source path relative to src_dir (or config dir if src_dir not set) let resolved_source = if Path::new(source_path).is_absolute() { PathBuf::from(source_path) } else { - config_dir.join(source_path) + // Use src_dir if available, otherwise fall back to config directory + if let Some(ref src_dir) = self.src_dir { + src_dir.join(source_path) + } else { + let config_dir = Path::new(&self.config_path) + .parent() + .unwrap_or(Path::new(".")); + config_dir.join(source_path) + } }; + // Canonicalize the path to get the absolute path + let resolved_source = resolved_source.canonicalize().unwrap_or(resolved_source); + if !resolved_source.exists() { return Err(anyhow::anyhow!( - "Extension source path does not exist: {}", + "Extension source path does not exist: {}\n\ + Path was resolved relative to: {}", + resolved_source.display(), + self.src_dir + .as_ref() + .map(|p| p.display().to_string()) + .unwrap_or_else(|| "config directory".to_string()) + )); + } + + // Check that the path contains an avocado.yaml or avocado.yml file + let has_config = resolved_source.join("avocado.yaml").exists() + || resolved_source.join("avocado.yml").exists(); + if !has_config { + return Err(anyhow::anyhow!( + "Extension source path does not contain an avocado.yaml or avocado.yml file: {}", resolved_source.display() )); } - // Use container path $AVOCADO_PREFIX/includes/ - let 
container_install_path = format!("$AVOCADO_PREFIX/includes/{ext_name}"); + // Get the state directory (src_dir or config dir) + let state_dir = self.src_dir.clone().unwrap_or_else(|| { + Path::new(&self.config_path) + .parent() + .unwrap_or(Path::new(".")) + .to_path_buf() + }); - // The source path needs to be accessible from inside the container - // Since the workspace is mounted at $AVOCADO_SRC_DIR, convert the path - let resolved_source_str = resolved_source.to_string_lossy(); + // Load or create extension path state + let mut state = ExtensionPathState::load_from_dir(&state_dir)?.unwrap_or_default(); - // Build copy command to run inside the container - let copy_cmd = format!( - r#" -set -e -rm -rf "{container_install_path}" -mkdir -p "{container_install_path}" -cp -r "{resolved_source_str}/." "{container_install_path}/" -echo "Successfully copied extension '{ext_name}' from {resolved_source_str} to {container_install_path}" -"# - ); + // Add the path mount for this extension + state.add_path_mount(ext_name.to_string(), resolved_source.clone()); - let container_helper = SdkContainer::new().verbose(self.verbose); - let run_config = RunConfig { - container_image: self.container_image.clone(), - target: self.target.clone(), - command: copy_cmd, - verbose: self.verbose, - source_environment: true, - interactive: false, - repo_url: self.repo_url.clone(), - repo_release: self.repo_release.clone(), - container_args: self.container_args.clone(), - sdk_arch: self.sdk_arch.clone(), - ..Default::default() - }; + // Save the state + state.save_to_dir(&state_dir)?; - let success = container_helper.run_in_container(run_config).await?; - if !success { - return Err(anyhow::anyhow!( - "Failed to copy extension '{ext_name}' from path" - )); + if self.verbose { + print_info( + &format!( + "Registered extension '{ext_name}' for bindfs mounting from: {}", + resolved_source.display() + ), + OutputLevel::Normal, + ); } + print_info( + &format!( + "Extension '{ext_name}' will be 
mounted via bindfs at runtime from: {}", + resolved_source.display() + ), + OutputLevel::Normal, + ); + Ok(()) } From 5f4b0128bdd44df20180ae9744a3c95ca9cfc6e0 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Thu, 8 Jan 2026 21:40:20 -0500 Subject: [PATCH 19/23] 0.23.0 release --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d4c0c2..ee97179 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -130,7 +130,7 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "avocado-cli" -version = "0.22.1" +version = "0.23.0" dependencies = [ "anyhow", "base64", diff --git a/Cargo.toml b/Cargo.toml index 4b479f8..fbf608a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "avocado-cli" -version = "0.22.1" +version = "0.23.0" edition = "2021" description = "Command line interface for Avocado." authors = ["Avocado"] From f612ea0dc138f63df23c8d25b5282ee94257f655 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Sat, 10 Jan 2026 09:36:35 -0500 Subject: [PATCH 20/23] Add clean to sdk compile sections --- src/commands/ext/clean.rs | 400 +++++++++++++++++++++++++++++++++++- src/commands/sdk/clean.rs | 333 +++++++++++++++++++++++++++++- src/commands/sdk/compile.rs | 2 + src/main.rs | 5 + src/utils/config.rs | 2 + 5 files changed, 729 insertions(+), 13 deletions(-) diff --git a/src/commands/ext/clean.rs b/src/commands/ext/clean.rs index 21c3886..1558933 100644 --- a/src/commands/ext/clean.rs +++ b/src/commands/ext/clean.rs @@ -1,7 +1,7 @@ // Allow deprecated variants for backward compatibility during migration #![allow(deprecated)] -use anyhow::Result; +use anyhow::{Context, Result}; use crate::utils::config::{Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; @@ -45,17 +45,237 @@ impl ExtCleanCommand { } pub async fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path)?; - let content 
= std::fs::read_to_string(&self.config_path)?; - let _parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Load composed configuration (includes remote extension configs with compile sections) + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let config = &composed.config; + let parsed = &composed.merged_value; + + let target = resolve_target_required(self.target.as_deref(), config)?; + let extension_location = self.find_extension_in_dependency_tree(config, &target)?; + let container_image = self.get_container_image(config)?; + + // Get extension configuration from the composed/merged config + let ext_config = self.get_extension_config(config, parsed, &extension_location, &target)?; + + // Determine the extension source path for clean scripts + // For remote extensions, scripts are in $AVOCADO_PREFIX/includes// + // For local extensions, scripts are in /opt/src (the mounted src_dir) + let ext_script_workdir = match &extension_location { + ExtensionLocation::Remote { name, .. } => { + Some(format!("$AVOCADO_PREFIX/includes/{name}")) + } + ExtensionLocation::Local { .. } | ExtensionLocation::External { .. 
} => None, + }; - let target = resolve_target_required(self.target.as_deref(), &config)?; - let _extension_location = self.find_extension_in_dependency_tree(&config, &target)?; - let container_image = self.get_container_image(&config)?; + // Execute clean scripts for compile dependencies BEFORE cleaning the extension + // This allows clean scripts to access build artifacts if needed + self.execute_compile_clean_scripts( + config, + &ext_config, + &container_image, + &target, + ext_script_workdir.as_deref(), + ) + .await?; self.clean_extension(&container_image, &target).await } + /// Get extension configuration from the composed/merged config + fn get_extension_config( + &self, + config: &Config, + parsed: &serde_yaml::Value, + extension_location: &ExtensionLocation, + target: &str, + ) -> Result { + match extension_location { + ExtensionLocation::Remote { .. } => { + // Use the already-merged config from `parsed` which contains remote extension configs + let ext_section = parsed + .get("extensions") + .and_then(|ext| ext.get(&self.extension)); + if let Some(ext_val) = ext_section { + let base_ext = ext_val.clone(); + // Check for target-specific override within this extension + let target_override = ext_val.get(target).cloned(); + if let Some(override_val) = target_override { + Ok(config.merge_target_override(base_ext, override_val, target)) + } else { + Ok(base_ext) + } + } else { + Ok(serde_yaml::Value::Mapping(serde_yaml::Mapping::new())) + } + } + ExtensionLocation::Local { config_path, .. } => config + .get_merged_ext_config(&self.extension, target, config_path)? + .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + }), + #[allow(deprecated)] + ExtensionLocation::External { config_path, .. } => config + .get_merged_ext_config(&self.extension, target, config_path)? 
+ .ok_or_else(|| { + anyhow::anyhow!("Extension '{}' not found in configuration.", self.extension) + }), + } + } + + /// Execute clean scripts for compile dependencies + async fn execute_compile_clean_scripts( + &self, + config: &Config, + ext_config: &serde_yaml::Value, + container_image: &str, + target: &str, + ext_script_workdir: Option<&str>, + ) -> Result<()> { + // Get dependencies from extension configuration + let dependencies = ext_config.get("packages").and_then(|v| v.as_mapping()); + + let Some(deps_table) = dependencies else { + return Ok(()); + }; + + // Find compile dependencies that may have clean scripts + let mut compile_sections_to_clean = Vec::new(); + + for (dep_name_val, dep_spec) in deps_table { + if let Some(dep_name) = dep_name_val.as_str() { + if let serde_yaml::Value::Mapping(spec_map) = dep_spec { + // Check for compile dependency: { compile = "section-name", ... } + if let Some(serde_yaml::Value::String(compile_section)) = + spec_map.get("compile") + { + compile_sections_to_clean + .push((dep_name.to_string(), compile_section.clone())); + } + } + } + } + + if compile_sections_to_clean.is_empty() { + return Ok(()); + } + + // Get clean scripts from SDK compile sections + let clean_scripts = self.get_clean_scripts_for_sections(config, &compile_sections_to_clean); + + if clean_scripts.is_empty() { + if self.verbose { + print_info( + "No clean scripts defined for compile dependencies", + OutputLevel::Normal, + ); + } + return Ok(()); + } + + print_info( + &format!( + "Executing {} clean script(s) for compile dependencies", + clean_scripts.len() + ), + OutputLevel::Normal, + ); + + // Get SDK configuration for container setup + let repo_url = config.get_sdk_repo_url(); + let repo_release = config.get_sdk_repo_release(); + let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); + + // Initialize SDK container helper + let container_helper = SdkContainer::from_config(&self.config_path, config)?; + + // Execute 
each clean script + for (section_name, clean_script) in clean_scripts { + print_info( + &format!( + "Running clean script for compile section '{section_name}': {clean_script}" + ), + OutputLevel::Normal, + ); + + // Build clean command with optional workdir prefix + // For remote extensions, scripts are in $AVOCADO_PREFIX/includes// instead of /opt/src + let clean_command = if let Some(workdir) = ext_script_workdir { + format!( + r#"cd "{workdir}" && if [ -f '{clean_script}' ]; then echo 'Running clean script: {clean_script}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{clean_script}'; else echo 'Clean script {clean_script} not found, skipping.'; fi"# + ) + } else { + format!( + r#"if [ -f '{clean_script}' ]; then echo 'Running clean script: {clean_script}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{clean_script}'; else echo 'Clean script {clean_script} not found, skipping.'; fi"# + ) + }; + + if self.verbose { + print_info( + &format!("Running command: {clean_command}"), + OutputLevel::Normal, + ); + } + + let run_config = RunConfig { + container_image: container_image.to_string(), + target: target.to_string(), + command: clean_command, + verbose: self.verbose, + source_environment: true, + interactive: false, + repo_url: repo_url.clone(), + repo_release: repo_release.clone(), + container_args: merged_container_args.clone(), + dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), + ..Default::default() + }; + + let success = container_helper.run_in_container(run_config).await?; + + if success { + print_success( + &format!("Completed clean script for section '{section_name}'."), + OutputLevel::Normal, + ); + } else { + print_error( + &format!("Failed to run clean script for section '{section_name}'."), + OutputLevel::Normal, + ); + return Err(anyhow::anyhow!( + "Clean script failed for section '{section_name}'" + )); + } + } + + Ok(()) + } + + /// Get clean scripts for the specified compile sections + fn get_clean_scripts_for_sections( + &self, 
+ config: &Config, + compile_sections: &[(String, String)], + ) -> Vec<(String, String)> { + let mut clean_scripts = Vec::new(); + + if let Some(sdk) = &config.sdk { + if let Some(compile) = &sdk.compile { + for (_dep_name, section_name) in compile_sections { + if let Some(section_config) = compile.get(section_name) { + if let Some(clean_script) = &section_config.clean { + clean_scripts.push((section_name.clone(), clean_script.clone())); + } + } + } + } + } + + clean_scripts + } + fn find_extension_in_dependency_tree( &self, config: &Config, @@ -312,4 +532,170 @@ mod tests { ); assert!(script.contains(".stamps/ext"), "Should clean stamps"); } + + #[test] + fn test_get_clean_scripts_for_sections_with_clean_script() { + use std::io::Write; + use tempfile::NamedTempFile; + + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: "build.sh" + clean: "clean.sh" + packages: + gcc: "*" + other-library: + compile: "build-other.sh" + packages: + make: "*" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = ExtCleanCommand::new( + "test-ext".to_string(), + temp_file.path().to_string_lossy().to_string(), + false, + None, + None, + None, + ); + + // Test with compile sections - one has clean script, one doesn't + let compile_sections = vec![ + ("dep1".to_string(), "my-library".to_string()), + ("dep2".to_string(), "other-library".to_string()), + ]; + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config, &compile_sections); + + // Only my-library has a clean script + assert_eq!(clean_scripts.len(), 1); + assert_eq!(clean_scripts[0].0, "my-library"); + assert_eq!(clean_scripts[0].1, "clean.sh"); + } + + #[test] + fn test_get_clean_scripts_for_sections_no_clean_scripts() { + use std::io::Write; + use tempfile::NamedTempFile; + + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: 
"build.sh" + packages: + gcc: "*" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = ExtCleanCommand::new( + "test-ext".to_string(), + temp_file.path().to_string_lossy().to_string(), + false, + None, + None, + None, + ); + + let compile_sections = vec![("dep1".to_string(), "my-library".to_string())]; + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config, &compile_sections); + + // No clean script defined + assert!(clean_scripts.is_empty()); + } + + #[test] + fn test_get_clean_scripts_for_nonexistent_section() { + use std::io::Write; + use tempfile::NamedTempFile; + + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: "build.sh" + clean: "clean.sh" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = ExtCleanCommand::new( + "test-ext".to_string(), + temp_file.path().to_string_lossy().to_string(), + false, + None, + None, + None, + ); + + // Reference a section that doesn't exist + let compile_sections = vec![("dep1".to_string(), "nonexistent-library".to_string())]; + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config, &compile_sections); + + // No clean script found for nonexistent section + assert!(clean_scripts.is_empty()); + } + + #[test] + fn test_get_clean_scripts_multiple_sections_with_clean() { + use std::io::Write; + use tempfile::NamedTempFile; + + let config_content = r#" +sdk: + image: "test-image" + compile: + lib-a: + compile: "build-a.sh" + clean: "clean-a.sh" + lib-b: + compile: "build-b.sh" + clean: "clean-b.sh" + lib-c: + compile: "build-c.sh" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = 
ExtCleanCommand::new( + "test-ext".to_string(), + temp_file.path().to_string_lossy().to_string(), + false, + None, + None, + None, + ); + + let compile_sections = vec![ + ("dep-a".to_string(), "lib-a".to_string()), + ("dep-b".to_string(), "lib-b".to_string()), + ("dep-c".to_string(), "lib-c".to_string()), + ]; + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config, &compile_sections); + + // lib-a and lib-b have clean scripts, lib-c doesn't + assert_eq!(clean_scripts.len(), 2); + + let section_names: Vec<&str> = clean_scripts + .iter() + .map(|(name, _)| name.as_str()) + .collect(); + assert!(section_names.contains(&"lib-a")); + assert!(section_names.contains(&"lib-b")); + } } diff --git a/src/commands/sdk/clean.rs b/src/commands/sdk/clean.rs index 02769ee..66487e4 100644 --- a/src/commands/sdk/clean.rs +++ b/src/commands/sdk/clean.rs @@ -9,12 +9,24 @@ use crate::utils::{ target::resolve_target_required, }; +/// Context for running clean operations in containers +struct CleanContext<'a> { + container_helper: &'a SdkContainer, + container_image: &'a str, + target: &'a str, + repo_url: Option, + repo_release: Option, + merged_container_args: Option>, +} + /// Implementation of the 'sdk clean' command. 
pub struct SdkCleanCommand { /// Path to configuration file pub config_path: String, /// Enable verbose output pub verbose: bool, + /// Specific compile sections to clean + pub sections: Vec, /// Global target architecture pub target: Option, /// Additional arguments to pass to the container runtime @@ -30,6 +42,7 @@ impl SdkCleanCommand { pub fn new( config_path: String, verbose: bool, + sections: Vec, target: Option, container_args: Option>, dnf_args: Option>, @@ -37,6 +50,7 @@ impl SdkCleanCommand { Self { config_path, verbose, + sections, target, container_args, dnf_args, @@ -52,9 +66,10 @@ impl SdkCleanCommand { /// Execute the sdk clean command pub async fn execute(&self) -> Result<()> { - // Load the configuration - let config = Config::load(&self.config_path) + // Load composed configuration to get sdk.compile sections + let composed = Config::load_composed(&self.config_path, self.target.as_deref()) .with_context(|| format!("Failed to load config from {}", self.config_path))?; + let config = &composed.config; // Merge container args from config with CLI args let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); @@ -69,12 +84,26 @@ impl SdkCleanCommand { let repo_release = config.get_sdk_repo_release(); // Resolve target with proper precedence - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // Create container helper - let container_helper = SdkContainer::new().verbose(self.verbose); + let container_helper = + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); + + // If sections are specified, run clean scripts for those sections + if !self.sections.is_empty() { + let ctx = CleanContext { + container_helper: &container_helper, + container_image, + target: &target, + repo_url: repo_url.clone(), + repo_release: repo_release.clone(), + merged_container_args: merged_container_args.clone(), + }; + return 
self.clean_sections(config, &ctx).await; + } - // Remove the directory using container helper + // Default behavior: Remove the entire SDK directory if self.verbose { print_info( "Removing SDK directory: $AVOCADO_SDK_PREFIX", @@ -108,17 +137,164 @@ impl SdkCleanCommand { Ok(()) } + + /// Clean specific compile sections by running their clean scripts + async fn clean_sections(&self, config: &Config, ctx: &CleanContext<'_>) -> Result<()> { + // Get clean scripts for the requested sections + let clean_scripts = self.get_clean_scripts_for_sections(config)?; + + if clean_scripts.is_empty() { + print_info( + "No clean scripts defined for the specified sections.", + OutputLevel::Normal, + ); + return Ok(()); + } + + let section_list = clean_scripts + .iter() + .map(|(name, _)| name.as_str()) + .collect::>() + .join(", "); + print_info( + &format!( + "Executing clean scripts for {} section(s): {section_list}", + clean_scripts.len() + ), + OutputLevel::Normal, + ); + + let mut overall_success = true; + + for (section_name, clean_script) in &clean_scripts { + print_info( + &format!("Running clean script for section '{section_name}': {clean_script}"), + OutputLevel::Normal, + ); + + // Build clean command - scripts are relative to src_dir (/opt/src in container) + let clean_command = format!( + r#"if [ -f '{clean_script}' ]; then echo 'Running clean script: {clean_script}'; AVOCADO_SDK_PREFIX=$AVOCADO_SDK_PREFIX bash '{clean_script}'; else echo 'Clean script {clean_script} not found, skipping.'; fi"# + ); + + if self.verbose { + print_info( + &format!("Running command: {clean_command}"), + OutputLevel::Normal, + ); + } + + let run_config = RunConfig { + container_image: ctx.container_image.to_string(), + target: ctx.target.to_string(), + command: clean_command, + verbose: self.verbose, + source_environment: true, + interactive: false, + repo_url: ctx.repo_url.clone(), + repo_release: ctx.repo_release.clone(), + container_args: ctx.merged_container_args.clone(), + dnf_args: 
self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), + ..Default::default() + }; + + let success = ctx.container_helper.run_in_container(run_config).await?; + + if success { + print_success( + &format!("Completed clean script for section '{section_name}'."), + OutputLevel::Normal, + ); + } else { + print_error( + &format!("Failed to run clean script for section '{section_name}'."), + OutputLevel::Normal, + ); + overall_success = false; + } + } + + if overall_success { + print_success( + &format!( + "All {} clean script(s) completed successfully!", + clean_scripts.len() + ), + OutputLevel::Normal, + ); + } + + if !overall_success { + return Err(anyhow::anyhow!("One or more clean scripts failed.")); + } + + Ok(()) + } + + /// Get clean scripts for the specified sections + fn get_clean_scripts_for_sections(&self, config: &Config) -> Result<Vec<(String, String)>> { + let mut clean_scripts = Vec::new(); + let mut missing_sections = Vec::new(); + let mut sections_without_clean = Vec::new(); + + if let Some(sdk) = &config.sdk { + if let Some(compile) = &sdk.compile { + for section_name in &self.sections { + if let Some(section_config) = compile.get(section_name) { + if let Some(clean_script) = &section_config.clean { + clean_scripts.push((section_name.clone(), clean_script.clone())); + } else { + sections_without_clean.push(section_name.clone()); + } + } else { + missing_sections.push(section_name.clone()); + } + } + } else { + // No compile sections at all + missing_sections = self.sections.clone(); + } + } else { + // No SDK section at all + missing_sections = self.sections.clone(); + } + + // Report missing sections as errors + if !missing_sections.is_empty() { + return Err(anyhow::anyhow!( + "The following compile sections were not found: {}", + missing_sections.join(", ") + )); + } + + // Report sections without clean scripts as info + if !sections_without_clean.is_empty() && self.verbose { + print_info( + &format!( + "The following sections have no clean script defined: {}", + 
sections_without_clean.join(", ") + ), + OutputLevel::Normal, + ); + } + + Ok(clean_scripts) + } } #[cfg(test)] mod tests { use super::*; + use std::io::Write; + use tempfile::NamedTempFile; #[test] fn test_new() { let cmd = SdkCleanCommand::new( "config.toml".to_string(), true, + vec!["section1".to_string()], Some("test-target".to_string()), None, None, @@ -126,15 +302,160 @@ mod tests { assert_eq!(cmd.config_path, "config.toml"); assert!(cmd.verbose); + assert_eq!(cmd.sections, vec!["section1"]); assert_eq!(cmd.target, Some("test-target".to_string())); } #[test] fn test_new_minimal() { - let cmd = SdkCleanCommand::new("config.toml".to_string(), false, None, None, None); + let cmd = SdkCleanCommand::new("config.toml".to_string(), false, vec![], None, None, None); assert_eq!(cmd.config_path, "config.toml"); assert!(!cmd.verbose); + assert!(cmd.sections.is_empty()); assert_eq!(cmd.target, None); } + + #[test] + fn test_get_clean_scripts_for_sections_with_clean_script() { + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: "build.sh" + clean: "clean.sh" + packages: + gcc: "*" + other-library: + compile: "build-other.sh" + packages: + make: "*" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = SdkCleanCommand::new( + temp_file.path().to_string_lossy().to_string(), + false, + vec!["my-library".to_string()], + None, + None, + None, + ); + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config).unwrap(); + + assert_eq!(clean_scripts.len(), 1); + assert_eq!(clean_scripts[0].0, "my-library"); + assert_eq!(clean_scripts[0].1, "clean.sh"); + } + + #[test] + fn test_get_clean_scripts_for_sections_no_clean_script() { + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: "build.sh" + packages: + gcc: "*" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + 
write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = SdkCleanCommand::new( + temp_file.path().to_string_lossy().to_string(), + false, + vec!["my-library".to_string()], + None, + None, + None, + ); + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config).unwrap(); + + // Section exists but has no clean script + assert!(clean_scripts.is_empty()); + } + + #[test] + fn test_get_clean_scripts_for_nonexistent_section() { + let config_content = r#" +sdk: + image: "test-image" + compile: + my-library: + compile: "build.sh" + clean: "clean.sh" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = SdkCleanCommand::new( + temp_file.path().to_string_lossy().to_string(), + false, + vec!["nonexistent-library".to_string()], + None, + None, + None, + ); + + // Should return an error for nonexistent section + let result = cmd.get_clean_scripts_for_sections(&config); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("nonexistent-library")); + } + + #[test] + fn test_get_clean_scripts_multiple_sections() { + let config_content = r#" +sdk: + image: "test-image" + compile: + lib-a: + compile: "build-a.sh" + clean: "clean-a.sh" + lib-b: + compile: "build-b.sh" + clean: "clean-b.sh" + lib-c: + compile: "build-c.sh" +"#; + let mut temp_file = NamedTempFile::new().unwrap(); + write!(temp_file, "{config_content}").unwrap(); + let config = Config::load(temp_file.path()).unwrap(); + + let cmd = SdkCleanCommand::new( + temp_file.path().to_string_lossy().to_string(), + false, + vec![ + "lib-a".to_string(), + "lib-b".to_string(), + "lib-c".to_string(), + ], + None, + None, + None, + ); + + let clean_scripts = cmd.get_clean_scripts_for_sections(&config).unwrap(); + + // lib-a and lib-b have clean scripts, lib-c doesn't + assert_eq!(clean_scripts.len(), 2); + + 
let section_names: Vec<&str> = clean_scripts + .iter() + .map(|(name, _)| name.as_str()) + .collect(); + assert!(section_names.contains(&"lib-a")); + assert!(section_names.contains(&"lib-b")); + } } diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index 5b8654b..9e40760 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -439,6 +439,7 @@ dependencies = { gcc = "*" } let section_config = crate::utils::config::CompileConfig { compile: Some("my_script.sh".to_string()), + clean: None, packages: Some(deps), }; @@ -448,6 +449,7 @@ dependencies = { gcc = "*" } // Test section with no compile script let section_config_no_script = crate::utils::config::CompileConfig { compile: None, + clean: None, packages: None, }; diff --git a/src/main.rs b/src/main.rs index 21c2025..19ca8a6 100644 --- a/src/main.rs +++ b/src/main.rs @@ -493,6 +493,7 @@ enum SdkCommands { dnf_args: Option>, }, /// Remove the SDK directory + /// Clean the SDK or run clean scripts for specific compile sections Clean { /// Path to avocado.yaml configuration file #[arg(short = 'C', long, default_value = "avocado.yaml")] @@ -503,6 +504,8 @@ enum SdkCommands { /// Target architecture #[arg(short, long)] target: Option, + /// Specific compile sections to clean (runs their clean scripts) + sections: Vec, /// Additional arguments to pass to the container runtime #[arg(long = "container-arg", num_args = 1, allow_hyphen_values = true, action = clap::ArgAction::Append)] container_args: Option>, @@ -1514,12 +1517,14 @@ async fn main() -> Result<()> { config, verbose, target, + sections, container_args, dnf_args, } => { let clean_cmd = SdkCleanCommand::new( config, verbose, + sections, target.or(cli.target), container_args, dnf_args, diff --git a/src/utils/config.rs b/src/utils/config.rs index dce8dc0..ef2e7c9 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -387,6 +387,8 @@ pub struct SdkConfig { #[derive(Debug, Clone, Deserialize, Serialize)] pub struct 
CompileConfig { pub compile: Option, + /// Path to clean script relative to src_dir, executed during `ext clean` + pub clean: Option, #[serde(alias = "dependencies")] pub packages: Option>, } From 7dacc1af14bac855f43a540fc94948d6705e9a9f Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Sat, 10 Jan 2026 09:56:00 -0500 Subject: [PATCH 21/23] require the sdk to be installed to call sdk clean SECTION ext clean script. --- src/commands/ext/clean.rs | 49 ++++++++++++++++++++++++++++++++------- src/commands/sdk/clean.rs | 31 +++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 8 deletions(-) diff --git a/src/commands/ext/clean.rs b/src/commands/ext/clean.rs index 1558933..62898f8 100644 --- a/src/commands/ext/clean.rs +++ b/src/commands/ext/clean.rs @@ -6,6 +6,9 @@ use anyhow::{Context, Result}; use crate::utils::config::{Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; +use crate::utils::stamps::{ + generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement, +}; use crate::utils::target::resolve_target_required; pub struct ExtCleanCommand { @@ -173,14 +176,6 @@ impl ExtCleanCommand { return Ok(()); } - print_info( - &format!( - "Executing {} clean script(s) for compile dependencies", - clean_scripts.len() - ), - OutputLevel::Normal, - ); - // Get SDK configuration for container setup let repo_url = config.get_sdk_repo_url(); let repo_release = config.get_sdk_repo_release(); @@ -189,6 +184,44 @@ impl ExtCleanCommand { // Initialize SDK container helper let container_helper = SdkContainer::from_config(&self.config_path, config)?; + // Validate SDK is installed before running clean scripts + let requirements = vec![StampRequirement::sdk_install()]; + let batch_script = generate_batch_read_stamps_script(&requirements); + let run_config = RunConfig { + container_image: container_image.to_string(), + target: target.to_string(), + 
command: batch_script, + verbose: false, + source_environment: true, + interactive: false, + repo_url: repo_url.clone(), + repo_release: repo_release.clone(), + container_args: merged_container_args.clone(), + dnf_args: self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), + ..Default::default() + }; + + let output = container_helper + .run_in_container_with_output(run_config) + .await?; + + let validation = + validate_stamps_batch(&requirements, output.as_deref().unwrap_or(""), None); + + if !validation.is_satisfied() { + let error = validation.into_error("Cannot run clean scripts for compile dependencies"); + return Err(error.into()); + } + + print_info( + &format!( + "Executing {} clean script(s) for compile dependencies", + clean_scripts.len() + ), + OutputLevel::Normal, + ); + // Execute each clean script for (section_name, clean_script) in clean_scripts { print_info( diff --git a/src/commands/sdk/clean.rs b/src/commands/sdk/clean.rs index 66487e4..2826e45 100644 --- a/src/commands/sdk/clean.rs +++ b/src/commands/sdk/clean.rs @@ -6,6 +6,7 @@ use crate::utils::{ config::Config, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, + stamps::{generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement}, target::resolve_target_required, }; @@ -92,6 +93,36 @@ impl SdkCleanCommand { // If sections are specified, run clean scripts for those sections if !self.sections.is_empty() { + // Validate SDK is installed before running clean scripts + let requirements = vec![StampRequirement::sdk_install()]; + let batch_script = generate_batch_read_stamps_script(&requirements); + let run_config = RunConfig { + container_image: container_image.to_string(), + target: target.clone(), + command: batch_script, + verbose: false, + source_environment: true, + interactive: false, + repo_url: repo_url.clone(), + repo_release: repo_release.clone(), + container_args: merged_container_args.clone(), + dnf_args: 
self.dnf_args.clone(), + sdk_arch: self.sdk_arch.clone(), + ..Default::default() + }; + + let output = container_helper + .run_in_container_with_output(run_config) + .await?; + + let validation = + validate_stamps_batch(&requirements, output.as_deref().unwrap_or(""), None); + + if !validation.is_satisfied() { + let error = validation.into_error("Cannot run SDK clean scripts"); + return Err(error.into()); + } + let ctx = CleanContext { container_helper: &container_helper, container_image, From 872dd375b3a93f4b2010e2136457e59f8a7dfe29 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Sat, 10 Jan 2026 11:55:20 -0500 Subject: [PATCH 22/23] optimize composed configuration loading --- src/commands/build.rs | 90 ++++++++++++++++++++----------- src/commands/ext/build.rs | 24 +++++++-- src/commands/ext/checkout.rs | 28 ++++++++-- src/commands/ext/clean.rs | 25 +++++++-- src/commands/ext/deps.rs | 34 +++++++++--- src/commands/ext/dnf.rs | 24 +++++++-- src/commands/ext/fetch.rs | 27 ++++++++-- src/commands/ext/image.rs | 24 +++++++-- src/commands/ext/install.rs | 24 +++++++-- src/commands/ext/list.rs | 28 +++++++--- src/commands/ext/package.rs | 25 +++++++-- src/commands/fetch.rs | 35 ++++++++---- src/commands/hitl/server.rs | 34 ++++++++++-- src/commands/install.rs | 55 ++++++++++++------- src/commands/provision.rs | 36 ++++++++++--- src/commands/runtime/build.rs | 43 ++++++++++----- src/commands/runtime/clean.rs | 32 ++++++++--- src/commands/runtime/deploy.rs | 31 ++++++++--- src/commands/runtime/deps.rs | 26 ++++++--- src/commands/runtime/dnf.rs | 24 +++++++-- src/commands/runtime/install.rs | 24 +++++++-- src/commands/runtime/list.rs | 27 +++++++--- src/commands/runtime/provision.rs | 43 ++++++++------- src/commands/runtime/sign.rs | 36 +++++++++---- src/commands/sdk/clean.rs | 24 +++++++-- src/commands/sdk/compile.rs | 25 +++++++-- src/commands/sdk/deps.rs | 30 +++++++++-- src/commands/sdk/dnf.rs | 25 +++++++-- src/commands/sdk/install.rs | 31 +++++++---- 
src/commands/sdk/run.rs | 31 ++++++++--- src/commands/sign.rs | 53 +++++++++++++----- src/commands/unlock.rs | 27 ++++++++-- src/main.rs | 1 + 33 files changed, 802 insertions(+), 244 deletions(-) diff --git a/src/commands/build.rs b/src/commands/build.rs index 17d9f86..1aad208 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -2,13 +2,14 @@ use anyhow::{Context, Result}; use std::collections::HashSet; +use std::sync::Arc; use crate::commands::{ ext::{ExtBuildCommand, ExtImageCommand}, runtime::RuntimeBuildCommand, }; use crate::utils::{ - config::{Config, ExtensionSource}, + config::{ComposedConfig, Config, ExtensionSource}, output::{print_info, print_success, OutputLevel}, }; @@ -52,6 +53,8 @@ pub struct BuildCommand { pub nfs_port: Option, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl BuildCommand { @@ -77,6 +80,7 @@ impl BuildCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -99,32 +103,40 @@ impl BuildCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the build command pub async fn execute(&self) -> Result<()> { - // Early target validation - load basic config first - let basic_config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; - let target = - crate::utils::target::validate_and_log_target(self.target.as_deref(), &basic_config)?; - - // Load the composed configuration (merges external configs, applies interpolation) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match 
&self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; + let target = crate::utils::target::validate_and_log_target(self.target.as_deref(), config)?; // If a specific extension is requested, build only that extension if let Some(ref ext_name) = self.extension { return self - .build_single_extension(config, parsed, ext_name, &target) + .build_single_extension(&composed, ext_name, &target) .await; } // If a specific runtime is requested, build only that runtime and its dependencies if let Some(ref runtime_name) = self.runtime { return self - .build_single_runtime(config, parsed, runtime_name, &target) + .build_single_runtime(&composed, runtime_name, &target) .await; } @@ -171,7 +183,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build extension '{extension_name}'") })?; @@ -231,7 +244,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build remote extension '{name}'") })?; @@ -264,7 +278,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension 
'{extension_name}'") })?; @@ -313,7 +328,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") })?; @@ -351,7 +367,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); runtime_build_cmd .execute() .await @@ -838,11 +855,13 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION /// Build a single extension without building runtimes async fn build_single_extension( &self, - config: &Config, - parsed: &serde_yaml::Value, + composed: &Arc, extension_name: &str, target: &str, ) -> Result<()> { + let config = &composed.config; + let parsed = &composed.merged_value; + print_info( &format!("Building single extension '{extension_name}' for target '{target}'"), OutputLevel::Normal, @@ -883,7 +902,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_build_cmd .execute() .await @@ -912,7 +932,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_build_cmd .execute() .await @@ -937,7 +958,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION 
self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension '{ext_name}'") })?; @@ -971,7 +993,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") })?; @@ -988,11 +1011,13 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION /// Build a single runtime and its required extensions async fn build_single_runtime( &self, - config: &Config, - parsed: &serde_yaml::Value, + composed: &Arc, runtime_name: &str, target: &str, ) -> Result<()> { + let config = &composed.config; + let parsed = &composed.merged_value; + print_info( &format!("Building single runtime '{runtime_name}' for target '{target}'"), OutputLevel::Normal, @@ -1069,7 +1094,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build extension '{extension_name}'") })?; @@ -1084,7 +1110,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); 
ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension '{extension_name}'") })?; @@ -1148,7 +1175,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build remote extension '{name}'") })?; @@ -1163,7 +1191,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") })?; @@ -1189,7 +1218,8 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(composed)); runtime_build_cmd .execute() .await diff --git a/src/commands/ext/build.rs b/src/commands/ext/build.rs index 9c4bc0a..fd3324c 100644 --- a/src/commands/ext/build.rs +++ b/src/commands/ext/build.rs @@ -2,9 +2,10 @@ #![allow(deprecated)] use anyhow::{Context, Result}; +use std::sync::Arc; use crate::commands::sdk::SdkCompileCommand; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use crate::utils::stamps::{ @@ -38,6 +39,8 @@ pub struct ExtBuildCommand { pub runs_on: Option, pub nfs_port: 
Option, pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtBuildCommand { @@ -60,6 +63,7 @@ impl ExtBuildCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -82,10 +86,22 @@ impl ExtBuildCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs with compile sections) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/ext/checkout.rs b/src/commands/ext/checkout.rs index 4f79a68..7ad3713 100644 --- a/src/commands/ext/checkout.rs +++ b/src/commands/ext/checkout.rs @@ -1,8 +1,9 @@ use anyhow::{Context, Result}; use std::path::Path; +use std::sync::Arc; use tokio::process::Command as AsyncCommand; -use crate::utils::config::Config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use crate::utils::stamps::{ @@ -21,6 +22,8 @@ pub struct ExtCheckoutCommand { target: Option, no_stamps: bool, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtCheckoutCommand { @@ -43,6 +46,7 @@ impl ExtCheckoutCommand { target, no_stamps: false, 
sdk_arch: None, + composed_config: None, } } @@ -58,18 +62,34 @@ impl ExtCheckoutCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { let cwd = std::env::current_dir().context("Failed to get current directory")?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .context("Failed to load config")?, + ), + }; + let config = &composed.config; + // Validate stamps before proceeding (unless --no-stamps) // Checkout requires extension to be installed if !self.no_stamps { - let config = Config::load(&self.config_path)?; - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; if let Some(container_image) = config.get_sdk_image() { let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); let requirements = vec![ StampRequirement::sdk_install(), diff --git a/src/commands/ext/clean.rs b/src/commands/ext/clean.rs index 62898f8..7a279ca 100644 --- a/src/commands/ext/clean.rs +++ b/src/commands/ext/clean.rs @@ -2,8 +2,9 @@ #![allow(deprecated)] use anyhow::{Context, Result}; +use std::sync::Arc; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use crate::utils::stamps::{ @@ -19,6 +20,8 @@ pub struct ExtCleanCommand { container_args: Option>, dnf_args: Option>, sdk_arch: Option, + /// Pre-composed 
configuration to avoid reloading + composed_config: Option>, } impl ExtCleanCommand { @@ -38,6 +41,7 @@ impl ExtCleanCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -47,10 +51,23 @@ impl ExtCleanCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs with compile sections) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/ext/deps.rs b/src/commands/ext/deps.rs index 9398e35..6418f28 100644 --- a/src/commands/ext/deps.rs +++ b/src/commands/ext/deps.rs @@ -3,8 +3,9 @@ use anyhow::Result; use std::collections::HashSet; +use std::sync::Arc; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::output::{print_error, print_info, OutputLevel}; use crate::utils::target::resolve_target_required; @@ -12,6 +13,8 @@ pub struct ExtDepsCommand { config_path: String, extension: Option, target: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtDepsCommand { @@ -20,18 +23,33 @@ impl ExtDepsCommand { config_path, extension, target, + composed_config: None, } } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] 
+ pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed( + &self.config_path, + self.target.as_deref(), + )?), + }; + let config = &composed.config; + let parsed = &composed.merged_value; - let target = resolve_target_required(self.target.as_deref(), &config)?; - let extensions_to_process = self.get_extensions_to_process(&config, &parsed, &target)?; + let target = resolve_target_required(self.target.as_deref(), config)?; + let extensions_to_process = self.get_extensions_to_process(config, parsed, &target)?; - self.display_dependencies(&parsed, &extensions_to_process); + self.display_dependencies(parsed, &extensions_to_process); Ok(()) } @@ -339,6 +357,7 @@ sdk: config_path: "test.yaml".to_string(), extension: Some("my-extension".to_string()), target: None, + composed_config: None, }; // Test new syntax with install script @@ -384,6 +403,7 @@ extensions: config_path: "test.yaml".to_string(), extension: Some("test-ext".to_string()), target: None, + composed_config: None, }; // Test version dependency diff --git a/src/commands/ext/dnf.rs b/src/commands/ext/dnf.rs index ff0b7ae..4144492 100644 --- a/src/commands/ext/dnf.rs +++ b/src/commands/ext/dnf.rs @@ -2,8 +2,9 @@ #![allow(deprecated)] use anyhow::{Context, Result}; +use std::sync::Arc; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use 
crate::utils::target::resolve_target_required; @@ -17,6 +18,8 @@ pub struct ExtDnfCommand { container_args: Option>, dnf_args: Option>, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtDnfCommand { @@ -38,6 +41,7 @@ impl ExtDnfCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -47,10 +51,22 @@ impl ExtDnfCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .context("Failed to load composed config")?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .context("Failed to load composed config")?, + ), + }; let config = &composed.config; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); let parsed = &composed.merged_value; diff --git a/src/commands/ext/fetch.rs b/src/commands/ext/fetch.rs index 64d4941..b71cc91 100644 --- a/src/commands/ext/fetch.rs +++ b/src/commands/ext/fetch.rs @@ -4,8 +4,9 @@ //! and installs them to `$AVOCADO_PREFIX/includes//`. 
use anyhow::{Context, Result}; +use std::sync::Arc; -use crate::utils::config::{Config, ExtensionSource}; +use crate::utils::config::{ComposedConfig, Config, ExtensionSource}; use crate::utils::ext_fetch::ExtensionFetcher; use crate::utils::output::{print_info, print_success, OutputLevel}; use crate::utils::target::resolve_target_required; @@ -30,6 +31,8 @@ pub struct ExtFetchCommand { pub runs_on: Option, /// NFS port for remote execution pub nfs_port: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtFetchCommand { @@ -52,6 +55,7 @@ impl ExtFetchCommand { sdk_arch: None, runs_on: None, nfs_port: None, + composed_config: None, } } @@ -68,14 +72,27 @@ impl ExtFetchCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the fetch command pub async fn execute(&self) -> Result<()> { - // Load configuration - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; // Resolve target - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // Get container image let container_image = config diff --git a/src/commands/ext/image.rs b/src/commands/ext/image.rs index 88466ba..11bc927 100644 --- a/src/commands/ext/image.rs +++ b/src/commands/ext/image.rs @@ -2,8 +2,9 @@ #![allow(deprecated)] use anyhow::{Context, Result}; +use std::sync::Arc; -use crate::utils::config::{Config, 
ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_info, print_success, OutputLevel}; use crate::utils::stamps::{ @@ -24,6 +25,8 @@ pub struct ExtImageCommand { runs_on: Option, nfs_port: Option, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtImageCommand { @@ -46,6 +49,7 @@ impl ExtImageCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -68,10 +72,22 @@ impl ExtImageCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index f1febc6..0729672 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -3,8 +3,9 @@ use anyhow::{Context, Result}; use std::path::{Path, PathBuf}; +use std::sync::Arc; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::lockfile::{build_package_spec_with_lock, LockFile, SysrootType}; use 
crate::utils::output::{print_debug, print_error, print_info, print_success, OutputLevel}; @@ -26,6 +27,8 @@ pub struct ExtInstallCommand { runs_on: Option, nfs_port: Option, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtInstallCommand { @@ -50,6 +53,7 @@ impl ExtInstallCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -72,10 +76,22 @@ impl ExtInstallCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load the composed configuration (merges external configs, applies interpolation) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/ext/list.rs b/src/commands/ext/list.rs index fad1b4a..7a3027a 100644 --- a/src/commands/ext/list.rs +++ b/src/commands/ext/list.rs @@ -1,23 +1,39 @@ use anyhow::Result; +use std::sync::Arc; -use crate::utils::config::load_config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::output::{print_success, OutputLevel}; pub struct ExtListCommand { config_path: String, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtListCommand { pub fn new(config_path: String) -> Self { - Self { config_path } + Self { + config_path, + composed_config: None, + } + } + + /// Set pre-composed configuration to 
avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self } pub fn execute(&self) -> Result<()> { - let _config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed(&self.config_path, None)?), + }; + let parsed = &composed.merged_value; - let extensions = self.get_extensions(&parsed); + let extensions = self.get_extensions(parsed); self.display_extensions(&extensions); print_success( diff --git a/src/commands/ext/package.rs b/src/commands/ext/package.rs index c7e9d4e..421d905 100644 --- a/src/commands/ext/package.rs +++ b/src/commands/ext/package.rs @@ -2,11 +2,12 @@ #![allow(deprecated)] use anyhow::{Context, Result}; +use std::sync::Arc; use std::fs; use std::path::PathBuf; -use crate::utils::config::{Config, ExtensionLocation}; +use crate::utils::config::{ComposedConfig, Config, ExtensionLocation}; use crate::utils::container::SdkContainer; use crate::utils::output::{print_info, print_success, print_warning, OutputLevel}; // Note: Stamp imports removed - we no longer validate build stamps for packaging @@ -28,6 +29,8 @@ pub struct ExtPackageCommand { #[allow(dead_code)] pub no_stamps: bool, pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ExtPackageCommand { @@ -50,6 +53,7 @@ impl ExtPackageCommand { dnf_args, no_stamps: false, sdk_arch: None, + composed_config: None, } } @@ -65,10 +69,23 @@ impl ExtPackageCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> 
{ - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 3595e1c..0c61881 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -3,11 +3,12 @@ use anyhow::{Context, Result}; use std::collections::HashSet; +use std::sync::Arc; use tokio::process::Command as AsyncCommand; use crate::commands::install::ExtensionDependency; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, target::resolve_target_required, @@ -33,6 +34,8 @@ pub struct FetchCommand { container_args: Option>, dnf_args: Option>, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl FetchCommand { @@ -54,6 +57,7 @@ impl FetchCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -63,14 +67,27 @@ impl FetchCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load configuration - let config = Config::load(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let config_toml: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config 
or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; + let config_toml = &composed.merged_value; // Resolve target architecture - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Get container configuration from interpolated config let container_image = config @@ -110,7 +127,7 @@ impl FetchCommand { repo_release: repo_release.as_ref(), container_args: &merged_container_args, }; - self.fetch_extension_metadata(&config_toml, extension, &container_config) + self.fetch_extension_metadata(config_toml, extension, &container_config) .await?; } (None, Some(runtime)) => { @@ -123,7 +140,7 @@ impl FetchCommand { repo_release: repo_release.as_ref(), container_args: &merged_container_args, }; - self.fetch_runtime_metadata(&config_toml, runtime, &container_config) + self.fetch_runtime_metadata(config_toml, runtime, &container_config) .await?; } (None, None) => { @@ -136,7 +153,7 @@ impl FetchCommand { repo_release: repo_release.as_ref(), container_args: &merged_container_args, }; - self.fetch_all_metadata(&config_toml, &container_config) + self.fetch_all_metadata(config_toml, &container_config) .await?; } (Some(_), Some(_)) => { diff --git a/src/commands/hitl/server.rs b/src/commands/hitl/server.rs index 95d9c3b..22b2ce0 100644 --- a/src/commands/hitl/server.rs +++ b/src/commands/hitl/server.rs @@ -1,4 +1,4 @@ -use crate::utils::config::Config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::container::{is_docker_desktop, RunConfig, SdkContainer}; use crate::utils::nfs_server::{NfsExport, HITL_DEFAULT_PORT}; use crate::utils::output::{print_debug, print_info, OutputLevel}; @@ -6,9 +6,10 @@ use 
crate::utils::stamps::{ generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement, }; use crate::utils::target::validate_and_log_target; -use anyhow::Result; +use anyhow::{Context, Result}; use clap::Args; use std::path::PathBuf; +use std::sync::Arc; #[derive(Args, Debug)] pub struct HitlServerCommand { @@ -46,15 +47,34 @@ pub struct HitlServerCommand { /// SDK container architecture for cross-arch emulation #[arg(skip)] pub sdk_arch: Option, + + /// Pre-composed configuration to avoid reloading + #[arg(skip)] + pub composed_config: Option>, } impl HitlServerCommand { + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; let container_helper = SdkContainer::new().verbose(self.verbose); // Use shared target resolution logic with early validation and logging - let target = validate_and_log_target(self.target.as_deref(), &config)?; + let target = validate_and_log_target(self.target.as_deref(), config)?; // Get SDK configuration let (container_image, repo_url, repo_release) = if let Some(sdk_config) = &config.sdk { @@ -314,6 +334,7 @@ mod tests { port: None, no_stamps: false, sdk_arch: None, + composed_config: None, }; let commands = cmd.generate_export_setup_commands(); @@ -337,6 +358,7 @@ mod tests { port: Some(2049), no_stamps: false, sdk_arch: None, + composed_config: None, }; let commands = cmd.generate_export_setup_commands(); @@ -358,6 +380,7 @@ mod tests { port: Some(3049), no_stamps: false, 
sdk_arch: None, + composed_config: None, }; let commands = cmd.generate_export_setup_commands(); @@ -380,6 +403,7 @@ mod tests { port: Some(4049), no_stamps: false, sdk_arch: None, + composed_config: None, }; let commands = cmd.generate_export_setup_commands(); @@ -429,6 +453,7 @@ mod tests { port: None, no_stamps: true, sdk_arch: None, + composed_config: None, }; // With no_stamps, validation should be skipped @@ -448,6 +473,7 @@ mod tests { port: None, no_stamps: false, sdk_arch: None, + composed_config: None, }; // With no extensions, the stamp validation loop is skipped entirely diff --git a/src/commands/install.rs b/src/commands/install.rs index 715f7a5..3dcef7e 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -5,6 +5,7 @@ use anyhow::{Context, Result}; use std::path::PathBuf; +use std::sync::Arc; use crate::commands::{ ext::ExtInstallCommand, runtime::RuntimeInstallCommand, sdk::SdkInstallCommand, @@ -56,6 +57,8 @@ pub struct InstallCommand { pub nfs_port: Option, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl InstallCommand { @@ -81,6 +84,7 @@ impl InstallCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -103,20 +107,29 @@ impl InstallCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the install command pub async fn execute(&self) -> Result<()> { - // Early target validation - load basic config first to validate target - let basic_config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; - let _target = validate_and_log_target(self.target.as_deref(), &basic_config)?; - - // Load the composed configuration (merges external configs, applies 
interpolation) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; // parsed from initial load is not used after sdk install reloads config let _parsed = &composed.merged_value; + let _target = validate_and_log_target(self.target.as_deref(), config)?; print_info( "Starting comprehensive install process...", @@ -147,7 +160,8 @@ impl InstallCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); sdk_install_cmd .execute() .await @@ -155,13 +169,16 @@ impl InstallCommand { // Reload composed config after SDK install to pick up newly fetched remote extensions // SDK install includes ext fetch which downloads remote extensions to $AVOCADO_PREFIX/includes/ - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| { - format!( - "Failed to reload composed config from {} after SDK install", - self.config_path - ) - })?; + let composed = Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || { + format!( + "Failed to reload composed config from {} after SDK install", + self.config_path + ) + }, + )?, + ); let config = &composed.config; let parsed = &composed.merged_value; @@ -195,7 +212,8 @@ impl InstallCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + 
.with_composed_config(Arc::clone(&composed)); ext_install_cmd.execute().await.with_context(|| { format!( "Failed to install extension dependencies for '{extension_name}'" @@ -280,7 +298,8 @@ impl InstallCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_composed_config(Arc::clone(&composed)); runtime_install_cmd.execute().await.with_context(|| { format!("Failed to install runtime dependencies for '{runtime_name}'") })?; diff --git a/src/commands/provision.rs b/src/commands/provision.rs index 3c7c4b2..e864b72 100644 --- a/src/commands/provision.rs +++ b/src/commands/provision.rs @@ -1,9 +1,11 @@ //! Provision command implementation that acts as a shortcut to runtime provision. -use anyhow::Result; +use anyhow::{Context, Result}; use std::collections::HashMap; +use std::sync::Arc; use crate::commands::runtime::RuntimeProvisionCommand; +use crate::utils::config::{ComposedConfig, Config}; /// Configuration for provision command pub struct ProvisionConfig { @@ -40,21 +42,38 @@ pub struct ProvisionConfig { /// Implementation of the 'provision' command that calls through to runtime provision. 
pub struct ProvisionCommand { config: ProvisionConfig, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl ProvisionCommand { /// Create a new ProvisionCommand instance pub fn new(config: ProvisionConfig) -> Self { - Self { config } + Self { + config, + composed_config: None, + } + } + + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self } /// Execute the provision command by calling runtime provision pub async fn execute(&self) -> Result<()> { - // Load composed config to access provision profiles (including from remote extensions) - let composed = crate::utils::config::Config::load_composed( - &self.config.config_path, - self.config.target.as_deref(), - )?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config.config_path, self.config.target.as_deref()) + .with_context(|| { + format!("Failed to load config from {}", self.config.config_path) + })?, + ), + }; let config = &composed.config; // Get state file path from provision profile if available @@ -84,7 +103,8 @@ impl ProvisionCommand { nfs_port: self.config.nfs_port, sdk_arch: self.config.sdk_arch.clone(), }, - ); + ) + .with_composed_config(composed); runtime_provision_cmd.execute().await } diff --git a/src/commands/runtime/build.rs b/src/commands/runtime/build.rs index 628ae99..14cb534 100644 --- a/src/commands/runtime/build.rs +++ b/src/commands/runtime/build.rs @@ -1,5 +1,5 @@ use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, runs_on::RunsOnContext, @@ -11,6 +11,7 @@ use crate::utils::{ }; use anyhow::{Context, Result}; use std::collections::{HashMap, HashSet}; +use std::sync::Arc; pub struct 
RuntimeBuildCommand { runtime_name: String, @@ -23,6 +24,8 @@ pub struct RuntimeBuildCommand { runs_on: Option, nfs_port: Option, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeBuildCommand { @@ -45,6 +48,7 @@ impl RuntimeBuildCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -67,10 +71,22 @@ impl RuntimeBuildCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; @@ -230,7 +246,8 @@ impl RuntimeBuildCommand { .await?; // Build var image - let build_script = self.create_build_script(parsed, target_arch, &resolved_extensions)?; + let build_script = + self.create_build_script(config, parsed, target_arch, &resolved_extensions)?; if self.verbose { print_info( @@ -362,14 +379,12 @@ impl RuntimeBuildCommand { fn create_build_script( &self, + config: &Config, parsed: &serde_yaml::Value, target_arch: &str, resolved_extensions: &[String], ) -> Result { // Get merged runtime configuration including target-specific dependencies - // Use load_composed to include remote extension configs - let composed = Config::load_composed(&self.config_path, Some(target_arch))?; - let config = &composed.config; 
let merged_runtime = config .get_merged_runtime_config(&self.runtime_name, target_arch, &self.config_path)? .with_context(|| { @@ -911,9 +926,10 @@ runtimes: ); // Pass empty resolved_extensions since no extensions are defined with versions + let config = Config::load(&cmd.config_path).unwrap(); let resolved_extensions: Vec = vec![]; let script = cmd - .create_build_script(&parsed, "x86_64", &resolved_extensions) + .create_build_script(&config, &parsed, "x86_64", &resolved_extensions) .unwrap(); assert!(script.contains("RUNTIME_NAME=\"test-runtime\"")); @@ -953,9 +969,10 @@ extensions: None, ); + let config = Config::load(&cmd.config_path).unwrap(); let resolved_extensions = vec!["test-ext-1.0.0".to_string()]; let script = cmd - .create_build_script(&parsed, "x86_64", &resolved_extensions) + .create_build_script(&config, &parsed, "x86_64", &resolved_extensions) .unwrap(); assert!(script.contains("test-ext-1.0.0.raw")); @@ -996,9 +1013,10 @@ extensions: None, ); + let config = Config::load(&cmd.config_path).unwrap(); let resolved_extensions = vec!["test-ext-1.0.0".to_string()]; let script = cmd - .create_build_script(&parsed, "x86_64", &resolved_extensions) + .create_build_script(&config, &parsed, "x86_64", &resolved_extensions) .unwrap(); // Extension should be copied from output/extensions to runtime-specific directory @@ -1040,9 +1058,10 @@ extensions: None, ); + let config = Config::load(&cmd.config_path).unwrap(); let resolved_extensions = vec!["test-ext-1.0.0".to_string()]; let script = cmd - .create_build_script(&parsed, "x86_64", &resolved_extensions) + .create_build_script(&config, &parsed, "x86_64", &resolved_extensions) .unwrap(); // Extension should be copied from output/extensions to runtime-specific directory diff --git a/src/commands/runtime/clean.rs b/src/commands/runtime/clean.rs index 6c9c9f0..6b6bb0d 100644 --- a/src/commands/runtime/clean.rs +++ b/src/commands/runtime/clean.rs @@ -1,6 +1,7 @@ use anyhow::Result; +use std::sync::Arc; -use 
crate::utils::config::{load_config, Config}; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use crate::utils::target::resolve_target_required; @@ -13,6 +14,8 @@ pub struct RuntimeCleanCommand { container_args: Option>, dnf_args: Option>, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeCleanCommand { @@ -32,6 +35,7 @@ impl RuntimeCleanCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -41,14 +45,28 @@ impl RuntimeCleanCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - let config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed( + &self.config_path, + self.target.as_deref(), + )?), + }; + let config = &composed.config; + let parsed = &composed.merged_value; - self.validate_runtime_exists(&parsed)?; - let container_image = self.get_container_image(&config)?; - let target = self.resolve_target_architecture(&config)?; + self.validate_runtime_exists(parsed)?; + let container_image = self.get_container_image(config)?; + let target = self.resolve_target_architecture(config)?; self.clean_runtime(&container_image, &target).await } diff --git a/src/commands/runtime/deploy.rs b/src/commands/runtime/deploy.rs index 6ac8c18..4fe6d58 100644 --- a/src/commands/runtime/deploy.rs +++ b/src/commands/runtime/deploy.rs @@ -1,5 +1,5 @@ use crate::utils::{ - config::load_config, + 
config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_info, print_success, OutputLevel}, stamps::{generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement}, @@ -7,6 +7,7 @@ use crate::utils::{ }; use anyhow::{Context, Result}; use std::collections::HashMap; +use std::sync::Arc; pub struct RuntimeDeployCommand { runtime_name: String, @@ -18,6 +19,8 @@ pub struct RuntimeDeployCommand { dnf_args: Option>, no_stamps: bool, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeDeployCommand { @@ -40,6 +43,7 @@ impl RuntimeDeployCommand { dnf_args, no_stamps: false, sdk_arch: None, + composed_config: None, } } @@ -55,11 +59,24 @@ impl RuntimeDeployCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load configuration - let config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed( + &self.config_path, + self.target.as_deref(), + )?), + }; + let config = &composed.config; + let parsed = &composed.merged_value; // Get SDK configuration from interpolated config let container_image = config @@ -83,11 +100,11 @@ impl RuntimeDeployCommand { .map(|s| s.to_string()); // Resolve target architecture - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Initialize SDK container helper let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + 
SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Validate stamps before proceeding (unless --no-stamps) if !self.no_stamps { diff --git a/src/commands/runtime/deps.rs b/src/commands/runtime/deps.rs index bb2b4d5..f31e838 100644 --- a/src/commands/runtime/deps.rs +++ b/src/commands/runtime/deps.rs @@ -1,12 +1,15 @@ use crate::utils::{ - config::load_config, + config::{ComposedConfig, Config}, output::{print_success, OutputLevel}, }; use anyhow::{Context, Result}; +use std::sync::Arc; pub struct RuntimeDepsCommand { config_path: String, runtime_name: String, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeDepsCommand { @@ -14,16 +17,27 @@ impl RuntimeDepsCommand { Self { config_path, runtime_name, + composed_config: None, } } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub fn execute(&self) -> Result<()> { - let _config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed(&self.config_path, None)?), + }; + let parsed = &composed.merged_value; - self.validate_runtime_exists(&parsed)?; - let dependencies = self.list_runtime_dependencies(&parsed, &self.runtime_name)?; + self.validate_runtime_exists(parsed)?; + let dependencies = self.list_runtime_dependencies(parsed, &self.runtime_name)?; self.display_dependencies(&dependencies); print_success( diff --git a/src/commands/runtime/dnf.rs b/src/commands/runtime/dnf.rs index ff1f96c..98f7d4b 100644 --- a/src/commands/runtime/dnf.rs +++ b/src/commands/runtime/dnf.rs @@ -1,6 +1,7 @@ use anyhow::{Context, Result}; +use std::sync::Arc; -use 
crate::utils::config::Config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::output::{print_error, print_info, print_success, OutputLevel}; use crate::utils::target::resolve_target_required; @@ -14,6 +15,8 @@ pub struct RuntimeDnfCommand { container_args: Option>, dnf_args: Option>, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeDnfCommand { @@ -35,6 +38,7 @@ impl RuntimeDnfCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -44,10 +48,22 @@ impl RuntimeDnfCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .context("Failed to load composed config")?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .context("Failed to load composed config")?, + ), + }; let config = &composed.config; let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); let parsed = &composed.merged_value; diff --git a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index 31ff84f..d782b6b 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -1,7 +1,8 @@ use anyhow::{Context, Result}; use std::path::{Path, PathBuf}; +use std::sync::Arc; -use crate::utils::config::Config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::container::{RunConfig, SdkContainer}; use crate::utils::lockfile::{build_package_spec_with_lock, 
LockFile, SysrootType}; use crate::utils::output::{print_debug, print_error, print_info, print_success, OutputLevel}; @@ -23,6 +24,8 @@ pub struct RuntimeInstallCommand { runs_on: Option, nfs_port: Option, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeInstallCommand { @@ -47,6 +50,7 @@ impl RuntimeInstallCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -69,10 +73,22 @@ impl RuntimeInstallCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; let parsed = &composed.merged_value; diff --git a/src/commands/runtime/list.rs b/src/commands/runtime/list.rs index a22a473..dc8be11 100644 --- a/src/commands/runtime/list.rs +++ b/src/commands/runtime/list.rs @@ -1,23 +1,38 @@ use crate::utils::{ - config::load_config, + config::{ComposedConfig, Config}, output::{print_success, OutputLevel}, }; use anyhow::Result; +use std::sync::Arc; pub struct RuntimeListCommand { config_path: String, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeListCommand { pub fn new(config_path: String) -> Self { - Self { config_path } + Self { + config_path, + composed_config: None, + } + } + + /// Set 
pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self } pub fn execute(&self) -> Result<()> { - // Load configuration and parse raw TOML - let _config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed(&self.config_path, None)?), + }; + let parsed = &composed.merged_value; // Check if runtime section exists if let Some(runtime_config) = parsed.get("runtimes").and_then(|v| v.as_mapping()) { diff --git a/src/commands/runtime/provision.rs b/src/commands/runtime/provision.rs index 694dbac..c55b4ca 100644 --- a/src/commands/runtime/provision.rs +++ b/src/commands/runtime/provision.rs @@ -1,7 +1,7 @@ #[cfg(unix)] use crate::utils::signing_service::{generate_helper_script, SigningService, SigningServiceConfig}; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_info, print_success, OutputLevel}, remote::{RemoteHost, SshClient}, @@ -16,6 +16,7 @@ use crate::utils::{ use anyhow::{Context, Result}; use std::collections::HashMap; use std::path::PathBuf; +use std::sync::Arc; pub struct RuntimeProvisionConfig { pub runtime_name: String, @@ -45,6 +46,8 @@ pub struct RuntimeProvisionCommand { config: RuntimeProvisionConfig, #[cfg(unix)] signing_service: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeProvisionCommand { @@ -53,13 +56,25 @@ impl RuntimeProvisionCommand { config, #[cfg(unix)] signing_service: None, + composed_config: None, } } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + 
self.composed_config = Some(config); + self + } + pub async fn execute(&mut self) -> Result<()> { - // Load composed configuration (includes remote extension configs with provision profiles) - let composed = - Config::load_composed(&self.config.config_path, self.config.target.as_deref())?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new(Config::load_composed( + &self.config.config_path, + self.config.target.as_deref(), + )?), + }; let config = &composed.config; let parsed = &composed.merged_value; @@ -296,6 +311,7 @@ impl RuntimeProvisionCommand { state_file_path, container_state_path, &target_arch, + container_image, ) .await? } else { @@ -367,6 +383,7 @@ impl RuntimeProvisionCommand { container_state_path, &target_arch, state_file_existed, + container_image, ) .await?; } @@ -560,6 +577,7 @@ avocado-provision-{} {} state_file_path: &str, container_state_path: &str, _target_arch: &str, + container_image: &str, ) -> Result { let host_state_file = src_dir.join(state_file_path); @@ -588,14 +606,6 @@ avocado-provision-{} {} ); } - // Load composed configuration to get container image - let composed = - Config::load_composed(&self.config.config_path, self.config.target.as_deref())?; - let config = &composed.config; - let container_image = config - .get_sdk_image() - .context("No SDK container image specified in configuration")?; - let container_tool = "docker"; let volume_manager = VolumeManager::new(container_tool.to_string(), self.config.verbose); let volume_state = volume_manager.get_or_create_volume(src_dir).await?; @@ -672,6 +682,7 @@ avocado-provision-{} {} container_state_path: &str, _target_arch: &str, _original_existed: bool, + container_image: &str, ) -> Result<()> { if self.config.verbose { print_info( @@ -680,14 +691,6 @@ avocado-provision-{} {} ); } - // Load composed configuration to get container image - let composed = - Config::load_composed(&self.config.config_path, 
self.config.target.as_deref())?; - let config = &composed.config; - let container_image = config - .get_sdk_image() - .context("No SDK container image specified in configuration")?; - let container_tool = "docker"; let volume_manager = VolumeManager::new(container_tool.to_string(), self.config.verbose); let volume_state = volume_manager.get_or_create_volume(src_dir).await?; diff --git a/src/commands/runtime/sign.rs b/src/commands/runtime/sign.rs index 16a433e..481c1c2 100644 --- a/src/commands/runtime/sign.rs +++ b/src/commands/runtime/sign.rs @@ -3,7 +3,7 @@ //! Signs runtime images (extension images) using configured signing keys. use crate::utils::{ - config::load_config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, image_signing::{validate_signing_key_for_use, ChecksumAlgorithm}, output::{print_info, print_success, print_warning, OutputLevel}, @@ -15,6 +15,7 @@ use crate::utils::{ }; use anyhow::{Context, Result}; use std::collections::HashSet; +use std::sync::Arc; /// Command to sign runtime images pub struct RuntimeSignCommand { @@ -28,6 +29,8 @@ pub struct RuntimeSignCommand { dnf_args: Option>, no_stamps: bool, sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl RuntimeSignCommand { @@ -48,6 +51,7 @@ impl RuntimeSignCommand { dnf_args, no_stamps: false, sdk_arch: None, + composed_config: None, } } @@ -63,14 +67,26 @@ impl RuntimeSignCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + pub async fn execute(&self) -> Result<()> { - // Load configuration - let config = load_config(&self.config_path)?; - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None 
=> Arc::new(Config::load_composed( + &self.config_path, + self.target.as_deref(), + )?), + }; + let config = &composed.config; + let parsed = &composed.merged_value; // Resolve target architecture - let target_arch = resolve_target_required(self.target.as_deref(), &config)?; + let target_arch = resolve_target_required(self.target.as_deref(), config)?; // Validate stamps before proceeding (unless --no-stamps) if !self.no_stamps { @@ -78,7 +94,7 @@ impl RuntimeSignCommand { .get_sdk_image() .context("No SDK container image specified in configuration")?; let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Sign requires runtime build stamp let required = resolve_required_stamps( @@ -157,10 +173,10 @@ impl RuntimeSignCommand { } let all_required_extensions = - self.find_all_extension_dependencies(&config, &required_extensions, &target_arch)?; + self.find_all_extension_dependencies(config, &required_extensions, &target_arch)?; // Sign images - self.sign_runtime_images(&config, &target_arch, &all_required_extensions) + self.sign_runtime_images(config, &target_arch, &all_required_extensions) .await?; print_success( @@ -174,7 +190,7 @@ impl RuntimeSignCommand { .get_sdk_image() .context("No SDK container image specified")?; let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); let inputs = StampInputs::new("sign".to_string()); let outputs = StampOutputs::default(); diff --git a/src/commands/sdk/clean.rs b/src/commands/sdk/clean.rs index 2826e45..24780e0 100644 --- a/src/commands/sdk/clean.rs +++ b/src/commands/sdk/clean.rs @@ -1,9 +1,10 @@ //! SDK clean command implementation. 
use anyhow::{Context, Result}; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, stamps::{generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement}, @@ -36,6 +37,8 @@ pub struct SdkCleanCommand { pub dnf_args: Option>, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkCleanCommand { @@ -56,6 +59,7 @@ impl SdkCleanCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -65,11 +69,23 @@ impl SdkCleanCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the sdk clean command pub async fn execute(&self) -> Result<()> { - // Load composed configuration to get sdk.compile sections - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; let config = &composed.config; // Merge container args from config with CLI args diff --git a/src/commands/sdk/compile.rs b/src/commands/sdk/compile.rs index 9e40760..9f060fa 100644 --- a/src/commands/sdk/compile.rs +++ b/src/commands/sdk/compile.rs @@ -1,9 +1,10 @@ //! SDK compile command implementation. 
use anyhow::{Context, Result}; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_error, print_info, print_success, OutputLevel}, stamps::{generate_batch_read_stamps_script, validate_stamps_batch, StampRequirement}, @@ -39,6 +40,8 @@ pub struct SdkCompileCommand { /// If set, scripts are executed from this directory instead of /opt/src. /// Used for remote extensions where scripts are in $AVOCADO_PREFIX/includes// pub workdir: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkCompileCommand { @@ -61,6 +64,7 @@ impl SdkCompileCommand { no_stamps: false, sdk_arch: None, workdir: None, + composed_config: None, } } @@ -82,17 +86,30 @@ impl SdkCompileCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the sdk compile command pub async fn execute(&self) -> Result<()> { - // Load composed configuration (includes remote extension compile sections) + // Use provided config or load fresh if self.verbose { print_info( &format!("Loading SDK compile config from: {}", self.config_path), OutputLevel::Normal, ); } - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; // Validate stamps before proceeding (unless --no-stamps) diff --git a/src/commands/sdk/deps.rs b/src/commands/sdk/deps.rs index 3a2d6d0..3acfe92 100644 --- a/src/commands/sdk/deps.rs +++ 
b/src/commands/sdk/deps.rs @@ -2,9 +2,10 @@ use anyhow::{Context, Result}; use std::collections::HashMap; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, output::{print_success, OutputLevel}, }; @@ -15,24 +16,43 @@ type DependencySections = HashMap>; pub struct SdkDepsCommand { /// Path to configuration file pub config_path: String, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkDepsCommand { /// Create a new SdkDepsCommand instance pub fn new(config_path: String) -> Self { - Self { config_path } + Self { + config_path, + composed_config: None, + } + } + + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self } /// Execute the sdk deps command pub fn execute(&self) -> Result<()> { - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, None) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; // Read the config file content for extension parsing let config_content = std::fs::read_to_string(&self.config_path) .with_context(|| format!("Failed to read config file {}", self.config_path))?; - let sections = self.list_packages_by_section(&config, &config_content)?; + let sections = self.list_packages_by_section(config, &config_content)?; let total_count = self.display_packages_by_section(§ions); print_success( diff --git a/src/commands/sdk/dnf.rs b/src/commands/sdk/dnf.rs index 947c3e2..a089260 100644 --- a/src/commands/sdk/dnf.rs +++ b/src/commands/sdk/dnf.rs @@ -1,9 +1,10 @@ //! SDK DNF command implementation. 
use anyhow::{Context, Result}; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_error, print_success, OutputLevel}, target::resolve_target_required, @@ -25,6 +26,8 @@ pub struct SdkDnfCommand { pub dnf_args: Option>, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkDnfCommand { @@ -45,6 +48,7 @@ impl SdkDnfCommand { container_args, dnf_args, sdk_arch: None, + composed_config: None, } } @@ -54,6 +58,13 @@ impl SdkDnfCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the sdk dnf command pub async fn execute(&self) -> Result<()> { if self.command.is_empty() { @@ -62,9 +73,15 @@ impl SdkDnfCommand { )); } - // Load composed configuration (includes remote extension configs) - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; // Get the SDK image from configuration diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index 139d30f..02d756e 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -3,9 +3,10 @@ use anyhow::{Context, Result}; use std::collections::HashMap; use std::path::PathBuf; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, 
container::{RunConfig, SdkContainer}, lockfile::{build_package_spec_with_lock, LockFile, SysrootType}, output::{print_error, print_info, print_success, OutputLevel}, @@ -39,6 +40,8 @@ pub struct SdkInstallCommand { pub nfs_port: Option, /// SDK container architecture for cross-arch emulation pub sdk_arch: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkInstallCommand { @@ -62,6 +65,7 @@ impl SdkInstallCommand { runs_on: None, nfs_port: None, sdk_arch: None, + composed_config: None, } } @@ -84,19 +88,26 @@ impl SdkInstallCommand { self } + /// Set pre-composed configuration to avoid reloading + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the sdk install command pub async fn execute(&self) -> Result<()> { - // Early target validation - load basic config first - let basic_config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; - let target = validate_and_log_target(self.target.as_deref(), &basic_config)?; - - // Load initial composed configuration (without remote extensions yet) - // Remote extensions will be fetched after SDK bootstrap when repos are available - let composed = Config::load_composed(&self.config_path, self.target.as_deref()) - .with_context(|| format!("Failed to load composed config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()).with_context( + || format!("Failed to load composed config from {}", self.config_path), + )?, + ), + }; let config = &composed.config; + let target = validate_and_log_target(self.target.as_deref(), config)?; // Merge container args from config with CLI args let merged_container_args = config.merge_sdk_container_args(self.container_args.as_ref()); 
diff --git a/src/commands/sdk/run.rs b/src/commands/sdk/run.rs index 01cca63..0a03b00 100644 --- a/src/commands/sdk/run.rs +++ b/src/commands/sdk/run.rs @@ -5,9 +5,10 @@ use crate::utils::signing_service::{generate_helper_script, SigningService, Sign use anyhow::{Context, Result}; #[cfg(unix)] use std::path::PathBuf; +use std::sync::Arc; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, container::{RunConfig, SdkContainer}, output::{print_info, print_success, OutputLevel}, target::validate_and_log_target, @@ -52,6 +53,8 @@ pub struct SdkRunCommand { /// Signing service handle (Unix only) #[cfg(unix)] signing_service: Option, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SdkRunCommand { @@ -93,6 +96,7 @@ impl SdkRunCommand { sdk_arch: None, #[cfg(unix)] signing_service: None, + composed_config: None, } } @@ -109,6 +113,13 @@ impl SdkRunCommand { self } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Setup signing service for runtime if signing is configured #[cfg(unix)] async fn setup_signing_service( @@ -251,12 +262,18 @@ impl SdkRunCommand { )); } - // Load the configuration - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; // Early target validation and logging - fail fast if target is unsupported - let target = validate_and_log_target(self.target.as_deref(), &config)?; + let target = validate_and_log_target(self.target.as_deref(), config)?; // Get merged 
SDK configuration for the target let merged_sdk_config = config.get_merged_sdk_config(&target, &self.config_path)?; @@ -306,14 +323,14 @@ impl SdkRunCommand { // Setup signing service if a runtime is specified let signing_config = if let Some(runtime_name) = self.runtime.clone() { - self.setup_signing_service(&config, &runtime_name).await? + self.setup_signing_service(config, &runtime_name).await? } else { None }; // Use the container helper to run the command let container_helper = - SdkContainer::from_config(&self.config_path, &config)?.verbose(self.verbose); + SdkContainer::from_config(&self.config_path, config)?.verbose(self.verbose); // Create RunConfig - detach mode is now handled by the shared run_in_container let mut run_config = RunConfig { diff --git a/src/commands/sign.rs b/src/commands/sign.rs index c9cd48d..ff4612f 100644 --- a/src/commands/sign.rs +++ b/src/commands/sign.rs @@ -4,10 +4,11 @@ //! It signs all runtimes with signing configuration, or a specific runtime with `-r`. 
use anyhow::{Context, Result}; +use std::sync::Arc; use crate::commands::runtime::RuntimeSignCommand; use crate::utils::{ - config::Config, + config::{ComposedConfig, Config}, output::{print_info, print_success, OutputLevel}, }; @@ -25,6 +26,8 @@ pub struct SignCommand { pub container_args: Option>, /// Additional arguments to pass to DNF commands pub dnf_args: Option>, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl SignCommand { @@ -44,30 +47,50 @@ impl SignCommand { target, container_args, dnf_args, + composed_config: None, } } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the sign command pub async fn execute(&self) -> Result<()> { - // Load the configuration - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; // Early target validation and logging - fail fast if target is unsupported - let target = - crate::utils::target::validate_and_log_target(self.target.as_deref(), &config)?; + let target = crate::utils::target::validate_and_log_target(self.target.as_deref(), config)?; // If a specific runtime is requested, sign only that runtime if let Some(ref runtime_name) = self.runtime { - return self.sign_single_runtime(runtime_name, &target).await; + return self + .sign_single_runtime(runtime_name, &target, Arc::clone(&composed)) + .await; } // Otherwise, sign all runtimes that have signing configuration - self.sign_all_runtimes(&config, &target).await + 
self.sign_all_runtimes(&composed, &target).await } /// Sign a single runtime - async fn sign_single_runtime(&self, runtime_name: &str, target: &str) -> Result<()> { + async fn sign_single_runtime( + &self, + runtime_name: &str, + target: &str, + composed: Arc, + ) -> Result<()> { print_info( &format!("Signing runtime '{runtime_name}' for target '{target}'"), OutputLevel::Normal, @@ -80,7 +103,8 @@ impl SignCommand { Some(target.to_string()), self.container_args.clone(), self.dnf_args.clone(), - ); + ) + .with_composed_config(composed); sign_cmd .execute() @@ -91,9 +115,9 @@ impl SignCommand { } /// Sign all runtimes that have signing configuration - async fn sign_all_runtimes(&self, config: &Config, target: &str) -> Result<()> { - let content = std::fs::read_to_string(&self.config_path)?; - let parsed: serde_yaml::Value = serde_yaml::from_str(&content)?; + async fn sign_all_runtimes(&self, composed: &Arc, target: &str) -> Result<()> { + let config = &composed.config; + let parsed = &composed.merged_value; let runtime_section = parsed .get("runtimes") @@ -189,7 +213,8 @@ impl SignCommand { Some(target.to_string()), self.container_args.clone(), self.dnf_args.clone(), - ); + ) + .with_composed_config(Arc::clone(composed)); sign_cmd .execute() diff --git a/src/commands/unlock.rs b/src/commands/unlock.rs index cfc71b7..5112015 100644 --- a/src/commands/unlock.rs +++ b/src/commands/unlock.rs @@ -2,8 +2,9 @@ use anyhow::{Context, Result}; use std::path::Path; +use std::sync::Arc; -use crate::utils::config::Config; +use crate::utils::config::{ComposedConfig, Config}; use crate::utils::lockfile::LockFile; use crate::utils::output::{print_info, print_success, OutputLevel}; use crate::utils::target::resolve_target_required; @@ -25,6 +26,8 @@ pub struct UnlockCommand { runtime: Option, /// Unlock SDK (includes rootfs, target-sysroot, and all SDK arches) sdk: bool, + /// Pre-composed configuration to avoid reloading + composed_config: Option>, } impl UnlockCommand { @@ -44,17 
+47,31 @@ impl UnlockCommand { extension, runtime, sdk, + composed_config: None, } } + /// Set pre-composed configuration to avoid reloading + #[allow(dead_code)] + pub fn with_composed_config(mut self, config: Arc) -> Self { + self.composed_config = Some(config); + self + } + /// Execute the unlock command pub fn execute(&self) -> Result<()> { - // Load configuration - let config = Config::load(&self.config_path) - .with_context(|| format!("Failed to load config from {}", self.config_path))?; + // Use provided config or load fresh + let composed = match &self.composed_config { + Some(cc) => Arc::clone(cc), + None => Arc::new( + Config::load_composed(&self.config_path, self.target.as_deref()) + .with_context(|| format!("Failed to load config from {}", self.config_path))?, + ), + }; + let config = &composed.config; // Resolve target - let target = resolve_target_required(self.target.as_deref(), &config)?; + let target = resolve_target_required(self.target.as_deref(), config)?; // Get src_dir from config let src_dir = config diff --git a/src/main.rs b/src/main.rs index 19ca8a6..4ddda6d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1392,6 +1392,7 @@ async fn main() -> Result<()> { port, no_stamps: no_stamps || cli.no_stamps, sdk_arch: cli.sdk_arch.clone(), + composed_config: None, }; hitl_cmd.execute().await?; Ok(()) From be0037ae6ff34c5b196ad58baebf0cac30d78d95 Mon Sep 17 00:00:00 2001 From: Justin Schneck Date: Sat, 10 Jan 2026 20:04:04 -0500 Subject: [PATCH 23/23] ensure --sdk-arch populates to all command chains --- src/commands/build.rs | 20 +++++- src/commands/clean.rs | 6 +- src/commands/ext/install.rs | 1 + src/commands/install.rs | 6 ++ src/commands/runtime/install.rs | 1 + src/commands/sdk/install.rs | 12 +++- src/utils/container.rs | 66 ++++++++++++++++++ src/utils/stamps.rs | 84 ++++++++++++++++------ src/utils/volume.rs | 120 ++++++++++++++++++++++++++++++++ 9 files changed, 289 insertions(+), 27 deletions(-) diff --git a/src/commands/build.rs 
b/src/commands/build.rs index 1aad208..1781b75 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -184,6 +184,7 @@ impl BuildCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build extension '{extension_name}'") @@ -245,6 +246,7 @@ impl BuildCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build remote extension '{name}'") @@ -279,6 +281,7 @@ impl BuildCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension '{extension_name}'") @@ -329,6 +332,7 @@ impl BuildCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") @@ -368,6 +372,7 @@ impl BuildCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); runtime_build_cmd .execute() @@ -579,7 +584,8 @@ impl BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()); // Execute the extension build using the external config match ext_build_cmd.execute().await { @@ -641,7 +647,8 @@ impl 
BuildCommand { self.dnf_args.clone(), ) .with_no_stamps(self.no_stamps) - .with_runs_on(self.runs_on.clone(), self.nfs_port); + .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()); // Execute the image creation ext_image_cmd.execute().await.with_context(|| { @@ -903,6 +910,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_build_cmd .execute() @@ -933,6 +941,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_build_cmd .execute() @@ -959,6 +968,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension '{ext_name}'") @@ -994,6 +1004,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") @@ -1095,6 +1106,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build 
extension '{extension_name}'") @@ -1111,6 +1123,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for extension '{extension_name}'") @@ -1176,6 +1189,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_build_cmd.execute().await.with_context(|| { format!("Failed to build remote extension '{name}'") @@ -1192,6 +1206,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); ext_image_cmd.execute().await.with_context(|| { format!("Failed to create image for remote extension '{name}'") @@ -1219,6 +1234,7 @@ echo "Successfully created image for versioned extension '$EXT_NAME-$EXT_VERSION ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(composed)); runtime_build_cmd .execute() diff --git a/src/commands/clean.rs b/src/commands/clean.rs index ce39228..5acb7a2 100644 --- a/src/commands/clean.rs +++ b/src/commands/clean.rs @@ -263,11 +263,13 @@ fi ) })?; } else { + // Default behavior: automatically stop VS Code explorer containers + // (these are safe to stop), but fail if other containers are using the volume volume_manager - .remove_volume(&volume_state.volume_name) + .remove_volume_with_explorer_cleanup(&volume_state.volume_name) .await .with_context(|| { - format!("Failed to remove 
volume: {}", volume_state.volume_name) + format!("Failed to remove volume: {}. If other containers are using this volume, try using --force to remove them.", volume_state.volume_name) })?; } diff --git a/src/commands/ext/install.rs b/src/commands/ext/install.rs index 0729672..f4feade 100644 --- a/src/commands/ext/install.rs +++ b/src/commands/ext/install.rs @@ -675,6 +675,7 @@ $DNF_SDK_HOST \ repo_release.cloned(), merged_container_args.clone(), runs_on_context, + self.sdk_arch.as_ref(), ) .await?; diff --git a/src/commands/install.rs b/src/commands/install.rs index 3dcef7e..8d6d4d6 100644 --- a/src/commands/install.rs +++ b/src/commands/install.rs @@ -161,6 +161,7 @@ impl InstallCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); sdk_install_cmd .execute() @@ -213,6 +214,7 @@ impl InstallCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); ext_install_cmd.execute().await.with_context(|| { format!( @@ -299,6 +301,7 @@ impl InstallCommand { ) .with_no_stamps(self.no_stamps) .with_runs_on(self.runs_on.clone(), self.nfs_port) + .with_sdk_arch(self.sdk_arch.clone()) .with_composed_config(Arc::clone(&composed)); runtime_install_cmd.execute().await.with_context(|| { format!("Failed to install runtime dependencies for '{runtime_name}'") @@ -700,6 +703,7 @@ $DNF_SDK_HOST \ repo_release, merged_container_args, None, // TODO: Add runs_on_context support to install.rs + self.sdk_arch.as_ref(), ) .await?; @@ -949,6 +953,7 @@ $DNF_SDK_HOST \ repo_release, merged_container_args, None, // TODO: Add runs_on_context support to install.rs + self.sdk_arch.as_ref(), ) .await?; @@ -1173,6 +1178,7 @@ $DNF_SDK_HOST \ repo_release, merged_container_args, None, // TODO: Add runs_on_context support to install.rs + self.sdk_arch.as_ref(), 
) .await?; diff --git a/src/commands/runtime/install.rs b/src/commands/runtime/install.rs index d782b6b..2f5f227 100644 --- a/src/commands/runtime/install.rs +++ b/src/commands/runtime/install.rs @@ -533,6 +533,7 @@ $DNF_SDK_HOST \ repo_release.cloned(), merged_container_args.clone(), runs_on_context, + self.sdk_arch.as_ref(), ) .await?; diff --git a/src/commands/sdk/install.rs b/src/commands/sdk/install.rs index 02d756e..0bc5c3f 100644 --- a/src/commands/sdk/install.rs +++ b/src/commands/sdk/install.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use crate::utils::{ config::{ComposedConfig, Config}, - container::{RunConfig, SdkContainer}, + container::{normalize_sdk_arch, RunConfig, SdkContainer}, lockfile::{build_package_spec_with_lock, LockFile, SysrootType}, output::{print_error, print_info, print_success, OutputLevel}, runs_on::RunsOnContext, @@ -238,8 +238,11 @@ impl SdkInstallCommand { runs_on_context: Option<&RunsOnContext>, ) -> Result<()> { // Determine host architecture for SDK package tracking - // For remote execution, query the remote host; for local, use local arch - let host_arch = if let Some(context) = runs_on_context { + // Priority: sdk_arch (for cross-arch emulation) > runs_on remote arch > local arch + let host_arch = if let Some(ref arch) = self.sdk_arch { + // Convert sdk_arch to normalized architecture name (e.g., "aarch64", "x86_64") + normalize_sdk_arch(arch)? 
+ } else if let Some(context) = runs_on_context { context .get_host_arch() .await @@ -807,6 +810,7 @@ $DNF_SDK_HOST \ repo_release.map(|s| s.to_string()), merged_container_args.cloned(), runs_on_context, + self.sdk_arch.as_ref(), ) .await?; @@ -895,6 +899,7 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ repo_release.map(|s| s.to_string()), merged_container_args.cloned(), runs_on_context, + self.sdk_arch.as_ref(), ) .await?; @@ -1028,6 +1033,7 @@ $DNF_SDK_HOST $DNF_NO_SCRIPTS $DNF_SDK_TARGET_REPO_CONF \ repo_release.map(|s| s.to_string()), merged_container_args.cloned(), runs_on_context, + self.sdk_arch.as_ref(), ) .await?; diff --git a/src/utils/container.rs b/src/utils/container.rs index dc77928..e90ff10 100644 --- a/src/utils/container.rs +++ b/src/utils/container.rs @@ -72,6 +72,23 @@ pub fn sdk_arch_to_platform(sdk_arch: &str) -> Result { } } +/// Normalize SDK arch specification to standard architecture name. +/// +/// # Arguments +/// * `sdk_arch` - Architecture string (e.g., "aarch64", "x86-64", "arm64", "amd64") +/// +/// # Returns +/// Normalized architecture name (e.g., "aarch64", "x86_64") +pub fn normalize_sdk_arch(sdk_arch: &str) -> Result { + match sdk_arch.to_lowercase().as_str() { + "aarch64" | "arm64" => Ok("aarch64".to_string()), + "x86-64" | "x86_64" | "amd64" => Ok("x86_64".to_string()), + _ => Err(anyhow::anyhow!( + "Unsupported SDK architecture: '{sdk_arch}'. Supported values: aarch64, x86-64" + )), + } +} + /// Get the host's native platform in Docker format (e.g., "linux/amd64" or "linux/arm64"). /// This is used to explicitly request the native platform variant from multi-arch images, /// ensuring Docker keeps both variants cached when switching between native and emulated runs. 
@@ -949,6 +966,7 @@ impl SdkContainer { /// * `repo_release` - Optional repository release /// * `container_args` - Optional additional container arguments /// * `runs_on_context` - Optional remote execution context for --runs-on support + /// * `sdk_arch` - Optional SDK architecture for cross-arch emulation /// /// # Returns /// A HashMap of package name to version string (NEVRA format without name prefix) @@ -963,6 +981,7 @@ impl SdkContainer { repo_release: Option, container_args: Option>, runs_on_context: Option<&crate::utils::runs_on::RunsOnContext>, + sdk_arch: Option<&String>, ) -> Result> { if packages.is_empty() { return Ok(std::collections::HashMap::new()); @@ -993,6 +1012,7 @@ impl SdkContainer { repo_url, repo_release, container_args, + sdk_arch: sdk_arch.cloned(), ..Default::default() }; @@ -2247,6 +2267,52 @@ mod tests { .contains("Unsupported SDK architecture")); } + #[test] + fn test_normalize_sdk_arch_aarch64() { + let result = normalize_sdk_arch("aarch64").unwrap(); + assert_eq!(result, "aarch64"); + } + + #[test] + fn test_normalize_sdk_arch_arm64() { + let result = normalize_sdk_arch("arm64").unwrap(); + assert_eq!(result, "aarch64"); + } + + #[test] + fn test_normalize_sdk_arch_x86_64() { + let result = normalize_sdk_arch("x86-64").unwrap(); + assert_eq!(result, "x86_64"); + } + + #[test] + fn test_normalize_sdk_arch_x86_64_underscore() { + let result = normalize_sdk_arch("x86_64").unwrap(); + assert_eq!(result, "x86_64"); + } + + #[test] + fn test_normalize_sdk_arch_amd64() { + let result = normalize_sdk_arch("amd64").unwrap(); + assert_eq!(result, "x86_64"); + } + + #[test] + fn test_normalize_sdk_arch_case_insensitive() { + let result = normalize_sdk_arch("AARCH64").unwrap(); + assert_eq!(result, "aarch64"); + } + + #[test] + fn test_normalize_sdk_arch_unsupported() { + let result = normalize_sdk_arch("riscv64"); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("Unsupported SDK architecture")); + } + 
#[test] fn test_get_host_platform_returns_valid_format() { let platform = get_host_platform(); diff --git a/src/utils/stamps.rs b/src/utils/stamps.rs index f91a814..e618402 100644 --- a/src/utils/stamps.rs +++ b/src/utils/stamps.rs @@ -621,11 +621,29 @@ impl fmt::Display for StampValidationError { // Collect unique fix commands, using runs_on hint for SDK install commands let runs_on_ref = self.runs_on.as_deref(); + let local_arch = get_local_arch(); + let mut fixes: Vec = self .missing .iter() .chain(self.stale.iter().map(|(req, _)| req)) - .map(|req| req.fix_command_with_remote(runs_on_ref)) + .flat_map(|req| { + // For SDK install stamps with a different architecture than local, + // offer both --runs-on and --sdk-arch alternatives + if req.component == StampComponent::Sdk + && req.command == StampCommand::Install + && req.host_arch.as_deref() != Some(local_arch) + { + if let Some(arch) = &req.host_arch { + let mut cmds = vec![format!("avocado sdk install --sdk-arch {arch}")]; + if let Some(remote) = runs_on_ref { + cmds.push(format!("avocado sdk install --runs-on {remote}")); + } + return cmds; + } + } + vec![req.fix_command_with_remote(runs_on_ref)] + }) .collect(); fixes.sort(); fixes.dedup(); @@ -967,16 +985,19 @@ pub fn resolve_required_stamps_for_arch( reqs } - // Sign requires runtime build + // Sign requires SDK install + runtime build + // SDK install is needed because signing runs in the SDK container (StampCommand::Sign, StampComponent::Runtime) => { let runtime_name = component_name.expect("Runtime name required"); - vec![StampRequirement::runtime_build(runtime_name)] + vec![sdk_install(), StampRequirement::runtime_build(runtime_name)] } - // Provision requires runtime build + // Provision requires SDK install + runtime build + // SDK install is needed because provisioning runs in the SDK container + // When using --runs-on, this ensures the SDK is installed for the remote's arch (StampCommand::Provision, StampComponent::Runtime) => { let 
runtime_name = component_name.expect("Runtime name required"); - vec![StampRequirement::runtime_build(runtime_name)] + vec![sdk_install(), StampRequirement::runtime_build(runtime_name)] } // Other combinations have no requirements @@ -1279,28 +1300,30 @@ mod tests { #[test] fn test_resolve_required_stamps_sign() { - // Sign requires runtime build + // Sign requires SDK install + runtime build let reqs = resolve_required_stamps( StampCommand::Sign, StampComponent::Runtime, Some("my-runtime"), &[], ); - assert_eq!(reqs.len(), 1); - assert_eq!(reqs[0], StampRequirement::runtime_build("my-runtime")); + assert_eq!(reqs.len(), 2); + assert_eq!(reqs[0], StampRequirement::sdk_install()); + assert_eq!(reqs[1], StampRequirement::runtime_build("my-runtime")); } #[test] fn test_resolve_required_stamps_provision() { - // Provision requires runtime build + // Provision requires SDK install + runtime build let reqs = resolve_required_stamps( StampCommand::Provision, StampComponent::Runtime, Some("my-runtime"), &[], ); - assert_eq!(reqs.len(), 1); - assert_eq!(reqs[0], StampRequirement::runtime_build("my-runtime")); + assert_eq!(reqs.len(), 2); + assert_eq!(reqs[0], StampRequirement::sdk_install()); + assert_eq!(reqs[1], StampRequirement::runtime_build("my-runtime")); } #[test] @@ -2113,25 +2136,46 @@ runtime/my-runtime/build.stamp:::null"#, } #[test] - fn test_validation_error_includes_runs_on_hint() { + fn test_validation_error_includes_sdk_arch_hint_for_different_arch() { let mut result = StampValidationResult::new(); - result.add_missing(StampRequirement::sdk_install_for_arch("aarch64")); + // Use an architecture different from local to trigger --sdk-arch suggestion + let different_arch = if get_local_arch() == "aarch64" { + "x86_64" + } else { + "aarch64" + }; + result.add_missing(StampRequirement::sdk_install_for_arch(different_arch)); - // Without runs_on, fix should be regular install + // Without runs_on, fix should suggest --sdk-arch for different architecture let error 
= result.into_error("Cannot provision"); let msg = error.to_string(); - assert!(msg.contains("avocado sdk install")); - assert!(!msg.contains("--runs-on")); + assert!( + msg.contains(&format!("avocado sdk install --sdk-arch {different_arch}")), + "Expected --sdk-arch suggestion in: {msg}" + ); } #[test] - fn test_validation_error_with_runs_on_includes_remote_in_fix() { + fn test_validation_error_with_runs_on_includes_both_alternatives() { let mut result = StampValidationResult::new(); - result.add_missing(StampRequirement::sdk_install_for_arch("aarch64")); + // Use an architecture different from local to trigger both suggestions + let different_arch = if get_local_arch() == "aarch64" { + "x86_64" + } else { + "aarch64" + }; + result.add_missing(StampRequirement::sdk_install_for_arch(different_arch)); - // With runs_on, fix should include the remote + // With runs_on, fix should include BOTH --sdk-arch and --runs-on alternatives let error = result.into_error_with_runs_on("Cannot provision", Some("user@remote")); let msg = error.to_string(); - assert!(msg.contains("avocado sdk install --runs-on user@remote")); + assert!( + msg.contains(&format!("avocado sdk install --sdk-arch {different_arch}")), + "Expected --sdk-arch suggestion in: {msg}" + ); + assert!( + msg.contains("avocado sdk install --runs-on user@remote"), + "Expected --runs-on suggestion in: {msg}" + ); } } diff --git a/src/utils/volume.rs b/src/utils/volume.rs index ca35a83..fdc3db0 100644 --- a/src/utils/volume.rs +++ b/src/utils/volume.rs @@ -258,6 +258,126 @@ impl VolumeManager { Ok(containers) } + + /// Name prefix for VS Code extension explorer containers + const EXPLORER_CONTAINER_PREFIX: &'static str = "avocado-explorer-"; + + /// Get list of VS Code extension explorer containers using a specific volume. + /// These containers are created by the avocado-devtools VS Code extension + /// to browse volume contents and can be safely stopped automatically. 
+ pub async fn get_explorer_containers_using_volume( + &self, + volume_name: &str, + ) -> Result> { + // Find containers that: + // 1. Use the specified volume + // 2. Have a name matching the explorer pattern (avocado-explorer-*) + let output = AsyncCommand::new(&self.container_tool) + .args([ + "ps", + "-a", + "--filter", + &format!("volume={volume_name}"), + "--filter", + &format!("name={}", Self::EXPLORER_CONTAINER_PREFIX), + "--format", + "{{.ID}}\t{{.Names}}", + ]) + .output() + .await + .with_context(|| "Failed to list explorer containers")?; + + if !output.status.success() { + return Ok(Vec::new()); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + let containers: Vec = stdout + .lines() + .filter(|line| !line.is_empty()) + .filter_map(|line| { + let parts: Vec<&str> = line.split('\t').collect(); + if parts.len() >= 2 { + let id = parts[0]; + let name = parts[1]; + // Double-check the name starts with our prefix + if name.starts_with(Self::EXPLORER_CONTAINER_PREFIX) { + return Some(id.to_string()); + } + } + None + }) + .collect(); + + Ok(containers) + } + + /// Stop and remove VS Code extension explorer containers using a specific volume. + /// Returns the number of containers that were stopped. 
+ pub async fn stop_explorer_containers(&self, volume_name: &str) -> Result { + let containers = self + .get_explorer_containers_using_volume(volume_name) + .await?; + + if containers.is_empty() { + return Ok(0); + } + + if self.verbose { + print_info( + &format!( + "Found {} VS Code explorer container(s) using volume, stopping...", + containers.len() + ), + OutputLevel::Normal, + ); + } + + for container_id in &containers { + // Stop the container gracefully with short timeout + let _ = AsyncCommand::new(&self.container_tool) + .args(["stop", "-t", "1", container_id]) + .output() + .await; + + // Remove the container + let output = AsyncCommand::new(&self.container_tool) + .args(["rm", "-f", container_id]) + .output() + .await + .with_context(|| format!("Failed to remove explorer container {container_id}"))?; + + if self.verbose && output.status.success() { + print_info( + &format!( + "Stopped explorer container: {}", + &container_id[..12.min(container_id.len())] + ), + OutputLevel::Normal, + ); + } + } + + Ok(containers.len()) + } + + /// Remove a docker volume, automatically stopping any VS Code explorer containers first. + /// Unlike force_remove_volume, this only stops known safe containers (explorer containers) + /// and will still fail if other containers are using the volume. + pub async fn remove_volume_with_explorer_cleanup(&self, volume_name: &str) -> Result<()> { + // First, stop any VS Code explorer containers that might be using this volume + let stopped = self.stop_explorer_containers(volume_name).await?; + + if stopped > 0 && self.verbose { + print_info( + &format!("Stopped {stopped} VS Code explorer container(s) before volume removal"), + OutputLevel::Normal, + ); + } + + // Now try to remove the volume + self.remove_volume(volume_name).await + } } /// Information about a docker volume