diff --git a/src/cli.rs b/src/cli.rs
index dde71420..9a43e77a 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -843,6 +843,42 @@ pub fn cli() -> Command {
                     .help("Set timeout for download in seconds")
                     .value_parser(clap::value_parser!(u64))
                 )
+
+                .arg(Arg::new("recursive")
+                    .action(ArgAction::SetTrue)
+                    .required(false)
+                    .long("recursive")
+                    .help("Download the sources and all the dependency sources")
+                )
+
+                .arg(Arg::new("image")
+                    .required(false)
+                    .value_name("IMAGE NAME")
+                    .short('I')
+                    .long("image")
+                    .help("Name of the Docker image to use")
+                    .long_help(indoc::indoc!(r#"
+                        Name of the Docker image to use.
+
+                        Required because the dependency tree might look different on different
+                        images because of conditions on dependencies.
+                    "#))
+                )
+
+                .arg(Arg::new("env")
+                    .required(false)
+                    .action(ArgAction::Append)
+                    .short('E')
+                    .long("env")
+                    .value_parser(env_pass_validator)
+                    .help("Additional env to be passed when building packages")
+                    .long_help(indoc::indoc!(r#"
+                        Additional env to be passed when building packages.
+
+                        Required because the dependency tree might look different with a different
+                        environment because of conditions on dependencies.
+                    "#))
+                )
             )
             .subcommand(Command::new("of")
                 .about("Get the paths of the sources of a package")
diff --git a/src/commands/source/download.rs b/src/commands/source/download.rs
index ff9ba9a0..ed21d632 100644
--- a/src/commands/source/download.rs
+++ b/src/commands/source/download.rs
@@ -8,28 +8,29 @@
 // SPDX-License-Identifier: EPL-2.0
 //
 
+use std::collections::HashSet;
 use std::concat;
+use std::fmt;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use anyhow::anyhow;
-use anyhow::Context;
-use anyhow::Error;
-use anyhow::Result;
+use anyhow::{anyhow, Context, Error, Result};
+use ascii_table::{Align, AsciiTable};
 use clap::ArgMatches;
-use futures::stream::{FuturesUnordered, StreamExt};
+use futures::stream::{FuturesOrdered, StreamExt};
 use regex::Regex;
 use tokio::io::AsyncWriteExt;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{info, trace, warn};
+use tracing::{debug, error, info, trace, warn};
 
-use crate::config::*;
-use crate::package::Package;
-use crate::package::PackageName;
-use crate::package::PackageVersionConstraint;
+use crate::config::Configuration;
+use crate::package::condition::ConditionData;
+use crate::package::{Dag, Package, PackageName, PackageVersionConstraint};
 use crate::repository::Repository;
-use crate::source::*;
+use crate::source::{SourceCache, SourceEntry};
+use crate::util::docker::ImageNameLookup;
 use crate::util::progress::ProgressBars;
+use crate::util::EnvironmentVariableName;
 
 const NUMBER_OF_MAX_CONCURRENT_DOWNLOADS: usize = 100;
 const APP_USER_AGENT: &str = concat!{env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")};
@@ -42,6 +43,17 @@ enum DownloadResult {
     MarkedManual,
 }
 
+impl fmt::Display for DownloadResult {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            DownloadResult::Forced => write!(f, "forced"),
+            DownloadResult::Skipped => write!(f, "skipped"),
+            DownloadResult::Succeeded => write!(f, "succeeded"),
+            DownloadResult::MarkedManual => write!(f, "marked manual"),
+        }
+    }
+}
+
 /// A wrapper around the indicatif::ProgressBar
 ///
 /// A wrapper around the indicatif::ProgressBar that is used to synchronize status information from
@@ -104,6 +116,12 @@ impl ProgressWrapper {
     }
 }
 
+struct DownloadJob {
+    package: Package,
+    source_entry: SourceEntry,
+    download_result: Result<DownloadResult>,
+}
+
 async fn perform_download(
     source: &SourceEntry,
     progress: Arc<Mutex<ProgressWrapper>>,
@@ -270,6 +288,7 @@ pub async fn download(
     progressbars: ProgressBars,
 ) -> Result<()> {
     let force = matches.get_flag("force");
+    let recursive = matches.get_flag("recursive");
     let timeout = matches.get_one::<u64>("timeout").copied();
     let cache = PathBuf::from(config.source_cache_root());
     let sc = SourceCache::new(cache);
@@ -294,11 +313,48 @@ pub async fn download(
         NUMBER_OF_MAX_CONCURRENT_DOWNLOADS,
     ));
 
-    let r = find_packages(&repo, pname, pvers, matching_regexp)?;
+    let found_packages = find_packages(&repo, pname, pvers, matching_regexp)?;
+
+    let packages_to_download: HashSet<Package> = match recursive {
+        true => {
+            debug!("Finding package dependencies recursively");
+
+            let image_name_lookup = ImageNameLookup::create(config.docker().images())?;
+            let image_name = matches
+                .get_one::<String>("image")
+                .map(|s| image_name_lookup.expand(s))
+                .transpose()?;
+
+            let additional_env = matches
+                .get_many::<String>("env")
+                .unwrap_or_default()
+                .map(AsRef::as_ref)
+                .map(crate::util::env::parse_to_env)
+                .collect::<Result<Vec<(EnvironmentVariableName, String)>>>()?;
+
+            let condition_data = ConditionData {
+                image_name: image_name.as_ref(),
+                env: &additional_env,
+            };
+
+            let dependencies: Vec<Package> = found_packages
+                .iter()
+                .flat_map(|package| {
+                    Dag::for_root_package((*package).clone(), &repo, None, &condition_data)
+                        .map(|d| d.dag().graph().node_weights().cloned().collect::<Vec<Package>>())
+                        .unwrap()
+                })
+                .collect();
+
+            HashSet::from_iter(dependencies)
+        }
+        false => HashSet::from_iter(found_packages.into_iter().cloned()),
+    };
 
-    let r: Vec<(SourceEntry, Result<DownloadResult>)> = r
+    let download_results: Vec<(SourceEntry, Result<DownloadResult>)> = packages_to_download
         .iter()
         .flat_map(|p| {
+            // Download the sources and wait for all packages to finish
             sc.sources_for(p).into_iter().map(|source| {
                 let download_sema = download_sema.clone();
                 let progressbar = progressbar.clone();
@@ -310,11 +366,107 @@
                 }
             })
         })
-        .collect::<FuturesUnordered<_>>()
+        .collect::<FuturesOrdered<_>>()
         .collect()
         .await;
 
-    super::verify(matches, config, repo, progressbars).await?;
+    let mut r: Vec<DownloadJob> = download_results
+        .into_iter()
+        .zip(packages_to_download)
+        .map(|r| {
+            let download_result = r.0;
+            let package = r.1;
+            DownloadJob {
+                package,
+                source_entry: download_result.0,
+                download_result: download_result.1,
+            }
+        })
+        .collect();
+
+    {
+        let mut ascii_table = AsciiTable::default();
+        ascii_table.set_max_width(
+            terminal_size::terminal_size()
+                .map(|tpl| tpl.0 .0 as usize)
+                .unwrap_or(80),
+        );
+        ascii_table.column(0).set_header("#").set_align(Align::Left);
+        ascii_table
+            .column(1)
+            .set_header("Package name")
+            .set_align(Align::Left);
+        ascii_table
+            .column(2)
+            .set_header("Version")
+            .set_align(Align::Left);
+        ascii_table
+            .column(3)
+            .set_header("Source name")
+            .set_align(Align::Left);
+        ascii_table
+            .column(4)
+            .set_header("Status")
+            .set_align(Align::Left);
+        ascii_table
+            .column(5)
+            .set_header("Path")
+            .set_align(Align::Left);
+
+        let numbers: Vec<usize> = (0..r.len()).map(|n| n + 1).collect();
+        r.sort_by(|a, b| {
+            a.source_entry
+                .package_name()
+                .partial_cmp(b.source_entry.package_name())
+                .unwrap()
+        });
+        let source_paths: Vec<String> = r.iter().map(|v| v.source_entry.path_as_string()).collect();
+
+        let data: Vec<Vec<&dyn fmt::Display>> = r
+            .iter()
+            .enumerate()
+            .map(|(i, v)| {
+                debug!("source_entry: {:#?}", v.source_entry);
+                let n = &numbers[i];
+                let mut row: Vec<&dyn fmt::Display> = vec![
+                    n,
+                    v.source_entry.package_name(),
+                    v.source_entry.package_version(),
+                    v.source_entry.package_source_name(),
+                ];
+                if v.download_result.is_ok() {
+                    let result = v.download_result.as_ref().unwrap() as &dyn fmt::Display;
+                    row.push(result);
+                } else {
+                    row.push(&"failed");
+                }
+                row.push(&source_paths[i]);
+                row
+            })
+            .collect();
+
+        ascii_table.print(data);
+    }
+
+    for p in &r {
+        if p.download_result.is_err() {
+            error!("{}: {:?}", p.source_entry.package_name(), p.download_result);
+        }
+    }
+
+    let packages_to_verify = r.iter().filter_map(|j| {
+        if let Ok(r) = &j.download_result {
+            match r {
+                DownloadResult::MarkedManual => None,
+                _ => Some(&j.package),
+            }
+        } else {
+            None
+        }
+    });
+
+    let sc = SourceCache::new(config.source_cache_root().clone());
+    super::verify_impl(packages_to_verify, &sc, &progressbars).await?;
 
     Ok(())
 }
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 8e1fa830..8e9306ed 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -14,6 +14,7 @@ use anyhow::anyhow;
 use anyhow::Context;
 use anyhow::Error;
 use anyhow::Result;
+use getset::Getters;
 use tracing::trace;
 use url::Url;
 
@@ -37,11 +38,14 @@ impl SourceCache {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Getters)]
 pub struct SourceEntry {
     cache_root: PathBuf,
+    #[getset(get = "pub")]
     package_name: PackageName,
+    #[getset(get = "pub")]
     package_version: PackageVersion,
+    #[getset(get = "pub")]
     package_source_name: String,
     package_source: Source,
 }
@@ -73,6 +77,10 @@ impl SourceEntry {
         })
     }
 
+    pub fn path_as_string(&self) -> String {
+        self.path().to_string_lossy().to_string()
+    }
+
     pub fn url(&self) -> &Url {
         self.package_source.url()
     }
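
Reviewer note, not part of the patch: two details above are easy to miss. `AsciiTable::print` consumes rows of `&dyn fmt::Display` values, which is why `DownloadResult` gains a `fmt::Display` impl, and the `#[getset(get = "pub")]` attributes generate reference-returning getters (assuming getset's usual behavior, e.g. `package_name(&self) -> &PackageName`), so the `SourceEntry` fields can be pushed into a row without cloning. The sketch below shows the same pattern in isolation, restricted to the `ascii_table` calls the patch itself uses (`AsciiTable::default`, `set_max_width`, `column().set_header().set_align`, `print`); the `Status` enum and the row contents are hypothetical stand-ins for `DownloadResult` and the real package data, and it assumes the same `ascii_table` version the crate already depends on.

```rust
use std::fmt;

use ascii_table::{Align, AsciiTable};

// Hypothetical stand-in for the DownloadResult enum from the patch.
enum Status {
    Succeeded,
    Skipped,
}

// A Display impl is what allows a value to be used as a `&dyn fmt::Display`
// table cell; this mirrors the new impl for DownloadResult.
impl fmt::Display for Status {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Status::Succeeded => write!(f, "succeeded"),
            Status::Skipped => write!(f, "skipped"),
        }
    }
}

fn main() {
    let mut table = AsciiTable::default();
    table.set_max_width(80);
    table.column(0).set_header("Package name").set_align(Align::Left);
    table.column(1).set_header("Status").set_align(Align::Left);

    let ok = Status::Succeeded;
    let skipped = Status::Skipped;

    // Each row is a Vec of `&dyn fmt::Display` trait objects, the same shape
    // the patch builds from the SourceEntry getters and the download result.
    let row_a: Vec<&dyn fmt::Display> = vec![&"example-package", &ok];
    let row_b: Vec<&dyn fmt::Display> = vec![&"other-package", &skipped];

    table.print(vec![row_a, row_b]);
}
```

Run as a scratch `main.rs`, this should print a two-row table with the headers defined above.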