diff --git a/Cargo.lock b/Cargo.lock index 6d0f3ba..837be3f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -105,17 +105,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "async-trait" -version = "0.1.81" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "atomic-waker" version = "1.1.2" @@ -170,6 +159,16 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bstr" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "bumpalo" version = "3.16.0" @@ -211,9 +210,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.8" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "504bdec147f2cc13c8b57ed9401fd8a147cc66b67ad5cb241394244f2c947549" +checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" dependencies = [ "jobserver", "libc", @@ -247,9 +246,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.14" +version = "4.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c937d4061031a6d0c8da4b9a4f98a172fc2976dfb1c19213a9cf7d0d3c837e36" +checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc" dependencies = [ "clap_builder", "clap_derive", @@ -257,9 +256,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.14" +version = "4.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85379ba512b21a328adf887e85f7742d12e96eb31f3ef077df4ffc26b506ffed" +checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" dependencies = [ "anstream", "anstyle", @@ -311,6 +310,19 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "const-hex" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6" +dependencies = [ + "cfg-if", + "cpufeatures", + "hex", + "proptest", + "serde", +] + [[package]] name = "constant_time_eq" version = "0.1.5" @@ -335,6 +347,25 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.20" @@ -567,6 +598,19 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = "globset" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + [[package]] name = "h2" version = "0.4.5" @@ -740,6 +784,22 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "ignore" 
+version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +dependencies = [ + "crossbeam-deque", + "globset", + "log", + "memchr", + "regex-automata", + "same-file", + "walkdir", + "winapi-util", +] + [[package]] name = "indexmap" version = "2.3.0" @@ -807,6 +867,12 @@ version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + [[package]] name = "lock_api" version = "0.4.12" @@ -862,9 +928,9 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ "hermit-abi", "libc", @@ -903,6 +969,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -1073,6 +1140,22 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proptest" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" +dependencies = [ + "bitflags", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "unarray", +] + [[package]] name = "quick-xml" version = "0.18.1" @@ -1169,6 +1252,15 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + [[package]] name = "redox_syscall" version = "0.5.3" @@ -1369,9 +1461,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.1.9" +version = "2.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ff467073ddaff34c3a39e5b454f25dd982484a26fff50254ca793c56a1b714" +checksum = "9a744401cf50c4fe0c428808d76f6fffd75ff6b041c8226210397522b4dde7da" dependencies = [ "sdd", ] @@ -1384,24 +1476,24 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" -version = "2.1.0" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177258b64c0faaa9ffd3c65cd3262c2bc7e2588dbbd9c1641d0346145c1bbda8" +checksum = "0495e4577c672de8254beb68d01a9b62d0e8a13c099edecdbedccce3223cd29f" [[package]] name = "serde" -version = "1.0.205" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33aedb1a7135da52b7c21791455563facbbcc43d0f0f66165b42c21b3dfb150" +checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.205" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692d6f5ac90220161d6774db30c662202721e64aed9058d2c394f451261420c1" +checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97" dependencies = [ 
"proc-macro2", "quote", @@ -1410,9 +1502,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.122" +version = "1.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" +checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" dependencies = [ "itoa", "memchr", @@ -1488,19 +1580,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sha256" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18278f6a914fa3070aa316493f7d2ddfb9ac86ebc06fa3b83bffda487e9065b0" -dependencies = [ - "async-trait", - "bytes", - "hex", - "sha2", - "tokio", -] - [[package]] name = "simd-adler32" version = "0.3.7" @@ -1553,9 +1632,11 @@ version = "0.3.0" dependencies = [ "chrono", "clap", + "const-hex", "email-address-parser", "env_logger", "futures", + "ignore", "mockito", "once_cell", "rand", @@ -1566,15 +1647,13 @@ dependencies = [ "serde", "serde_json", "serial_test", - "sha256", + "sha2", "simple-home-dir", "thiserror", "tokio", "toml_edit", "uuid", - "walkdir", "yansi", - "yash-fnmatch", "zip 2.1.6", "zip-extract", ] @@ -1599,9 +1678,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.72" +version = "2.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" dependencies = [ "proc-macro2", "quote", @@ -1787,6 +1866,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicase" version = "2.7.0" @@ -2161,17 +2246,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" -[[package]] -name = "yash-fnmatch" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "697c20b479d2e6419e9a073bfdd20e90cbd8540d6c683ee46712e13de650e54f" -dependencies = [ - "regex", - "regex-syntax", - "thiserror", -] - [[package]] name = "zerocopy" version = "0.7.35" diff --git a/Cargo.toml b/Cargo.toml index 9de1123..3048cd3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,8 +18,10 @@ chrono = { version = "0.4.38", default-features = false, features = [ "serde", ] } clap = { version = "4.5.9", features = ["derive"] } +const-hex = "1.12.0" email-address-parser = "2.0.0" futures = "0.3.30" +ignore = { version = "0.4.22", features = ["simd-accel"] } once_cell = "1.19" regex = "1.10.5" reqwest = { version = "0.12.5", features = [ @@ -32,15 +34,17 @@ rpassword = "7.3.1" sanitize-filename = "0.5.0" serde = { version = "1.0.204", features = ["derive"] } serde_json = "1.0.120" -sha256 = "1.5.0" +sha2 = "0.10.8" simple-home-dir = "0.4.0" thiserror = "1.0.63" -tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros"] } +tokio = { version = "1.38.0", features = [ + "rt-multi-thread", + "macros", + "io-util", +] } toml_edit = { version = "0.22.15", features = ["serde"] } uuid = { version = "1.10.0", features = ["serde", "v4"] } -walkdir = "2.5.0" yansi 
= "1.0.1" -yash-fnmatch = "1.1.1" zip = { version = "2.1.3", default-features = false, features = ["deflate"] } zip-extract = "0.1.3" diff --git a/src/dependency_downloader.rs b/src/dependency_downloader.rs index 98575c7..04cf751 100644 --- a/src/dependency_downloader.rs +++ b/src/dependency_downloader.rs @@ -2,7 +2,7 @@ use crate::{ config::{Dependency, GitDependency, HttpDependency}, errors::DownloadError, remote::get_dependency_url_remote, - utils::{read_file, sanitize_dependency_name, sha256_digest}, + utils::{hash_folder, read_file, sanitize_dependency_name, zipfile_hash}, DEPENDENCY_DIR, }; use reqwest::IntoUrl; @@ -18,6 +18,25 @@ use yansi::Paint as _; pub type Result = std::result::Result; +#[derive(Debug, Clone, Default, PartialEq)] +pub struct IntegrityChecksum(pub String); + +impl From for IntegrityChecksum +where + T: Into, +{ + fn from(value: T) -> Self { + let v: String = value.into(); + IntegrityChecksum(v) + } +} + +impl core::fmt::Display for IntegrityChecksum { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0) + } +} + /// Download the dependencies from the list in parallel /// /// Note: the dependencies list should be sorted by name and version @@ -58,15 +77,15 @@ pub async fn download_dependencies( } // un-zip-ing dependencies to dependencies folder -pub fn unzip_dependencies(dependencies: &[Dependency]) -> Result<()> { - dependencies +pub fn unzip_dependencies(dependencies: &[Dependency]) -> Result>> { + let res: Vec<_> = dependencies .iter() - .filter_map(|d| match d { - Dependency::Http(dep) => Some(dep), - _ => None, + .map(|d| match d { + Dependency::Http(dep) => unzip_dependency(dep).map(Some), + _ => Ok(None), }) - .try_for_each(unzip_dependency)?; - Ok(()) + .collect::>>()?; + Ok(res) } #[derive(Debug, Clone)] @@ -103,7 +122,7 @@ pub async fn download_dependency( DownloadResult { name: dep.name.clone(), version: dep.version.clone(), - hash: sha256_digest(dep), + hash: zipfile_hash(dep)?.to_string(), url, } } @@ -123,17 +142,19 @@ pub async fn download_dependency( Ok(res) } -pub fn unzip_dependency(dependency: &HttpDependency) -> Result<()> { +pub fn unzip_dependency(dependency: &HttpDependency) -> Result { let file_name = sanitize_dependency_name(&format!("{}-{}", dependency.name, dependency.version)); let target_name = format!("{}/", file_name); - let current_dir = DEPENDENCY_DIR.join(format!("{file_name}.zip")); - let target = DEPENDENCY_DIR.join(target_name); - let archive = read_file(current_dir).unwrap(); + let zip_path = DEPENDENCY_DIR.join(format!("{file_name}.zip")); + let target_dir = DEPENDENCY_DIR.join(target_name); + let zip_contents = read_file(&zip_path).unwrap(); - zip_extract::extract(Cursor::new(archive), &target, true)?; + zip_extract::extract(Cursor::new(zip_contents), &target_dir, true)?; println!("{}", format!("The dependency {dependency} was unzipped!").green()); - Ok(()) + + hash_folder(&target_dir, Some(zip_path)) + .map_err(|e| DownloadError::IOError { path: target_dir, source: e }) } pub fn clean_dependency_directory() { diff --git a/src/lib.rs b/src/lib.rs index 6c3a981..ec2cb60 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -237,14 +237,18 @@ async fn install_dependency( } } - write_lock(&[dependency.clone()], LockWriteMode::Append)?; + let integrity = match &dependency { + Dependency::Http(dep) => match unzip_dependency(dep) { + Ok(i) => Some(i), + Err(e) => { + cleanup_dependency(&dependency, true)?; + return Err(SoldeerError::DownloadError { dep: dependency.to_string(), source: e }); + } + 
}, + Dependency::Git(_) => None, + }; - if let Dependency::Http(dep) = &dependency { - if let Err(e) = unzip_dependency(dep) { - cleanup_dependency(&dependency, true)?; - return Err(SoldeerError::DownloadError { dep: dependency.to_string(), source: e }); - } - } + write_lock(&[dependency.clone()], &[integrity], LockWriteMode::Append)?; janitor::healthcheck_dependency(&dependency)?; @@ -305,15 +309,15 @@ async fn update(regenerate_remappings: bool, recursive_deps: bool) -> Result<(), } }); - unzip_dependencies(&dependencies) + let integrities = unzip_dependencies(&dependencies) .map_err(|e| SoldeerError::DownloadError { dep: String::new(), source: e })?; healthcheck_dependencies(&dependencies)?; - write_lock(&dependencies, LockWriteMode::Replace)?; - cleanup_after(&dependencies)?; + write_lock(&dependencies, &integrities, LockWriteMode::Replace)?; + if config.remappings_generate { if config_path.to_string_lossy().contains("foundry.toml") { match config.remappings_location { @@ -340,9 +344,7 @@ mod tests { use serial_test::serial; use std::{ env::{self}, - fs::{ - create_dir_all, remove_dir, remove_dir_all, remove_file, File, {self}, - }, + fs::{self, create_dir_all, remove_dir, remove_dir_all, remove_file, File}, io::Write, path::{Path, PathBuf}, }; @@ -1278,7 +1280,6 @@ libs = ["dependencies"] let submodules_path = get_current_working_dir().join(".gitmodules"); let lib_path = get_current_working_dir().join("lib"); - let lock_test = get_current_working_dir().join("test").join("soldeer.lock"); //remove it just in case @@ -1286,11 +1287,7 @@ libs = ["dependencies"] let _ = remove_dir_all(&lib_path); let _ = remove_file(&lock_test); - let mut file: std::fs::File = - fs::OpenOptions::new().create_new(true).write(true).open(&submodules_path).unwrap(); - if let Err(e) = write!(file, "this is a test file") { - eprintln!("Couldn't write to the config file: {}", e); - } + fs::write(&submodules_path, "this is a test file").unwrap(); let _ = create_dir_all(&lib_path); let target_config = define_config(true); diff --git a/src/lock.rs b/src/lock.rs index 649ec58..94df8a8 100644 --- a/src/lock.rs +++ b/src/lock.rs @@ -1,5 +1,6 @@ use crate::{ config::Dependency, + dependency_downloader::IntegrityChecksum, errors::LockError, utils::{get_current_working_dir, read_file_to_string}, LOCK_FILE, @@ -12,11 +13,13 @@ pub type Result = std::result::Result; // Top level struct to hold the TOML data. 
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
+#[non_exhaustive]
 pub struct LockEntry {
     name: String,
     version: String,
     source: String,
     checksum: String,
+    integrity: Option<String>,
 }

 impl LockEntry {
     pub fn new(
         name: impl Into<String>,
         version: impl Into<String>,
         source: impl Into<String>,
         checksum: impl Into<String>,
+        integrity: Option<String>,
     ) -> Self {
         LockEntry {
             name: name.into(),
             version: version.into(),
             source: source.into(),
             checksum: checksum.into(),
+            integrity,
         }
     }
 }
@@ -63,7 +68,11 @@ pub enum LockWriteMode {
     Append,
 }

-pub fn write_lock(dependencies: &[Dependency], mode: LockWriteMode) -> Result<()> {
+pub fn write_lock(
+    dependencies: &[Dependency],
+    integrity_checksums: &[Option<IntegrityChecksum>],
+    mode: LockWriteMode,
+) -> Result<()> {
     let lock_file: PathBuf = if cfg!(test) {
         get_current_working_dir().join("test").join("soldeer.lock")
     } else {
@@ -79,16 +88,17 @@
     }

     let mut entries = read_lock()?;
-    for dep in dependencies {
+    for (dep, integrity) in dependencies.iter().zip(integrity_checksums.iter()) {
         let entry = match dep {
             Dependency::Http(dep) => LockEntry::new(
                 &dep.name,
                 &dep.version,
                 dep.url.as_ref().unwrap(),
                 dep.checksum.as_ref().unwrap(),
+                integrity.clone().map(|c| c.to_string()),
             ),
             Dependency::Git(dep) => {
-                LockEntry::new(&dep.name, &dep.version, &dep.git, dep.rev.as_ref().unwrap())
+                LockEntry::new(&dep.name, &dep.version, &dep.git, dep.rev.as_ref().unwrap(), None)
             }
         };
         // check for entry already existing
@@ -198,12 +208,14 @@ name = "@openzeppelin-contracts"
 version = "2.3.0"
 source = "registry+https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.3.0.zip"
 checksum = "a2d469062adeb62f7a4aada78237acae4ad3c168ba65c3ac9c76e290332c11ec"
+integrity = "deadbeef"

 [[dependencies]]
 name = "@prb-test"
 version = "0.6.5"
 source = "registry+https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@prb-test~0.6.5.zip"
 checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016"
+integrity = "deadbeef"
 "#;
         File::create(lock_file).unwrap().write_all(lock_contents.as_bytes()).unwrap();
     }
@@ -249,7 +261,7 @@ checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016"
             checksum: Some("5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016".to_string())
         });
         let dependencies = vec![dependency.clone()];
-        write_lock(&dependencies, LockWriteMode::Append).unwrap();
+        write_lock(&dependencies, &[Some("deadbeef".into())], LockWriteMode::Append).unwrap();
         assert!(matches!(lock_check(&dependency, true), Err(LockError::DependencyInstalled(_))));

         let contents = read_file_to_string(lock_file);
@@ -261,6 +273,7 @@ name = "@openzeppelin-contracts"
 version = "2.5.0"
 source = "https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.5.0.zip"
 checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016"
+integrity = "deadbeef"
 "#
         );
         assert!(matches!(lock_check(&dependency, true), Err(LockError::DependencyInstalled(_))));
@@ -279,7 +292,7 @@ checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016"
             checksum: Some("5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016".to_string())
         });
         dependencies.push(dependency.clone());
-        write_lock(&dependencies, LockWriteMode::Append).unwrap();
+        write_lock(&dependencies, &[Some("deadbeef".into())], LockWriteMode::Append).unwrap();

         let contents = read_file_to_string(lock_file);
         assert_eq!(
@@ -289,18 +302,21 @@ name = 
"@openzeppelin-contracts" version = "2.3.0" source = "registry+https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.3.0.zip" checksum = "a2d469062adeb62f7a4aada78237acae4ad3c168ba65c3ac9c76e290332c11ec" +integrity = "deadbeef" [[dependencies]] name = "@openzeppelin-contracts-2" version = "2.6.0" source = "https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.6.0.zip" checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" +integrity = "deadbeef" [[dependencies]] name = "@prb-test" version = "0.6.5" source = "registry+https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@prb-test~0.6.5.zip" checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" +integrity = "deadbeef" "# ); @@ -318,7 +334,8 @@ checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" checksum: Some("5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016".to_string()) }); let dependencies = vec![dependency.clone()]; - write_lock(&dependencies, LockWriteMode::Append).unwrap(); + write_lock(&dependencies, &[Some(IntegrityChecksum::default())], LockWriteMode::Append) + .unwrap(); match remove_lock(&dependency) { Ok(_) => {} @@ -346,7 +363,12 @@ checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" checksum: Some("5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016".to_string()) }); let dependencies = vec![dependency.clone(), dependency2.clone()]; - write_lock(&dependencies, LockWriteMode::Append).unwrap(); + write_lock( + &dependencies, + &[Some("deadbeef".into()), Some("deadbeef".into())], + LockWriteMode::Append, + ) + .unwrap(); match remove_lock(&dependency) { Ok(_) => {} @@ -363,6 +385,7 @@ name = "@openzeppelin-contracts2" version = "2.5.0" source = "https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.5.0.zip" checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" +integrity = "deadbeef" "# ); } @@ -379,7 +402,7 @@ checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" }); let dependencies = vec![dependency.clone()]; - write_lock(&dependencies, LockWriteMode::Append).unwrap(); + write_lock(&dependencies, &[Some("deadbeef".into())], LockWriteMode::Append).unwrap(); match remove_lock(&Dependency::Http(HttpDependency { name: "non-existent".to_string(), @@ -401,6 +424,7 @@ name = "@openzeppelin-contracts" version = "2.5.0" source = "https://github.com/mario-eth/soldeer-versions/raw/main/all_versions/@openzeppelin-contracts~2.5.0.zip" checksum = "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016" +integrity = "deadbeef" "# ); } diff --git a/src/utils.rs b/src/utils.rs index d680d57..35d0780 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,16 +1,24 @@ +use crate::{ + config::HttpDependency, dependency_downloader::IntegrityChecksum, errors::DownloadError, +}; +use ignore::{WalkBuilder, WalkState}; use once_cell::sync::Lazy; use regex::Regex; +use sha2::{Digest, Sha256}; use simple_home_dir::home_dir; use std::{ env, fs::{self, File}, io::{BufReader, Read, Write}, + os::unix::ffi::OsStrExt as _, path::{Path, PathBuf}, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, Mutex, + }, }; use yansi::Paint as _; -use crate::config::HttpDependency; - static GIT_SSH_REGEX: Lazy = Lazy::new(|| { Regex::new(r"^(?:git@github\.com|git@gitlab)").expect("git ssh regex should compile") }); @@ -152,25 +160,112 @@ pub fn 
sanitize_dependency_name(dependency_name: &str) -> String {
     sanitize_filename::sanitize_with_options(dependency_name, options)
 }

-#[cfg(not(test))]
-pub fn sha256_digest(dependency: &HttpDependency) -> String {
+pub fn zipfile_hash(dependency: &HttpDependency) -> Result<IntegrityChecksum, DownloadError> {
     use crate::DEPENDENCY_DIR;

     let file_name =
         sanitize_dependency_name(&format!("{}-{}.zip", dependency.name, dependency.version));
+    let path = DEPENDENCY_DIR.join(&file_name);
+    hash_file(&path).map_err(|e| DownloadError::IOError { path, source: e })
+}

-    let bytes = std::fs::read(DEPENDENCY_DIR.join(file_name)).unwrap(); // Vec<u8>
-    sha256::digest(bytes)
+/// Hash the contents of a Reader with SHA256
+pub fn hash_content<R: Read>(content: &mut R) -> [u8; 32] {
+    let mut hasher = <Sha256 as Digest>::new();
+    let mut buf = [0; 1024];
+    while let Ok(size) = content.read(&mut buf) {
+        if size == 0 {
+            break;
+        }
+        hasher.update(&buf[0..size]);
+    }
+    hasher.finalize().into()
 }

-#[cfg(test)]
-pub fn sha256_digest(_dependency: &HttpDependency) -> String {
-    "5019418b1e9128185398870f77a42e51d624c44315bb1572e7545be51d707016".to_string()
+/// Walk a folder and compute the SHA256 hash of all non-hidden and non-gitignored files inside the
+/// dir, combining them into a single hash.
+///
+/// We hash the name of the folders and files too, so we can check the integrity of their names.
+///
+/// Since the folder contains the zip file still, we need to skip it. TODO: can we remove the zip
+/// file right after unzipping so this is not necessary?
+pub fn hash_folder(
+    folder_path: impl AsRef<Path>,
+    ignore_path: Option<PathBuf>,
+) -> Result<IntegrityChecksum, std::io::Error> {
+    // perf: it's easier to check a boolean than to compare paths, so when we find the zip we skip
+    // the check afterwards
+    let seen_ignore_path = Arc::new(AtomicBool::new(ignore_path.is_none()));
+    // a list of hashes, one for each DirEntry
+    let hashes = Arc::new(Mutex::new(Vec::with_capacity(100)));
+    // we use a parallel walker to speed things up
+    let walker = WalkBuilder::new(folder_path).hidden(false).build_parallel();
+    walker.run(|| {
+        let ignore_path = ignore_path.clone();
+        let seen_ignore_path = Arc::clone(&seen_ignore_path);
+        let hashes = Arc::clone(&hashes);
+        // function executed for each DirEntry
+        Box::new(move |result| {
+            let Ok(entry) = result else {
+                return WalkState::Continue;
+            };
+            let path = entry.path();
+            // check if that file is `ignore_path`, unless we've seen it already
+            if !seen_ignore_path.load(Ordering::SeqCst) {
+                let ignore_path = ignore_path
+                    .as_ref()
+                    .expect("ignore_path should always be Some when seen_ignore_path is false");
+                if path == ignore_path {
+                    // record that we've seen the zip file
+                    seen_ignore_path.swap(true, Ordering::SeqCst);
+                    return WalkState::Continue;
+                }
+            }
+            // first hash the filename/dirname to make sure it can't be renamed or removed
+            let mut hasher = <Sha256 as Digest>::new();
+            hasher.update(path.as_os_str().as_bytes());
+            // for files, also hash the contents
+            if let Some(true) = entry.file_type().map(|t| t.is_file()) {
+                if let Ok(file) = File::open(path) {
+                    let mut reader = BufReader::new(file);
+                    let hash = hash_content(&mut reader);
+                    hasher.update(hash);
+                }
+            }
+            // record the hash for that file/folder in the list
+            let hash: [u8; 32] = hasher.finalize().into();
+            let mut hashes_lock = hashes.lock().expect("mutex should not be poisoned");
+            hashes_lock.push(hash);
+            WalkState::Continue
+        })
+    });
+
+    // sort hashes
+    let mut hasher = <Sha256 as Digest>::new();
+    let mut hashes = hashes.lock().expect("mutex should not be poisoned");
+    hashes.sort_unstable();
+    // hash the hashes (yo dawg...)
+ for hash in hashes.iter() { + hasher.update(hash); + } + let hash: [u8; 32] = hasher.finalize().into(); + Ok(const_hex::encode(hash).into()) +} + +/// Compute the SHA256 hash of the contents of a file +pub fn hash_file(path: impl AsRef) -> Result { + let file = File::open(path)?; + let mut reader = BufReader::new(file); + let bytes = hash_content(&mut reader); + Ok(const_hex::encode(bytes).into()) } #[cfg(test)] mod tests { + use rand::{distributions::Alphanumeric, Rng as _}; + use super::*; + use std::fs; #[test] fn filename_sanitization() { @@ -192,4 +287,77 @@ mod tests { assert_eq!(sanitize_dependency_name("valid~1.0.0"), "valid~1.0.0"); assert_eq!(sanitize_dependency_name("valid~1*0.0"), "valid~1-0.0"); } + + #[test] + fn test_hash_content() { + let mut content = "this is a test file".as_bytes(); + let hash = hash_content(&mut content); + assert_eq!( + const_hex::encode(hash), + "5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625".to_string() + ); + } + + #[test] + fn test_hash_content_content_sensitive() { + let mut content = "foobar".as_bytes(); + let hash = hash_content(&mut content); + let mut content2 = "baz".as_bytes(); + let hash2 = hash_content(&mut content2); + assert_ne!(hash, hash2); + } + + #[test] + fn test_hash_file() { + let file = create_random_file("test", "txt"); + let hash = hash_file(&file).unwrap(); + fs::remove_file(&file).unwrap(); + assert_eq!(hash, "5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625".into()); + } + + #[test] + fn test_hash_folder() { + let folder = create_test_folder("test", "test_hash_folder"); + let hash = hash_folder(&folder, None).unwrap(); + fs::remove_dir_all(&folder).unwrap(); + assert_eq!(hash, "b0bbe5dbf490a7120cce269564ed7a1f1f016ff50ccbb38eb288849f0ce7ab49".into()); + } + + #[test] + fn test_hash_folder_path_sensitive() { + let folder1 = create_test_folder("test", "test_hash_folder_path_sensitive"); + let folder2 = create_test_folder("test", "test_hash_folder_path_sensitive2"); + let hash1 = hash_folder(&folder1, None).unwrap(); + let hash2 = hash_folder(&folder2, None).unwrap(); + fs::remove_dir_all(&folder1).unwrap(); + fs::remove_dir_all(&folder2).unwrap(); + assert_ne!(hash1, hash2); + } + + #[test] + fn test_hash_folder_ignore_path() { + let folder = create_test_folder("test", "test_hash_folder_ignore_path"); + let hash1 = hash_folder(&folder, None).unwrap(); + let hash2 = hash_folder(&folder, Some(folder.join("a.txt"))).unwrap(); + fs::remove_dir_all(&folder).unwrap(); + assert_ne!(hash1, hash2); + } + + fn create_random_file(target_dir: impl AsRef, extension: &str) -> PathBuf { + let s: String = + rand::thread_rng().sample_iter(&Alphanumeric).take(7).map(char::from).collect(); + let random_file = target_dir.as_ref().join(format!("random{}.{}", s, extension)); + fs::write(&random_file, "this is a test file").expect("could not write to test file"); + random_file + } + + fn create_test_folder(target_dir: impl AsRef, dirname: &str) -> PathBuf { + let test_folder = target_dir.as_ref().join(dirname); + fs::create_dir(&test_folder).expect("could not create test folder"); + fs::write(test_folder.join("a.txt"), "this is a test file") + .expect("could not write to test file a"); + fs::write(test_folder.join("b.txt"), "this is a second test file") + .expect("could not write to test file b"); + test_folder + } } diff --git a/src/versioning.rs b/src/versioning.rs index ea9dce6..9126de1 100644 --- a/src/versioning.rs +++ b/src/versioning.rs @@ -2,8 +2,9 @@ use crate::{ auth::get_token, errors::{AuthError, 
PublishError}, remote::get_project_id, - utils::{get_base_url, get_current_working_dir, read_file, read_file_to_string}, + utils::{get_base_url, read_file}, }; +use ignore::{WalkBuilder, WalkState}; use regex::Regex; use reqwest::{ header::{HeaderMap, HeaderValue, AUTHORIZATION, CONTENT_TYPE}, @@ -14,10 +15,9 @@ use std::{ fs::{remove_file, File}, io::{self, Read, Write}, path::{Path, PathBuf}, + sync::{Arc, Mutex}, }; -use walkdir::WalkDir; use yansi::Paint as _; -use yash_fnmatch::{without_escape, Pattern}; use zip::{write::SimpleFileOptions, CompressionMethod, ZipWriter}; pub type Result = std::result::Result; @@ -110,69 +110,33 @@ fn zip_file( Ok(zip_file_path) } -fn filter_files_to_copy(root_directory_path: &Path) -> Vec { - let ignore_files: Vec = read_ignore_file(); - - let root_directory: &str = &(root_directory_path.to_str().unwrap().to_owned() + "/"); - let mut files_to_copy: Vec = Vec::new(); - for entry in WalkDir::new(root_directory).into_iter().filter_map(|e| e.ok()) { - let is_dir = entry.path().is_dir(); - let file_path: String = entry.path().to_str().unwrap().to_string(); - if file_path.is_empty() || is_dir { - continue; - } - let mut found: bool = false; - for ignore_file in ignore_files.iter() { - let p = Pattern::parse(without_escape(ignore_file)).unwrap(); - let exists = p.find(&file_path); - if exists.is_some() { - found = true; - break; +fn filter_files_to_copy(root_directory_path: impl AsRef) -> Vec { + let files_to_copy = Arc::new(Mutex::new(Vec::with_capacity(100))); + let walker = WalkBuilder::new(root_directory_path) + .add_custom_ignore_filename(".soldeerignore") + .hidden(false) + .build_parallel(); + walker.run(|| { + let files_to_copy = Arc::clone(&files_to_copy); + // function executed for each DirEntry + Box::new(move |result| { + let Ok(entry) = result else { + return WalkState::Continue; + }; + let path = entry.path(); + if path.is_dir() { + return WalkState::Continue; } - } - - if found { - continue; - } - - files_to_copy.push(entry.path().to_path_buf()); - } - files_to_copy -} - -fn read_ignore_file() -> Vec { - let mut current_dir = get_current_working_dir(); - if cfg!(test) { - current_dir = get_current_working_dir().join("test"); - } - let gitignore = current_dir.join(".gitignore"); - let soldeerignore = current_dir.join(".soldeerignore"); - - let mut files: Vec = Vec::new(); - - if soldeerignore.exists() { - let contents = read_file_to_string(&soldeerignore); - let current_read_file = contents.lines(); - files.append(&mut escape_lines(current_read_file.collect())); - } - - if gitignore.exists() { - let contents = read_file_to_string(&gitignore); - let current_read_file = contents.lines(); - files.append(&mut escape_lines(current_read_file.collect())); - } - - files -} - -fn escape_lines(lines: Vec<&str>) -> Vec { - let mut escaped_liens: Vec = vec![]; - for line in lines { - if !line.trim().is_empty() { - escaped_liens.push(line.trim().to_string()); - } - } - escaped_liens + let mut files_to_copy = files_to_copy.lock().expect("mutex should not be poisoned"); + files_to_copy.push(path.to_path_buf()); + WalkState::Continue + }) + }); + + Arc::into_inner(files_to_copy) + .expect("Arc should have no other strong references") + .into_inner() + .expect("mutex should not be poisoned") } async fn push_to_repo( @@ -238,78 +202,12 @@ async fn push_to_repo( #[cfg(test)] mod tests { use super::*; + use crate::utils::get_current_working_dir; use io::Cursor; use rand::{distributions::Alphanumeric, Rng}; use serial_test::serial; use std::fs::{self, 
create_dir_all, remove_dir_all, remove_file}; - #[test] - #[serial] - fn read_ignore_files_only_soldeerignore() { - let soldeerignore = define_ignore_file(false); - let gitignore = define_ignore_file(true); - let _ = remove_file(gitignore); - let ignore_contents = r#" -*.toml -*.zip - "#; - write_to_ignore(&soldeerignore, ignore_contents); - let expected_results: Vec = vec!["*.toml".to_string(), "*.zip".to_string()]; - - assert_eq!(read_ignore_file(), expected_results); - let _ = remove_file(soldeerignore); - } - - #[test] - #[serial] - fn read_ignore_files_only_gitignore() { - let soldeerignore = define_ignore_file(false); - let gitignore = define_ignore_file(true); - let _ = remove_file(soldeerignore); - - let ignore_contents = r#" -*.toml -*.zip - "#; - write_to_ignore(&gitignore, ignore_contents); - let expected_results: Vec = vec!["*.toml".to_string(), "*.zip".to_string()]; - - assert_eq!(read_ignore_file(), expected_results); - let _ = remove_file(gitignore); - } - - #[test] - #[serial] - fn read_ignore_files_both_gitignore_soldeerignore() { - let soldeerignore = define_ignore_file(false); - let gitignore = define_ignore_file(true); - let _ = remove_file(&soldeerignore); - let _ = remove_file(&gitignore); - - let ignore_contents_git = r#" -*.toml -*.zip - "#; - write_to_ignore(&gitignore, ignore_contents_git); - - let ignore_contents_soldeer = r#" - *.sol - *.txt - "#; - write_to_ignore(&soldeerignore, ignore_contents_soldeer); - - let expected_results: Vec = vec![ - "*.sol".to_string(), - "*.txt".to_string(), - "*.toml".to_string(), - "*.zip".to_string(), - ]; - - assert_eq!(read_ignore_file(), expected_results); - let _ = remove_file(gitignore); - let _ = remove_file(soldeerignore); - } - #[test] #[serial] fn filter_only_files_success() { @@ -317,16 +215,16 @@ mod tests { let _ = remove_dir_all(&target_dir); let _ = create_dir_all(&target_dir); - let soldeerignore = define_ignore_file(false); - let gitignore = define_ignore_file(true); + let soldeerignore = define_ignore_file(&target_dir, false); + let gitignore = define_ignore_file(&target_dir, true); let _ = remove_file(soldeerignore); let mut ignored_files = vec![]; - let mut filtered_files = vec![]; - ignored_files.push(create_random_file(&target_dir, "toml".to_string())); - ignored_files.push(create_random_file(&target_dir, "zip".to_string())); - ignored_files.push(create_random_file(&target_dir, "toml".to_string())); - filtered_files.push(create_random_file(&target_dir, "txt".to_string())); + let mut filtered_files = vec![gitignore.clone()]; + ignored_files.push(create_random_file(&target_dir, "toml")); + ignored_files.push(create_random_file(&target_dir, "zip")); + ignored_files.push(create_random_file(&target_dir, "toml")); + filtered_files.push(create_random_file(&target_dir, "txt")); let ignore_contents_git = r#" *.toml @@ -336,8 +234,9 @@ mod tests { let result = filter_files_to_copy(&target_dir); assert_eq!(filtered_files.len(), result.len()); - let file = Path::new(&filtered_files[0]); - assert_eq!(file, result[0]); + for res in result { + assert!(filtered_files.contains(&res), "File {:?} not found in filtered files", res); + } let _ = remove_file(gitignore); let _ = remove_dir_all(target_dir); @@ -350,13 +249,13 @@ mod tests { let _ = remove_dir_all(&target_dir); let _ = create_dir_all(&target_dir); - let soldeerignore = define_ignore_file(false); - let gitignore = define_ignore_file(true); + let soldeerignore = define_ignore_file(&target_dir, false); + let gitignore = define_ignore_file(&target_dir, true); let _ = 
remove_file(soldeerignore); // divide ignored vs filtered files to check them later let mut ignored_files = vec![]; - let mut filtered_files = vec![]; + let mut filtered_files = vec![gitignore.clone()]; // initial dir to test the ignore let target_dir = get_current_working_dir().join("test").join("test_push"); @@ -379,27 +278,27 @@ mod tests { // --- --- --- --- zip <= ignored // --- --- --- --- toml <= ignored - let random_dir = create_random_directory(&target_dir, "".to_string()); - let broadcast_dir = create_random_directory(&target_dir, "broadcast".to_string()); + let random_dir = create_random_directory(&target_dir, None); + let broadcast_dir = create_random_directory(&target_dir, Some("broadcast")); - let the_31337_dir = create_random_directory(&broadcast_dir, "31337".to_string()); - let random_dir_in_broadcast = create_random_directory(&broadcast_dir, "".to_string()); - let dry_run_dir = create_random_directory(&random_dir_in_broadcast, "dry_run".to_string()); + let the_31337_dir = create_random_directory(&broadcast_dir, Some("31337")); + let random_dir_in_broadcast = create_random_directory(&broadcast_dir, None); + let dry_run_dir = create_random_directory(&random_dir_in_broadcast, Some("dry_run")); - ignored_files.push(create_random_file(&random_dir, "toml".to_string())); - filtered_files.push(create_random_file(&random_dir, "zip".to_string())); + ignored_files.push(create_random_file(&random_dir, "toml")); + filtered_files.push(create_random_file(&random_dir, "zip")); - ignored_files.push(create_random_file(&broadcast_dir, "toml".to_string())); - filtered_files.push(create_random_file(&broadcast_dir, "zip".to_string())); + ignored_files.push(create_random_file(&broadcast_dir, "toml")); + filtered_files.push(create_random_file(&broadcast_dir, "zip")); - ignored_files.push(create_random_file(&the_31337_dir, "toml".to_string())); - ignored_files.push(create_random_file(&the_31337_dir, "zip".to_string())); + ignored_files.push(create_random_file(&the_31337_dir, "toml")); + ignored_files.push(create_random_file(&the_31337_dir, "zip")); - filtered_files.push(create_random_file(&random_dir_in_broadcast, "zip".to_string())); - filtered_files.push(create_random_file(&random_dir_in_broadcast, "toml".to_string())); + filtered_files.push(create_random_file(&random_dir_in_broadcast, "zip")); + filtered_files.push(create_random_file(&random_dir_in_broadcast, "toml")); - ignored_files.push(create_random_file(&dry_run_dir, "zip".to_string())); - ignored_files.push(create_random_file(&dry_run_dir, "toml".to_string())); + ignored_files.push(create_random_file(&dry_run_dir, "zip")); + ignored_files.push(create_random_file(&dry_run_dir, "toml")); let ignore_contents_git = r#" *.toml @@ -417,7 +316,7 @@ mod tests { continue; } - assert!(filtered_files.contains(&res)); + assert!(filtered_files.contains(&res), "File {:?} not found in filtered files", res); } let _ = remove_file(gitignore); @@ -441,11 +340,11 @@ mod tests { // --- --- --- random_file_3.txt // --- --- random_file_2.txt // --- random_file_1.txt - let random_dir_1 = create_random_directory(&target_dir, "".to_string()); - let random_dir_2 = create_random_directory(Path::new(&random_dir_1), "".to_string()); - let random_file_1 = create_random_file(&target_dir, "txt".to_string()); - let random_file_2 = create_random_file(Path::new(&random_dir_1), "txt".to_string()); - let random_file_3 = create_random_file(Path::new(&random_dir_2), "txt".to_string()); + let random_dir_1 = create_random_directory(&target_dir, None); + let random_dir_2 = 
create_random_directory(Path::new(&random_dir_1), None); + let random_file_1 = create_random_file(&target_dir, "txt"); + let random_file_2 = create_random_file(Path::new(&random_dir_1), "txt"); + let random_file_3 = create_random_file(Path::new(&random_dir_2), "txt"); let files_to_copy: Vec = vec![random_file_1.clone(), random_file_3.clone(), random_file_2.clone()]; @@ -483,48 +382,39 @@ mod tests { let _ = remove_dir_all(&target_dir_unzip); } - fn define_ignore_file(git: bool) -> PathBuf { + fn define_ignore_file(target_dir: impl AsRef, git: bool) -> PathBuf { let mut target = ".soldeerignore"; if git { target = ".gitignore"; } - get_current_working_dir().join("test").join(target) + target_dir.as_ref().to_path_buf().join(target) } - fn write_to_ignore(target_file: &PathBuf, content: &str) { - if target_file.exists() { - let _ = remove_file(target_file); - } - let mut file: std::fs::File = - fs::OpenOptions::new().create_new(true).write(true).open(target_file).unwrap(); - if let Err(e) = write!(file, "{}", content) { - eprintln!("Couldn't write to the config file: {}", e); + fn write_to_ignore(target_file: impl AsRef, contents: &str) { + if target_file.as_ref().exists() { + let _ = remove_file(&target_file); } + fs::write(&target_file, contents).expect("Could not write to ignore file"); } - fn create_random_file(target_dir: &Path, extension: String) -> PathBuf { + fn create_random_file(target_dir: impl AsRef, extension: &str) -> PathBuf { let s: String = rand::thread_rng().sample_iter(&Alphanumeric).take(7).map(char::from).collect(); - let target = target_dir.join(format!("random{}.{}", s, extension)); - let mut file: std::fs::File = - fs::OpenOptions::new().create_new(true).write(true).open(&target).unwrap(); - if let Err(e) = write!(file, "this is a test file") { - eprintln!("Couldn't write to the config file: {}", e); - } + let target = target_dir.as_ref().join(format!("random{}.{}", s, extension)); + fs::write(&target, "this is a test file").expect("Could not write to test file"); target } - fn create_random_directory(target_dir: &Path, name: String) -> PathBuf { - let s: String = - rand::thread_rng().sample_iter(&Alphanumeric).take(7).map(char::from).collect(); - if name.is_empty() { - let target = target_dir.join(format!("random{}", s)); - let _ = create_dir_all(&target); - target - } else { - let target = target_dir.join(name); - let _ = create_dir_all(&target); - target - } + fn create_random_directory(target_dir: impl AsRef, name: Option<&str>) -> PathBuf { + let target = match name { + Some(name) => target_dir.as_ref().join(name), + None => { + let s: String = + rand::thread_rng().sample_iter(&Alphanumeric).take(7).map(char::from).collect(); + target_dir.as_ref().join(format!("random{}", s)) + } + }; + let _ = create_dir_all(&target); + target } }
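A minimal sketch of how the new integrity data could be consumed after install. It assumes `hash_folder` and `IntegrityChecksum` are importable as introduced in `src/utils.rs` and `src/dependency_downloader.rs` above; the `verify_integrity` helper, its paths, and its error handling are illustrative only and not part of this patch:

```rust
use std::path::Path;

// Hypothetical post-install check: recompute the folder hash the same way
// `unzip_dependency` does and compare it to the `integrity` value recorded
// in soldeer.lock. No `ignore_path` is passed here, assuming the zip file
// was already removed from the dependency directory.
fn verify_integrity(install_dir: &Path, expected: &str) -> Result<(), String> {
    let actual = hash_folder(install_dir, None)
        .map_err(|e| format!("could not hash {}: {e}", install_dir.display()))?;
    if actual.to_string() != expected {
        return Err(format!("integrity mismatch: expected {expected}, got {actual}"));
    }
    Ok(())
}
```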