diff --git a/Cargo.lock b/Cargo.lock
index 35be07837da5..03aa9025b2ac 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2816,7 +2816,6 @@ dependencies = [
  "tracing",
  "unscanny",
  "url",
- "uv-cache",
  "uv-client",
  "uv-fs",
  "uv-normalize",
diff --git a/crates/requirements-txt/Cargo.toml b/crates/requirements-txt/Cargo.toml
index 6ee3d3098594..d98249995fac 100644
--- a/crates/requirements-txt/Cargo.toml
+++ b/crates/requirements-txt/Cargo.toml
@@ -15,7 +15,6 @@ workspace = true
 [dependencies]
 pep440_rs = { path = "../pep440-rs", features = ["rkyv", "serde"] }
 pep508_rs = { path = "../pep508-rs", features = ["rkyv", "serde", "non-pep508-extensions"] }
-uv-cache = { path = "../uv-cache" }
 uv-client = { path = "../uv-client" }
 uv-fs = { path = "../uv-fs" }
 uv-normalize = { path = "../uv-normalize" }
@@ -25,11 +24,10 @@ async-recursion = { workspace = true }
 fs-err = { workspace = true }
 once_cell = { workspace = true }
 regex = { workspace = true }
-reqwest = { workspace = true }
-reqwest-middleware = { workspace = true }
+reqwest = { workspace = true, optional = true }
+reqwest-middleware = { workspace = true, optional = true }
 serde = { workspace = true }
 thiserror = { workspace = true }
-tokio = { workspace = true }
 tracing = { workspace = true }
 unscanny = { workspace = true }
 url = { workspace = true }
@@ -43,3 +41,8 @@ itertools = { version = "0.12.1" }
 serde_json = { version = "1.0.114" }
 tempfile = { version = "3.9.0" }
 test-case = { version = "3.3.1" }
+tokio = { version = "1.35.1", features = ["macros"] }
+
+[features]
+default = []
+reqwest = ["dep:reqwest", "dep:reqwest-middleware"]
diff --git a/crates/requirements-txt/src/lib.rs b/crates/requirements-txt/src/lib.rs
index c14911c01a3e..9efd5652289e 100644
--- a/crates/requirements-txt/src/lib.rs
+++ b/crates/requirements-txt/src/lib.rs
@@ -39,20 +39,20 @@ use std::fmt::{Display, Formatter};
 use std::io;
 use std::path::{Path, PathBuf};
 
+use async_recursion::async_recursion;
 use serde::{Deserialize, Serialize};
 use tracing::instrument;
 use unscanny::{Pattern, Scanner};
 use url::Url;
-use uv_client::RegistryClient;
-use uv_warnings::warn_user;
 
-use async_recursion::async_recursion;
 use pep508_rs::{
     expand_path_vars, split_scheme, Extras, Pep508Error, Pep508ErrorSource, Requirement, Scheme,
     VerbatimUrl,
 };
+use uv_client::RegistryClient;
 use uv_fs::{normalize_url_path, Simplified};
 use uv_normalize::ExtraName;
+use uv_warnings::warn_user;
 
 /// We emit one of those for each requirements.txt entry
 enum RequirementsTxtStatement {
@@ -326,14 +326,38 @@ impl RequirementsTxt {
     pub async fn parse(
         requirements_txt: impl AsRef<Path>,
         working_dir: impl AsRef<Path>,
-        client: &RegistryClient,
+        client: Option<&RegistryClient>,
     ) -> Result<Self, RequirementsTxtFileError> {
         let requirements_txt = requirements_txt.as_ref();
         let working_dir = working_dir.as_ref();
         let content = if requirements_txt.starts_with("http://")
             | requirements_txt.starts_with("https://")
         {
-            read_url_to_string(&requirements_txt, client).await
+            #[cfg(not(feature = "reqwest"))]
+            {
+                return Err(RequirementsTxtFileError {
+                    file: requirements_txt.to_path_buf(),
+                    error: RequirementsTxtParserError::IO(io::Error::new(
+                        io::ErrorKind::InvalidInput,
+                        "Remote file not supported without `reqwest` feature",
+                    )),
+                });
+            }
+
+            #[cfg(feature = "reqwest")]
+            {
+                let Some(client) = client else {
+                    return Err(RequirementsTxtFileError {
+                        file: requirements_txt.to_path_buf(),
+                        error: RequirementsTxtParserError::IO(io::Error::new(
+                            io::ErrorKind::InvalidInput,
+                            "No client provided for remote file",
+                        )),
+                    });
+                };
+
+                read_url_to_string(&requirements_txt, client).await
+            }
         } else {
             uv_fs::read_to_string(&requirements_txt)
                 .await
@@ -372,7 +396,7 @@ impl RequirementsTxt {
         content: &str,
         working_dir: &Path,
         requirements_dir: &Path,
-        client: &RegistryClient,
+        client: Option<&'async_recursion RegistryClient>,
     ) -> Result<Self, RequirementsTxtParserError> {
         let mut s = Scanner::new(content);
 
@@ -794,6 +818,7 @@ fn parse_value<'a, T>(
 }
 
 /// Fetch the contents of a URL and return them as a string.
+#[cfg(feature = "reqwest")]
 async fn read_url_to_string(
     path: impl AsRef<Path>,
     client: &RegistryClient,
 ) -> Result<String, RequirementsTxtParserError> {
@@ -859,10 +884,11 @@ pub enum RequirementsTxtParserError {
         start: usize,
         end: usize,
     },
-    Reqwest(reqwest_middleware::Error),
     NonUnicodeUrl {
         url: PathBuf,
     },
+    #[cfg(feature = "reqwest")]
+    Reqwest(reqwest_middleware::Error),
 }
 
 impl RequirementsTxtParserError {
@@ -910,8 +936,9 @@ impl RequirementsTxtParserError {
                 start: start + offset,
                 end: end + offset,
             },
-            Self::Reqwest(err) => Self::Reqwest(err),
             Self::NonUnicodeUrl { url } => Self::NonUnicodeUrl { url },
+            #[cfg(feature = "reqwest")]
+            Self::Reqwest(err) => Self::Reqwest(err),
         }
     }
 }
@@ -954,9 +981,6 @@ impl Display for RequirementsTxtParserError {
             Self::Subfile { start, .. } => {
                 write!(f, "Error parsing included file at position {start}")
             }
-            Self::Reqwest(err) => {
-                write!(f, "Error while accessing remote requirements file {err}")
-            }
             Self::NonUnicodeUrl { url } => {
                 write!(
                     f,
@@ -964,6 +988,10 @@
                     url.display(),
                 )
             }
+            #[cfg(feature = "reqwest")]
+            Self::Reqwest(err) => {
+                write!(f, "Error while accessing remote requirements file {err}")
+            }
         }
     }
 }
@@ -981,8 +1009,9 @@ impl std::error::Error for RequirementsTxtParserError {
             Self::Pep508 { source, .. } => Some(source),
             Self::Subfile { source, .. } => Some(source.as_ref()),
             Self::Parser { .. } => None,
-            Self::Reqwest(err) => err.source(),
             Self::NonUnicodeUrl { .. } => None,
+            #[cfg(feature = "reqwest")]
+            Self::Reqwest(err) => err.source(),
         }
     }
 }
@@ -1058,19 +1087,19 @@ impl Display for RequirementsTxtFileError {
                     self.file.simplified_display(),
                 )
             }
-            RequirementsTxtParserError::Reqwest(err) => {
+            RequirementsTxtParserError::NonUnicodeUrl { url } => {
                 write!(
                     f,
-                    "Error while accessing remote requirements file {}: {err}",
-                    self.file.simplified_display(),
+                    "Remote requirements URL contains non-unicode characters: {}",
+                    url.display(),
                 )
             }
-
-            RequirementsTxtParserError::NonUnicodeUrl { url } => {
+            #[cfg(feature = "reqwest")]
+            RequirementsTxtParserError::Reqwest(err) => {
                 write!(
                     f,
-                    "Remote requirements URL contains non-unicode characters: {}",
-                    url.display(),
+                    "Error while accessing remote requirements file {}: {err}",
+                    self.file.simplified_display(),
                 )
             }
         }
@@ -1089,6 +1118,7 @@ impl From<io::Error> for RequirementsTxtParserError {
     }
 }
 
+#[cfg(feature = "reqwest")]
 impl From<reqwest_middleware::Error> for RequirementsTxtParserError {
     fn from(err: reqwest_middleware::Error) -> Self {
         Self::Reqwest(err)
@@ -1147,7 +1177,7 @@ mod test {
     use tempfile::tempdir;
     use test_case::test_case;
    use unscanny::Scanner;
-    use uv_client::{RegistryClient, RegistryClientBuilder};
+    use uv_fs::Simplified;
 
     use crate::{calculate_row_column, EditableRequirement, RequirementsTxt};
 
@@ -1156,12 +1186,6 @@ mod test {
         PathBuf::from("./test-data")
     }
 
-    fn registry_client() -> RegistryClient {
-        RegistryClientBuilder::new(uv_cache::Cache::temp().unwrap())
-            .connectivity(uv_client::Connectivity::Online)
-            .build()
-    }
-
     #[test_case(Path::new("basic.txt"))]
     #[test_case(Path::new("constraints-a.txt"))]
     #[test_case(Path::new("constraints-b.txt"))]
@@ -1177,7 +1201,7 @@ mod test {
         let working_dir = workspace_test_data_dir().join("requirements-txt");
         let requirements_txt = working_dir.join(path);
 
-        let actual = RequirementsTxt::parse(requirements_txt, &working_dir, &registry_client())
+        let actual = RequirementsTxt::parse(requirements_txt, &working_dir, None)
             .await
             .unwrap();
 
@@ -1221,7 +1245,7 @@ mod test {
         let requirements_txt = temp_dir.path().join(path);
         fs::write(&requirements_txt, contents).unwrap();
 
-        let actual = RequirementsTxt::parse(&requirements_txt, &working_dir, &registry_client())
+        let actual = RequirementsTxt::parse(&requirements_txt, &working_dir, None)
            .await
            .unwrap();
 
@@ -1238,10 +1262,9 @@ mod test {
             -r missing.txt
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error)
             .chain()
             // The last error is operating-system specific.
@@ -1276,10 +1299,9 @@ mod test {
             numpy[ö]==1.29
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1310,10 +1332,9 @@ mod test {
             -e http://localhost:8080/
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1339,10 +1360,9 @@ mod test {
             -e black[,abcdef]
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1370,10 +1390,9 @@ mod test {
             --index-url 123
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1407,10 +1426,9 @@ mod test {
             file.txt
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1451,10 +1469,9 @@ mod test {
             -r subdir/child.txt
         "})?;
 
-        let requirements =
-            RequirementsTxt::parse(parent_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap();
+        let requirements = RequirementsTxt::parse(parent_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap();
         insta::assert_debug_snapshot!(requirements, @r###"
         RequirementsTxt {
             requirements: [
@@ -1504,10 +1521,9 @@ mod test {
             --no-index
         "})?;
 
-        let requirements =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap();
+        let requirements = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap();
 
         insta::assert_debug_snapshot!(requirements, @r###"
         RequirementsTxt {
@@ -1565,10 +1581,9 @@ mod test {
             --index-url https://fake.pypi.org/simple
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
@@ -1616,10 +1631,9 @@ mod test {
             tqdm
         "})?;
 
-        let error =
-            RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
-                .await
-                .unwrap_err();
+        let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), None)
+            .await
+            .unwrap_err();
         let errors = anyhow::Error::new(error).chain().join("\n");
 
         let requirement_txt =
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index 5b62ce7cbe16..b09bf65c19c3 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -22,7 +22,7 @@ pep508_rs = { path = "../pep508-rs" }
"../pep508-rs" } platform-host = { path = "../platform-host" } platform-tags = { path = "../platform-tags" } pypi-types = { path = "../pypi-types" } -requirements-txt = { path = "../requirements-txt" } +requirements-txt = { path = "../requirements-txt", features = ["reqwest"] } uv-build = { path = "../uv-build" } uv-cache = { path = "../uv-cache", features = ["clap"] } uv-client = { path = "../uv-client" } diff --git a/crates/uv/src/commands/pip_compile.rs b/crates/uv/src/commands/pip_compile.rs index 68a1e1ed17a3..9f7389c41f89 100644 --- a/crates/uv/src/commands/pip_compile.rs +++ b/crates/uv/src/commands/pip_compile.rs @@ -9,7 +9,6 @@ use std::str::FromStr; use anstream::{eprint, AutoStream, StripStream}; use anyhow::{anyhow, Context, Result}; use chrono::{DateTime, Utc}; -use futures::future::OptionFuture; use itertools::Itertools; use owo_colors::OwoColorize; use rustc_hash::FxHashSet; @@ -17,7 +16,6 @@ use tempfile::tempdir_in; use tracing::debug; use distribution_types::{IndexLocations, LocalEditable, Verbatim}; -use pep508_rs::Requirement; use platform_host::Platform; use platform_tags::Tags; use requirements_txt::EditableRequirement; @@ -38,7 +36,9 @@ use uv_warnings::warn_user; use crate::commands::reporters::{DownloadReporter, ResolverReporter}; use crate::commands::{elapsed, ExitStatus}; use crate::printer::Printer; -use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification}; +use crate::requirements::{ + read_lockfile, ExtrasSpecification, RequirementsSource, RequirementsSpecification, +}; /// Resolve a set of requirements into a set of pinned versions. #[allow(clippy::too_many_arguments, clippy::fn_params_excessive_bools)] @@ -109,7 +109,7 @@ pub(crate) async fn pip_compile( ) .await?; - // Check that all provided extras are used + // Check that all provided extras are used. if let ExtrasSpecification::Some(extras) = extras { let mut unused_extras = extras .iter() @@ -126,33 +126,8 @@ pub(crate) async fn pip_compile( } } - let preferences: Vec = OptionFuture::from( - output_file - // As an optimization, skip reading the lockfile is we're upgrading all packages anyway. - .filter(|_| !upgrade.is_all()) - .filter(|output_file| output_file.exists()) - .map(Path::to_path_buf) - .map(RequirementsSource::from_path) - .as_ref() - .map(|source| async { - RequirementsSpecification::from_source(source, &extras, &client).await - }), - ) - .await - .transpose()? - .map(|spec| spec.requirements) - .map(|requirements| match upgrade { - // Respect all pinned versions from the existing lockfile. - Upgrade::None => requirements, - // Ignore all pinned versions from the existing lockfile. - Upgrade::All => vec![], - // Ignore pinned versions for the specified packages. - Upgrade::Packages(packages) => requirements - .into_iter() - .filter(|requirement| !packages.contains(&requirement.name)) - .collect(), - }) - .unwrap_or_default(); + // Read the lockfile, if present. 
+    let preferences = read_lockfile(output_file, upgrade).await?;
 
     // Find an interpreter to use for building distributions
     let platform = Platform::current()?;
diff --git a/crates/uv/src/requirements.rs b/crates/uv/src/requirements.rs
index 31db38181cc4..e5fe33ce99a0 100644
--- a/crates/uv/src/requirements.rs
+++ b/crates/uv/src/requirements.rs
@@ -7,19 +7,19 @@ use anyhow::{Context, Result};
 use console::Term;
 use indexmap::IndexMap;
 use rustc_hash::FxHashSet;
+use tracing::{instrument, Level};
 
 use distribution_types::{FlatIndexLocation, IndexUrl};
 use pep508_rs::Requirement;
 use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
-use tracing::{instrument, Level};
 use uv_client::RegistryClient;
 use uv_fs::Simplified;
 use uv_normalize::{ExtraName, PackageName};
+use uv_warnings::warn_user;
 
+use crate::commands::Upgrade;
 use crate::confirm;
 
-use uv_warnings::warn_user;
-
 #[derive(Debug)]
 pub(crate) enum RequirementsSource {
     /// A package was provided on the command line (e.g., `pip install flask`).
@@ -179,7 +179,7 @@ impl RequirementsSpecification {
             }
             RequirementsSource::RequirementsTxt(path) => {
                 let requirements_txt =
-                    RequirementsTxt::parse(path, std::env::current_dir()?, client).await?;
+                    RequirementsTxt::parse(path, std::env::current_dir()?, Some(client)).await?;
                 Self {
                     project: None,
                     requirements: requirements_txt
@@ -433,3 +433,45 @@ fn flatten_extra(
         &mut FxHashSet::default(),
     )
 }
+
+/// Load the preferred requirements from an existing lockfile, applying the upgrade strategy.
+pub(crate) async fn read_lockfile(
+    output_file: Option<&Path>,
+    upgrade: Upgrade,
+) -> Result<Vec<Requirement>> {
+    // As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
+    let Some(output_file) = output_file
+        .filter(|_| !upgrade.is_all())
+        .filter(|output_file| output_file.exists())
+    else {
+        return Ok(Vec::new());
+    };
+
+    // Parse the requirements from the lockfile.
+    let requirements_txt =
+        RequirementsTxt::parse(output_file, std::env::current_dir()?, None).await?;
+    let requirements = requirements_txt
+        .requirements
+        .into_iter()
+        .filter_map(|entry| {
+            if entry.editable {
+                None
+            } else {
+                Some(entry.requirement)
+            }
+        })
+        .collect::<Vec<_>>();
+
+    // Apply the upgrade strategy to the requirements.
+    Ok(match upgrade {
+        // Respect all pinned versions from the existing lockfile.
+        Upgrade::None => requirements,
+        // Ignore all pinned versions from the existing lockfile.
+        Upgrade::All => vec![],
+        // Ignore pinned versions for the specified packages.
+        Upgrade::Packages(packages) => requirements
+            .into_iter()
+            .filter(|requirement| !packages.contains(&requirement.name))
+            .collect(),
+    })
+}
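Caller-side sketch (not part of the patch): after this change, RequirementsTxt::parse takes an Option<&RegistryClient>, so local files can be parsed without building a client, and an http(s):// path fails with an InvalidInput error unless the consuming crate enables the `reqwest` feature and passes Some(client). The tokio/anyhow scaffolding and the "requirements.txt" path below are assumed for illustration only.

    use std::path::Path;

    use requirements_txt::RequirementsTxt;

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Local requirements files need no HTTP client: pass `None`.
        // Without the `reqwest` feature, a remote URL would error here
        // instead of being fetched.
        let parsed = RequirementsTxt::parse("requirements.txt", Path::new("."), None).await?;
        println!("parsed {} requirements", parsed.requirements.len());
        Ok(())
    }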