diff --git a/src/cli/add.rs b/src/cli/add.rs index a6d191df1..3d3cf3f72 100644 --- a/src/cli/add.rs +++ b/src/cli/add.rs @@ -1,62 +1,77 @@ -use crate::{ - config::ConfigCli, - environment::{get_up_to_date_prefix, verify_prefix_location_unchanged, LockFileUsage}, - project::{has_features::HasFeatures, DependencyType, Project, SpecType}, - FeatureName, +use std::{ + collections::{HashMap, HashSet}, + path::PathBuf, + str::FromStr, }; + use clap::Parser; use indexmap::IndexMap; -use itertools::{Either, Itertools}; - -use crate::project::grouped_environment::GroupedEnvironment; -use miette::{IntoDiagnostic, WrapErr}; +use itertools::Itertools; +use pep440_rs::VersionSpecifiers; +use pep508_rs::{Requirement, VersionOrUrl::VersionSpecifier}; use rattler_conda_types::{ version_spec::{LogicalOperator, RangeOperator}, - Channel, MatchSpec, NamelessMatchSpec, PackageName, Platform, Version, VersionBumpType, - VersionSpec, -}; -use rattler_repodata_gateway::{Gateway, RepoData}; -use rattler_solve::{resolvo, SolverImpl}; -use std::time::Instant; -use std::{ - collections::{HashMap, HashSet}, - path::PathBuf, + MatchSpec, NamelessMatchSpec, PackageName, Platform, Version, VersionBumpType, VersionSpec, }; +use rattler_lock::{LockFile, Package}; use super::has_specs::HasSpecs; +use crate::{ + config::ConfigCli, + environment::{verify_prefix_location_unchanged, LockFileUsage}, + load_lock_file, + lock_file::{filter_lock_file, LockFileDerivedData, UpdateContext}, + project::{ + grouped_environment::GroupedEnvironment, + has_features::HasFeatures, + manifest::{python::PyPiPackageName, DependencyOverwriteBehavior}, + DependencyType, Project, SpecType, + }, + FeatureName, +}; /// Adds dependencies to the project /// -/// The dependencies should be defined as MatchSpec for conda package, or a PyPI requirement -/// for the --pypi dependencies. If no specific version is provided, the latest version -/// compatible with your project will be chosen automatically or a * will be used. +/// The dependencies should be defined as MatchSpec for conda package, or a PyPI +/// requirement for the --pypi dependencies. If no specific version is provided, +/// the latest version compatible with your project will be chosen automatically +/// or a * will be used. /// /// Example usage: /// -/// - `pixi add python=3.9`: This will select the latest minor version that complies with 3.9.*, i.e., -/// python version 3.9.0, 3.9.1, 3.9.2, etc. -/// - `pixi add python`: In absence of a specified version, the latest version will be chosen. -/// For instance, this could resolve to python version 3.11.3.* at the time of writing. +/// - `pixi add python=3.9`: This will select the latest minor version that +/// complies with 3.9.*, i.e., python version 3.9.0, 3.9.1, 3.9.2, etc. +/// - `pixi add python`: In absence of a specified version, the latest version +/// will be chosen. For instance, this could resolve to python version +/// 3.11.3.* at the time of writing. /// /// Adding multiple dependencies at once is also supported: -/// - `pixi add python pytest`: This will add both `python` and `pytest` to the project's dependencies. +/// - `pixi add python pytest`: This will add both `python` and `pytest` to the +/// project's dependencies. /// -/// The `--platform` and `--build/--host` flags make the dependency target specific. -/// - `pixi add python --platform linux-64 --platform osx-arm64`: Will add the latest version of python for linux-64 and osx-arm64 platforms. 
-/// - `pixi add python --build`: Will add the latest version of python for as a build dependency. +/// The `--platform` and `--build/--host` flags make the dependency target +/// specific. +/// - `pixi add python --platform linux-64 --platform osx-arm64`: Will add the +/// latest version of python for linux-64 and osx-arm64 platforms. +/// - `pixi add python --build`: Will add the latest version of python as a +/// build dependency. /// /// Mixing `--platform` and `--build`/`--host` flags is supported /// -/// The `--pypi` option will add the package as a pypi dependency. This can not be mixed with the conda dependencies +/// The `--pypi` option will add the package as a pypi dependency. This cannot +/// be mixed with conda dependencies /// - `pixi add --pypi boto3` /// - `pixi add --pypi "boto3==version" /// -/// If the project manifest is a `pyproject.toml`, adding a pypi dependency will add it to the native pyproject `project.dependencies` array -/// or to the native `project.optional-dependencies` table if a feature is specified: -/// - `pixi add --pypi boto3` will add `boto3` to the `project.dependencies` array -/// - `pixi add --pypi boto3 --feature aws` will add `boto3` to the `project.dependencies.aws` array -/// These dependencies will then be read by pixi as if they had been added to the pixi `pypi-dependencies` tables of the default or of a named feature. -/// +/// If the project manifest is a `pyproject.toml`, adding a pypi dependency will +/// add it to the native pyproject `project.dependencies` array or to the native +/// `project.optional-dependencies` table if a feature is specified: +/// - `pixi add --pypi boto3` will add `boto3` to the `project.dependencies` +/// array +/// - `pixi add --pypi boto3 --feature aws` will add `boto3` to the +/// `project.dependencies.aws` array +/// These dependencies will then be read by pixi as if they had been added to +/// the pixi `pypi-dependencies` tables of the default or of a named feature. #[derive(Parser, Debug, Default)] #[clap(arg_required_else_help = true, verbatim_doc_comment)] pub struct Args { @@ -80,15 +95,18 @@ pub struct DependencyConfig { #[arg(long)] pub manifest_path: Option<PathBuf>, - /// The specified dependencies are host dependencies. Conflicts with `build` and `pypi` + /// The specified dependencies are host dependencies. Conflicts with `build` + /// and `pypi` #[arg(long, conflicts_with_all = ["build", "pypi"])] pub host: bool, - /// The specified dependencies are build dependencies. Conflicts with `host` and `pypi` + /// The specified dependencies are build dependencies. Conflicts with `host` + /// and `pypi` #[arg(long, conflicts_with_all = ["host", "pypi"])] pub build: bool, - /// The specified dependencies are pypi dependencies. Conflicts with `host` and `build` + /// The specified dependencies are pypi dependencies.
Conflicts with `host` + /// and `build` #[arg(long, conflicts_with_all = ["host", "build"])] pub pypi: bool, @@ -133,12 +151,17 @@ impl DependencyConfig { .clone() .map_or(FeatureName::Default, FeatureName::Named) } - pub fn display_success(&self, operation: &str) { + pub fn display_success(&self, operation: &str, implicit_constraints: HashMap<String, String>) { for package in self.specs.clone() { eprintln!( - "{}{operation} {}", + "{}{operation} {}{}", console::style(console::Emoji("✔ ", "")).green(), - console::style(package).bold(), + console::style(&package).bold(), + if let Some(constraint) = implicit_constraints.get(&package) { + format!(" {}", console::style(constraint).dim()) + } else { + "".to_string() + } ); } @@ -174,296 +197,306 @@ pub async fn execute(args: Args) -> miette::Result<()> { let (args, config, editable) = (args.dependency_config, args.config, args.editable); let mut project = Project::load_or_else_discover(args.manifest_path.as_deref())?.with_cli_config(config); - let dependency_type = args.dependency_type(); // Sanity check of prefix location verify_prefix_location_unchanged(project.default_environment().dir().as_path()).await?; + // Load the current lock-file + let lock_file = load_lock_file(&project).await?; + // Add the platform if it is not already present project .manifest .add_platforms(args.platform.iter(), &FeatureName::Default)?; - match dependency_type { + // Add the individual specs to the project. + let mut conda_specs_to_add_constraints_for = IndexMap::new(); + let mut pypi_specs_to_add_constraints_for = IndexMap::new(); + let mut conda_packages = HashSet::new(); + let mut pypi_packages = HashSet::new(); + match args.dependency_type() { DependencyType::CondaDependency(spec_type) => { let specs = args.specs()?; - add_conda_specs_to_project( - &mut project, - &args.feature_name(), - specs, - spec_type, - args.no_install, - args.lock_file_usage(), - &args.platform, - ) - .await + for (name, spec) in specs { + let added = project.manifest.add_dependency( + &spec, + spec_type, + &args.platform, + &args.feature_name(), + DependencyOverwriteBehavior::OverwriteIfExplicit, + )?; + if added { + if spec.version.is_none() { + conda_specs_to_add_constraints_for.insert(name.clone(), (spec_type, spec)); + } + conda_packages.insert(name); + } + } } DependencyType::PypiDependency => { - let specs = args.pypi_deps(&project)?.values().cloned().collect_vec(); - add_pypi_requirements_to_project( - &mut project, - &args.feature_name(), - specs, - &args.platform, - args.lock_file_usage(), - args.no_install, - Some(editable), - ) - .await + let specs = args.pypi_deps(&project)?; + for (name, spec) in specs { + let added = project.manifest.add_pypi_dependency( + &spec, + &args.platform, + &args.feature_name(), + Some(editable), + DependencyOverwriteBehavior::OverwriteIfExplicit, + )?; + if added { + if spec.version_or_url.is_none() { + pypi_specs_to_add_constraints_for.insert(name.clone(), spec); + } + pypi_packages.insert(name.as_normalized().clone()); + } + } } - }?; - - args.display_success("Added"); - - Project::warn_on_discovered_from_env(args.manifest_path.as_deref()); - Ok(()) -} - -pub async fn add_pypi_requirements_to_project( - project: &mut Project, - feature_name: &FeatureName, - requirements: Vec<Requirement>, - platforms: &[Platform], - lock_file_usage: LockFileUsage, - no_install: bool, - editable: Option<bool>, -) -> miette::Result<()> { - for requirement in &requirements { - // TODO: Get best version - // Add the dependency to the project - project - .manifest -
.add_pypi_dependency(requirement, platforms, feature_name, editable)?; } - get_up_to_date_prefix(&project.default_environment(), lock_file_usage, no_install).await?; + // Determine the environments that are affected by the change. + let feature_name = args.feature_name(); + let affected_environments = project + .environments() + .iter() + // Filter out any environment that does not contain the feature we modified + .filter(|e| e.features().any(|f| f.name == feature_name)) + // Expand the selection to also included any environment that shares the same solve + // group + .flat_map(|e| { + GroupedEnvironment::from(e.clone()) + .environments() + .collect_vec() + }) + .unique() + .collect_vec(); + let default_environment_is_affected = + affected_environments.contains(&project.default_environment()); + + tracing::debug!( + "environments affected by the add command: {}", + affected_environments.iter().map(|e| e.name()).format(", ") + ); - project.save()?; + // Determine the combination of platforms and environments that are affected by + // the command + let affect_environment_and_platforms = affected_environments + .into_iter() + // Create an iterator over all environment and platform combinations + .flat_map(|e| e.platforms().into_iter().map(move |p| (e.clone(), p))) + // Filter out any platform that is not affected by the changes. + .filter(|(_, platform)| args.platform.is_empty() || args.platform.contains(platform)) + .map(|(e, p)| (e.name().to_string(), p)) + .collect_vec(); + + // Create an updated lock-file where the dependencies to be added are removed + // from the lock-file. + let unlocked_lock_file = unlock_packages( + &project, + &lock_file, + conda_packages, + pypi_packages, + affect_environment_and_platforms + .iter() + .map(|(e, p)| (e.as_str(), *p)) + .collect(), + ); - Ok(()) -} + // Solve the updated project. + let LockFileDerivedData { + lock_file, + package_cache, + uv_context, + updated_conda_prefixes, + updated_pypi_prefixes, + .. + } = UpdateContext::builder(&project) + .with_lock_file(unlocked_lock_file) + .with_no_install(args.no_install || args.no_lockfile_update) + .finish()? + .update() + .await?; + + // Update the constraints of specs that didn't have a version constraint based + // on the contents of the lock-file. + let implicit_constraints = if !conda_specs_to_add_constraints_for.is_empty() { + update_conda_specs_from_lock_file( + &mut project, + &lock_file, + conda_specs_to_add_constraints_for, + affect_environment_and_platforms, + &feature_name, + &args.platform, + )? + } else if !pypi_specs_to_add_constraints_for.is_empty() { + update_pypi_specs_from_lock_file( + &mut project, + &lock_file, + pypi_specs_to_add_constraints_for, + affect_environment_and_platforms, + &feature_name, + &args.platform, + editable, + )? 
+ } else { + HashMap::new() + }; -pub async fn add_conda_specs_to_project( - project: &mut Project, - feature_name: &FeatureName, - specs: IndexMap<PackageName, MatchSpec>, - spec_type: SpecType, - no_install: bool, - lock_file_usage: LockFileUsage, - specs_platforms: &[Platform], -) -> miette::Result<()> { - // Determine the best version per platform - let mut package_versions = HashMap::<PackageName, HashSet<Version>>::new(); - - // Get the grouped environments that contain the feature - let grouped_environments: Vec<GroupedEnvironment> = project - .grouped_environments() - .iter() - .filter(|env| { - env.features() - .map(|feat| &feat.name) - .contains(&feature_name) - }) - .cloned() - .collect(); - - // TODO: show progress of this set of solves - // TODO: Make this parallel - // TODO: Make this more efficient by reusing the solves in the get_up_to_date_prefix - for grouped_environment in grouped_environments { - let platforms = if specs_platforms.is_empty() { - Either::Left(grouped_environment.platforms().into_iter()) - } else { - Either::Right(specs_platforms.iter().copied()) - }; - - for platform in platforms { - // Solve the environment with the new specs added - let solved_versions = match determine_best_version( - &grouped_environment, - &specs, - spec_type, - platform, - grouped_environment.channels(), - project.repodata_gateway(), - ) - .await - { - Ok(versions) => versions, - Err(err) => { - return Err(err).wrap_err_with(|| miette::miette!( "could not determine any available versions for {} on {platform}. Either the package could not be found or version constraints on other dependencies result in a conflict.", - specs.keys().map(|s| s.as_source()).join(", ") - )); - } - }; + // Write the lock-file and the project to disk project.save()?; - // Collect all the versions seen. - for (name, version) in solved_versions { - package_versions.entry(name).or_default().insert(version); - } - } + // Reconstruct the lock-file derived data. + let mut updated_lock_file = LockFileDerivedData { + project: &project, + lock_file, + package_cache, + updated_conda_prefixes, + updated_pypi_prefixes, + uv_context, + }; + if !args.no_lockfile_update { + updated_lock_file.write_to_disk()?; } - // Update the specs passed on the command line with the best available versions. - for (name, spec) in specs { - let updated_spec = if spec.version.is_none() { - let mut updated_spec = NamelessMatchSpec::from(spec.clone()); - if let Some(versions_seen) = package_versions.get(&name).cloned() { - updated_spec.version = determine_version_constraint(&versions_seen); - } else { - updated_spec.version = determine_version_constraint( - &determine_latest_versions(project, specs_platforms, &name).await?, - ); - } - updated_spec - } else { - spec.into() - }; - let spec = MatchSpec::from_nameless(updated_spec, Some(name)); - - // Add the dependency to the project - project - .manifest - .add_dependency(&spec, spec_type, specs_platforms, feature_name)?; } - // Update the prefix - get_up_to_date_prefix(&project.default_environment(), lock_file_usage, no_install).await?; - - project.save()?; + // Install/update the default environment if: + // - we are not skipping the installation, + // - there is only the default environment, + // - and the default environment is affected by the changes, + if !args.no_install && project.environments().len() == 1 && default_environment_is_affected { + updated_lock_file + .prefix(&project.default_environment()) + .await?; } - // Notify the user we succeeded.
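+ // For a bare `pixi add numpy`, for example, `implicit_constraints` now maps "numpy" to the range derived from the solve, so the summary below prints something like `✔ Added numpy >=1.26.4,<1.27` (the exact range depends on the locked version).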
+ args.display_success("Added", implicit_constraints); + Project::warn_on_discovered_from_env(args.manifest_path.as_deref()); Ok(()) } -/// Get all the latest versions found in the platforms repodata. -async fn determine_latest_versions( - project: &Project, +/// Update the pypi specs of newly added packages based on the contents of the +/// updated lock-file. +fn update_pypi_specs_from_lock_file( + project: &mut Project, + updated_lock_file: &LockFile, + pypi_specs_to_add_constraints_for: IndexMap, + affect_environment_and_platforms: Vec<(String, Platform)>, + feature_name: &FeatureName, platforms: &[Platform], - name: &PackageName, -) -> miette::Result> { - // Get platforms to search for including NoArch - let platforms = if platforms.is_empty() { - let mut temp = project - .default_environment() - .platforms() - .into_iter() - .collect_vec(); - temp.push(Platform::NoArch); - temp - } else { - let mut temp = platforms.to_vec(); - temp.push(Platform::NoArch); - temp - }; - - // Get the records for the package - let records = project - .repodata_gateway() - .query( - project - .default_environment() - .channels() - .into_iter() - .cloned(), - platforms, - [name.clone()], - ) - .recursive(false) - .await - .into_diagnostic()?; - - // Find the first non-empty channel - let Some(priority_records) = records.into_iter().find(|records| !records.is_empty()) else { - return Ok(vec![]); - }; + editable: bool, +) -> miette::Result> { + let mut implicit_constraints = HashMap::new(); - // Find the maximum versions per platform - let mut found_records: HashMap = HashMap::new(); - for record in priority_records.iter() { - let version = record.package_record.version.version().clone(); - let platform = &record.package_record.subdir; - found_records - .entry(platform.clone()) - .and_modify(|max| { - if &version > max { - *max = version.clone(); - } - }) - .or_insert(version); + let pypi_records = affect_environment_and_platforms + .into_iter() + // Get all the conda and pypi records for the combination of environments and + // platforms + .filter_map(|(env, platform)| { + let locked_env = updated_lock_file.environment(&env)?; + locked_env.pypi_packages_for_platform(platform) + }) + .flatten() + .collect_vec(); + + // Determine the versions of the packages in the lock-file + for (name, _) in pypi_specs_to_add_constraints_for { + let version_constraint = determine_version_constraint( + pypi_records + .iter() + .filter_map(|(data, _)| { + if &data.name == name.as_normalized() { + Version::from_str(&data.version.to_string()).ok() + } else { + None + } + }) + .collect_vec() + .iter(), + ); + + let version_spec = + version_constraint.and_then(|spec| VersionSpecifiers::from_str(&spec.to_string()).ok()); + if let Some(version_spec) = version_spec { + implicit_constraints.insert(name.as_source().to_string(), version_spec.to_string()); + project.manifest.add_pypi_dependency( + &Requirement { + name: name.as_normalized().clone(), + extras: vec![], + version_or_url: Some(VersionSpecifier(version_spec)), + marker: None, + origin: None, + }, + platforms, + feature_name, + Some(editable), + DependencyOverwriteBehavior::Overwrite, + )?; + } } - // Determine the version constraint based on the max of every channel and platform. - Ok(found_records.into_values().collect()) + Ok(implicit_constraints) } -/// Given several specs determines the highest installable version for them. 
-pub async fn determine_best_version<'p>( - environment: &GroupedEnvironment<'p>, - new_specs: &IndexMap<PackageName, MatchSpec>, - new_specs_type: SpecType, - platform: Platform, - channels: impl IntoIterator<Item = &'p Channel>, - repodata_gateway: &Gateway, -) -> miette::Result<HashMap<PackageName, Version>> { - // Build the combined set of specs while updating the dependencies with the new specs. - let dependencies = SpecType::all() - .map(|spec_type| { - let mut deps = environment.dependencies(Some(spec_type), Some(platform)); - if spec_type == new_specs_type { - for (new_name, new_spec) in new_specs.iter() { - deps.remove(new_name); // Remove any existing specs - deps.insert(new_name.clone(), NamelessMatchSpec::from(new_spec.clone())); - // Add the new specs - } - } - deps - }) - .reduce(|acc, deps| acc.overwrite(&deps)) - .unwrap_or_default(); - - // Extract the package names from all the dependencies - let fetch_repodata_start = Instant::now(); - let available_packages = repodata_gateway - .query( - channels.into_iter().cloned(), - [platform, Platform::NoArch], - dependencies.clone().into_match_specs(), - ) - .recursive(true) - .await - .into_diagnostic()?; - let total_records = available_packages.iter().map(RepoData::len).sum::<usize>(); - tracing::info!( - "fetched {total_records} records in {:?}", - fetch_repodata_start.elapsed() - ); - // Construct a solver task to start solving. - let task = rattler_solve::SolverTask { - specs: dependencies - .iter_specs() - .map(|(name, spec)| MatchSpec::from_nameless(spec.clone(), Some(name.clone()))) - .collect(), - virtual_packages: environment.virtual_packages(platform), - ..rattler_solve::SolverTask::from_iter(&available_packages) - }; - - let records = resolvo::Solver.solve(task).into_diagnostic()?; +/// Update the conda specs of newly added packages based on the contents of the +/// updated lock-file. +fn update_conda_specs_from_lock_file( + project: &mut Project, + updated_lock_file: &LockFile, + conda_specs_to_add_constraints_for: IndexMap<PackageName, (SpecType, MatchSpec)>, + affect_environment_and_platforms: Vec<(String, Platform)>, + feature_name: &FeatureName, + platforms: &[Platform], +) -> miette::Result<HashMap<String, String>> { + let mut implicit_constraints = HashMap::new(); - // Determine the versions of the new packages - Ok(records + // Determine the conda records that were affected by the add. + let conda_records = affect_environment_and_platforms .into_iter() - .filter(|record| new_specs.contains_key(&record.package_record.name)) - .map(|record| { - ( - record.package_record.name, - record.package_record.version.into(), - ) + // Get all the conda and pypi records for the combination of environments and + // platforms + .filter_map(|(env, platform)| { + let locked_env = updated_lock_file.environment(&env)?; + locked_env + .conda_repodata_records_for_platform(platform) + .ok()?
}) - .collect()) + .flatten() + .collect_vec(); + + for (name, (spec_type, _)) in conda_specs_to_add_constraints_for { + let version_constraint = + determine_version_constraint(conda_records.iter().filter_map(|record| { + if record.package_record.name == name { + Some(record.package_record.version.version()) + } else { + None + } + })); + + if let Some(version_constraint) = version_constraint { + implicit_constraints + .insert(name.as_source().to_string(), version_constraint.to_string()); + project.manifest.add_dependency( + &MatchSpec::from_nameless( + NamelessMatchSpec { + version: Some(version_constraint), + ..NamelessMatchSpec::default() + }, + Some(name), + ), + spec_type, + platforms, + feature_name, + DependencyOverwriteBehavior::Overwrite, + )?; + } + } + + Ok(implicit_constraints) } -/// Given a set of versions, determines the best version constraint to use that captures all of them. +/// Given a set of versions, determines the best version constraint to use that +/// captures all of them. fn determine_version_constraint<'a>( versions: impl IntoIterator<Item = &'a Version>, ) -> Option<VersionSpec> { @@ -483,6 +516,26 @@ )) } +/// Constructs a new lock-file where some of the constraints have been removed. +fn unlock_packages( + project: &Project, + lock_file: &LockFile, + conda_packages: HashSet<PackageName>, + pypi_packages: HashSet<uv_normalize::PackageName>, + affected_environments: HashSet<(&str, Platform)>, +) -> LockFile { + filter_lock_file(project, lock_file, |env, platform, package| { + if affected_environments.contains(&(env.name().as_str(), platform)) { + match package { + Package::Conda(package) => !conda_packages.contains(&package.package_record().name), + Package::Pypi(package) => !pypi_packages.contains(&package.data().package.name), + } + } else { + true + } + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/cli/init.rs b/src/cli/init.rs index 95ed9e765..d29335d9e 100644 --- a/src/cli/init.rs +++ b/src/cli/init.rs @@ -1,6 +1,7 @@ use crate::config::Config; use crate::environment::{get_up_to_date_prefix, LockFileUsage}; use crate::project::manifest::pyproject::PyProjectToml; +use crate::project::manifest::DependencyOverwriteBehavior; use crate::utils::conda_environment_file::CondaEnvFile; use crate::{config::get_default_author, consts}; use crate::{FeatureName, Project}; @@ -188,6 +189,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { crate::SpecType::Run, &platforms, &FeatureName::default(), + DependencyOverwriteBehavior::Overwrite, )?; } for requirement in pypi_deps { @@ -196,6 +198,7 @@ &platforms, &FeatureName::default(), None, + DependencyOverwriteBehavior::Overwrite, )?; } project.save()?; diff --git a/src/cli/remove.rs b/src/cli/remove.rs index 20692c334..a8348301e 100644 --- a/src/cli/remove.rs +++ b/src/cli/remove.rs @@ -63,7 +63,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { ) .await?; - args.display_success("Removed"); + args.display_success("Removed", Default::default()); Project::warn_on_discovered_from_env(args.manifest_path.as_deref()); Ok(()) diff --git a/src/cli/update.rs b/src/cli/update.rs index fa9c100af..2db7514cb 100644 --- a/src/cli/update.rs +++ b/src/cli/update.rs @@ -12,7 +12,7 @@ use indexmap::IndexMap; use itertools::{Either, Itertools}; use miette::{Context, IntoDiagnostic, MietteDiagnostic}; use rattler_conda_types::Platform; -use rattler_lock::{LockFile, LockFileBuilder, Package}; +use rattler_lock::{LockFile, Package}; use serde::Serialize; use serde_json::Value; use
tabwriter::TabWriter; @@ -22,8 +22,7 @@ use crate::{ consts, consts::{CondaEmoji, PypiEmoji}, load_lock_file, - lock_file::UpdateContext, - project::grouped_environment::GroupedEnvironment, + lock_file::{filter_lock_file, UpdateContext}, EnvironmentName, HasFeatures, Project, }; @@ -262,35 +261,9 @@ fn check_package_exists( /// Constructs a new lock-file where some of the constraints have been removed. fn unlock_packages(project: &Project, lock_file: &LockFile, specs: &UpdateSpecs) -> LockFile { - let mut builder = LockFileBuilder::new(); - - for (environment_name, environment) in lock_file.environments() { - // Find the environment in the project - let Some(project_env) = project.environment(environment_name) else { - continue; - }; - - // Copy the channels - builder.set_channels(environment_name, environment.channels().to_vec()); - - // Copy the indexes - let indexes = environment - .pypi_indexes() - .cloned() - .unwrap_or_else(|| GroupedEnvironment::from(project_env).pypi_options().into()); - builder.set_pypi_indexes(environment_name, indexes); - - // Copy all packages that don't need to be relaxed - for (platform, packages) in environment.packages_by_platform() { - for package in packages { - if !specs.should_relax(environment_name, platform, &package) { - builder.add_package(environment_name, platform, package); - } - } - } - } - - builder.finish() + filter_lock_file(project, lock_file, |env, platform, package| { + !specs.should_relax(env.name().as_str(), platform, package) + }) } // Represents the differences between two sets of packages. diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs index 778ddc7b5..01d379ba3 100644 --- a/src/lock_file/mod.rs +++ b/src/lock_file/mod.rs @@ -4,20 +4,22 @@ mod records_by_name; mod resolve; mod satisfiability; mod update; +mod utils; -use crate::Project; use miette::{IntoDiagnostic, WrapErr}; -use rattler_conda_types::RepoDataRecord; -use rattler_lock::{LockFile, PypiPackageData, PypiPackageEnvironmentData}; - pub use outdated::OutdatedEnvironments; pub use package_identifier::PypiPackageIdentifier; +use rattler_conda_types::RepoDataRecord; +use rattler_lock::{LockFile, PypiPackageData, PypiPackageEnvironmentData}; pub use records_by_name::{PypiRecordsByName, RepoDataRecordsByName}; pub use resolve::{ conda::resolve_conda, pypi::resolve_pypi, uv_resolution_context::UvResolutionContext, }; pub use satisfiability::{verify_environment_satisfiability, verify_platform_satisfiability}; pub use update::{LockFileDerivedData, UpdateContext, UpdateLockFileOptions}; +pub use utils::filter_lock_file; + +use crate::Project; /// A list of conda packages that are locked for a specific platform. pub type LockedCondaPackages = Vec<RepoDataRecord>; @@ -25,11 +27,12 @@ pub type LockedCondaPackages = Vec<RepoDataRecord>; /// A list of Pypi packages that are locked for a specific platform. pub type LockedPypiPackages = Vec<PypiRecord>; -/// A single Pypi record that contains both the package data and the environment data. In Pixi we -/// basically always need both. +/// A single Pypi record that contains both the package data and the environment +/// data. In Pixi we basically always need both. pub type PypiRecord = (PypiPackageData, PypiPackageEnvironmentData); -/// Loads the lockfile for the specified project or returns a dummy one if none could be found. +/// Loads the lockfile for the specified project or returns a dummy one if none +/// could be found.
pub async fn load_lock_file(project: &Project) -> miette::Result<LockFile> { let lock_file_path = project.lock_file_path(); if lock_file_path.is_file() { diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index 8a22e7fc5..dfa7b8e03 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -93,11 +93,11 @@ pub struct LockFileDerivedData<'p> { pub package_cache: PackageCache, /// A list of prefixes that are up-to-date with the latest conda packages. - pub updated_conda_prefixes: HashMap<Environment<'p>, (Prefix, PythonStatus)>, + pub updated_conda_prefixes: HashMap<EnvironmentName, (Prefix, PythonStatus)>, /// A list of prefixes that have been updated while resolving all /// dependencies. - pub updated_pypi_prefixes: HashMap<Environment<'p>, Prefix>, + pub updated_pypi_prefixes: HashMap<EnvironmentName, Prefix>, /// The cached uv context pub uv_context: Option<UvResolutionContext>, @@ -125,7 +125,7 @@ impl<'p> LockFileDerivedData<'p> { }, )?; - if let Some(prefix) = self.updated_pypi_prefixes.get(environment) { + if let Some(prefix) = self.updated_pypi_prefixes.get(environment.name()) { return Ok(prefix.clone()); } @@ -176,7 +176,7 @@ // Store that we updated the environment, so we won't have to do it again. self.updated_pypi_prefixes - .insert(environment.clone(), prefix.clone()); + .insert(environment.name().clone(), prefix.clone()); Ok(prefix) } @@ -210,7 +210,7 @@ environment: &Environment<'p>, ) -> miette::Result<(Prefix, PythonStatus)> { // If we previously updated this environment, early out. - if let Some((prefix, python_status)) = self.updated_conda_prefixes.get(environment) { + if let Some((prefix, python_status)) = self.updated_conda_prefixes.get(environment.name()) { return Ok((prefix.clone(), python_status.clone())); } @@ -257,8 +257,10 @@ .await?; // Store that we updated the environment, so we won't have to do it again. - self.updated_conda_prefixes - .insert(environment.clone(), (prefix.clone(), python_status.clone())); + self.updated_conda_prefixes.insert( + environment.name().clone(), + (prefix.clone(), python_status.clone()), + ); Ok((prefix, python_status)) } @@ -434,7 +436,7 @@ impl<'p> UpdateContext<'p> { /// Get a list of conda prefixes that have been updated.
pub fn take_instantiated_conda_prefixes( &mut self, - ) -> HashMap<Environment<'p>, (Prefix, PythonStatus)> { + ) -> HashMap<EnvironmentName, (Prefix, PythonStatus)> { self.instantiated_conda_prefixes .drain() .filter_map(|(env, cell)| match env { @@ -443,7 +445,7 @@ .expect("prefixes must not be shared") .into_inner() .expect("prefix must be available"); - Some((env, prefix)) + Some((env.name().clone(), prefix)) } _ => None, }) diff --git a/src/lock_file/utils.rs b/src/lock_file/utils.rs new file mode 100644 index 000000000..01c71039f --- /dev/null +++ b/src/lock_file/utils.rs @@ -0,0 +1,45 @@ +use rattler_conda_types::Platform; +use rattler_lock::{LockFile, LockFileBuilder, Package}; + +use crate::{ + project::{grouped_environment::GroupedEnvironment, Environment}, + HasFeatures, Project, +}; + +/// Constructs a new lock-file where some of the packages have been removed +pub fn filter_lock_file<'p, F: FnMut(&Environment<'p>, Platform, &Package) -> bool>( + project: &'p Project, + lock_file: &LockFile, + mut filter: F, +) -> LockFile { + let mut builder = LockFileBuilder::new(); + + for (environment_name, environment) in lock_file.environments() { + // Find the environment in the project + let Some(project_env) = project.environment(environment_name) else { + continue; + }; + + // Copy the channels + builder.set_channels(environment_name, environment.channels().to_vec()); + + // Copy the indexes + let indexes = environment.pypi_indexes().cloned().unwrap_or_else(|| { + GroupedEnvironment::from(project_env.clone()) + .pypi_options() + .into() + }); + builder.set_pypi_indexes(environment_name, indexes); + + // Copy all packages that don't need to be relaxed + for (platform, packages) in environment.packages_by_platform() { + for package in packages { + if filter(&project_env, platform, &package) { + builder.add_package(environment_name, platform, package); + } + } + } + } + + builder.finish() +} diff --git a/src/project/manifest/document.rs b/src/project/manifest/document.rs index dafc18dc8..550f7c41d 100644 --- a/src/project/manifest/document.rs +++ b/src/project/manifest/document.rs @@ -251,6 +251,7 @@ impl ManifestSource { requirement: &pep508_rs::Requirement, platform: Option<Platform>, feature_name: &FeatureName, + editable: Option<bool>, ) -> Result<(), TomlError> { match self { ManifestSource::PyProjectToml(_) => { @@ -270,10 +271,15 @@ } } ManifestSource::PixiToml(_) => { + let mut pypi_requirement = PyPiRequirement::from(requirement.clone()); + if let Some(editable) = editable { + pypi_requirement.set_editable(editable); + } + self.get_or_insert_toml_table(platform, feature_name, consts::PYPI_DEPENDENCIES)? .insert( requirement.name.as_ref(), - Item::Value(PyPiRequirement::from(requirement.clone()).into()), + Item::Value(pypi_requirement.into()), ); } }; diff --git a/src/project/manifest/mod.rs b/src/project/manifest/mod.rs index b63002916..c268ad626 100644 --- a/src/project/manifest/mod.rs +++ b/src/project/manifest/mod.rs @@ -113,6 +113,21 @@ pub struct Manifest { pub parsed: ProjectManifest, } +#[derive(Debug, Copy, Clone)] +pub enum DependencyOverwriteBehavior { + /// Overwrite anything that is already present. + Overwrite, + + /// Overwrite only if the dependency is explicitly defined (e.g. it has some constraints).
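+ /// For example, `pixi add numpy` leaves an existing `numpy` entry untouched, while `pixi add "numpy>=2"` overwrites it.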
+ OverwriteIfExplicit, + + /// Ignore any duplicate + IgnoreDuplicate, + + /// Error on duplicate + Error, +} + impl Borrow<ProjectManifest> for Manifest { fn borrow(&self) -> &ProjectManifest { &self.parsed @@ -391,29 +406,34 @@ spec_type: SpecType, platforms: &[Platform], feature_name: &FeatureName, - ) -> miette::Result<()> { + overwrite_behavior: DependencyOverwriteBehavior, + ) -> miette::Result<bool> { // Determine the name of the package to add let (Some(name), spec) = spec.clone().into_nameless() else { miette::bail!("pixi does not support wildcard dependencies") }; + let mut any_added = false; for platform in to_options(platforms) { // Add the dependency to the manifest match self .get_or_insert_target_mut(platform, Some(feature_name)) - .try_add_dependency(&name, &spec, spec_type) + .try_add_dependency(&name, &spec, spec_type, overwrite_behavior) { - Ok(_) => (), - Err(DependencyError::Duplicate(e)) => { - tracing::warn!("Dependency `{}` already existed, overwriting", e); + Ok(true) => { + self.document.add_dependency( + &name, + &spec, + spec_type, + platform, + feature_name, + )?; + any_added = true; } + Ok(false) => {} Err(e) => return Err(e.into()), }; - - // and to the TOML document - self.document - .add_dependency(&name, &spec, spec_type, platform, feature_name)?; } - Ok(()) + Ok(any_added) } /// Add a pypi requirement to the manifest @@ -423,24 +443,29 @@ platforms: &[Platform], feature_name: &FeatureName, editable: Option<bool>, - ) -> miette::Result<()> { + overwrite_behavior: DependencyOverwriteBehavior, + ) -> miette::Result<bool> { + let mut any_added = false; for platform in to_options(platforms) { // Add the pypi dependency to the manifest match self .get_or_insert_target_mut(platform, Some(feature_name)) - .try_add_pypi_dependency(requirement, editable) + .try_add_pypi_dependency(requirement, editable, overwrite_behavior) { - Ok(_) => (), - Err(DependencyError::Duplicate(e)) => { - tracing::warn!("Dependency `{}` already existed, overwriting", e); + Ok(true) => { + self.document.add_pypi_dependency( + requirement, + platform, + feature_name, + editable, + )?; + any_added = true; } + Ok(false) => {} Err(e) => return Err(e.into()), }; - // and to the TOML document - self.document - .add_pypi_dependency(requirement, platform, feature_name)?; } - Ok(()) + Ok(any_added) } /// Removes a dependency based on `SpecType`.
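The `bool` returned here is what lets `add` tell "newly written to the manifest" apart from "already present and left untouched". A minimal caller-side sketch of the pattern (hypothetical `manifest` value; mirrors the loop in `execute` above):

    use rattler_conda_types::{MatchSpec, ParseStrictness::Strict};

    // A bare spec, as produced by `pixi add numpy` without a version.
    let spec = MatchSpec::from_str("numpy", Strict).unwrap();
    let added = manifest.add_dependency(
        &spec,
        SpecType::Run,
        &[], // empty slice targets the default (all) platforms
        &FeatureName::Default,
        DependencyOverwriteBehavior::OverwriteIfExplicit,
    )?;
    if added && spec.version.is_none() {
        // Queue the name so an implicit constraint can be written back
        // once the lock-file solve has picked a concrete version.
    }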
@@ -2645,10 +2670,11 @@ bar = "*" let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); manifest .add_dependency( - &MatchSpec::from_str(" baz >=1.2.3", Strict).unwrap(), + &MatchSpec::from_str("baz >=1.2.3", Strict).unwrap(), SpecType::Run, &[], &FeatureName::Default, + DependencyOverwriteBehavior::Overwrite, ) .unwrap(); assert_eq!( @@ -2670,6 +2696,7 @@ SpecType::Run, &[], &FeatureName::Named("test".to_string()), + DependencyOverwriteBehavior::Overwrite, ) .unwrap(); @@ -2694,6 +2721,7 @@ SpecType::Run, &[Platform::Linux64], &FeatureName::Named("extra".to_string()), + DependencyOverwriteBehavior::Overwrite, ) .unwrap(); @@ -2719,6 +2747,7 @@ SpecType::Build, &[Platform::Linux64], &FeatureName::Named("build".to_string()), + DependencyOverwriteBehavior::Overwrite, ) .unwrap(); diff --git a/src/project/manifest/pyproject.rs b/src/project/manifest/pyproject.rs index 65bd291f3..2960c3e36 100644 --- a/src/project/manifest/pyproject.rs +++ b/src/project/manifest/pyproject.rs @@ -1,19 +1,17 @@ +use std::{collections::HashMap, fs, path::PathBuf, str::FromStr}; + use miette::Report; use pep440_rs::VersionSpecifiers; use pyproject_toml::{self, Project}; use rattler_conda_types::{NamelessMatchSpec, PackageName, ParseStrictness::Lenient, VersionSpec}; use serde::Deserialize; -use std::fs; -use std::path::PathBuf; -use std::{collections::HashMap, str::FromStr}; use toml_edit::DocumentMut; -use crate::FeatureName; - use super::{ error::{RequirementConversionError, TomlError}, Feature, ProjectManifest, SpecType, }; +use crate::FeatureName; #[derive(Deserialize, Debug, Clone)] pub struct PyProjectManifest { @@ -66,12 +64,14 @@ impl From<PyProjectManifest> for ProjectManifest { // TODO: could copy across / convert some other optional fields if relevant manifest.project.name = Some(pyproject.name.clone()); - // Add python as dependency based on the project.requires_python property (if any) + // Add python as dependency based on the project.requires_python property (if + // any) let python_spec = pyproject.requires_python.clone(); let target = manifest.default_feature_mut().targets.default_mut(); let python = PackageName::from_str("python").unwrap(); - // If the target doesn't have any python dependency, we add it from the `requires-python` + // If the target doesn't have any python dependency, we add it from the + // `requires-python` if !target.has_dependency(&python, Some(SpecType::Run), None) { target.add_dependency( &python, @@ -121,8 +121,8 @@ } /// Try to return a NamelessMatchSpec from a pep508_rs::VersionOrUrl -/// This will only work if it is not URL and the VersionSpecifier can successfully -/// be interpreted as a NamelessMatchSpec.version +/// This will only work if it is not a URL and the VersionSpecifier can +/// successfully be interpreted as a NamelessMatchSpec.version fn version_or_url_to_nameless_matchspec( version: &Option<pep508_rs::VersionOrUrl>, ) -> Result<NamelessMatchSpec, RequirementConversionError> { @@ -145,7 +145,8 @@ /// A struct wrapping pyproject_toml::PyProjectToml /// ensuring it has a project table /// -/// This is used during 'pixi init' to parse a potentially non-pixi 'pyproject.toml' +/// This is used during 'pixi init' to parse a potentially non-pixi +/// 'pyproject.toml' pub struct PyProjectToml { inner: pyproject_toml::PyProjectToml, } @@ -178,10 +179,14 @@ self.inner.project.as_ref().unwrap() } - /// Builds a list of pixi environments from pyproject groups of extra dependencies: - one
environment is created per group of extra, with the same name as the group of extra - each environment includes the feature of the same name as the group of extra - it will also include other features inferred from any self references to other groups of extras + /// Builds a list of pixi environments from pyproject groups of extra + /// dependencies: + /// - one environment is created per group of extra, with the same name as + /// the group of extra + /// - each environment includes the feature of the same name as the group + /// of extra + /// - it will also include other features inferred from any self references + /// to other groups of extras pub fn environments_from_extras(&self) -> HashMap<String, Vec<String>> { let mut environments = HashMap::new(); if let Some(extras) = &self.project().optional_dependencies { @@ -203,14 +208,14 @@ environments } - /// Checks whether a path is a valid `pyproject.toml` for use with pixi by checking if it - /// contains a `[tool.pixi.project]` item. + /// Checks whether a path is a valid `pyproject.toml` for use with pixi by + /// checking if it contains a `[tool.pixi.project]` item. pub fn is_pixi(path: &PathBuf) -> bool { let source = fs::read_to_string(path).unwrap(); Self::is_pixi_str(&source).unwrap_or(false) } - /// Checks whether a string is a valid `pyproject.toml` for use with pixi by checking if it - /// contains a `[tool.pixi.project]` item. + /// Checks whether a string is a valid `pyproject.toml` for use with pixi by + /// checking if it contains a `[tool.pixi.project]` item. pub fn is_pixi_str(source: &str) -> Result<bool, Report> { match source.parse::<DocumentMut>().map_err(TomlError::from) { Err(e) => e.to_fancy("pyproject.toml", source), @@ -225,15 +230,14 @@ #[cfg(test)] mod tests { - use std::path::Path; - use std::str::FromStr; + use std::{path::Path, str::FromStr}; use insta::assert_snapshot; use pep440_rs::VersionSpecifiers; use rattler_conda_types::{ParseStrictness, VersionSpec}; use crate::{ - project::manifest::{python::PyPiPackageName, Manifest}, + project::manifest::{python::PyPiPackageName, DependencyOverwriteBehavior, Manifest}, FeatureName, }; @@ -409,7 +413,13 @@ // Add numpy to pyproject let requirement = pep508_rs::Requirement::from_str("numpy>=3.12").unwrap(); manifest - .add_pypi_dependency(&requirement, &[], &FeatureName::Default, None) + .add_pypi_dependency( + &requirement, + &[], + &FeatureName::Default, + None, + DependencyOverwriteBehavior::Overwrite, + ) .unwrap(); assert!(manifest @@ -431,6 +441,7 @@ &[], &FeatureName::Named("test".to_string()), None, + DependencyOverwriteBehavior::Overwrite, ) .unwrap(); assert!(manifest diff --git a/src/project/manifest/python.rs b/src/project/manifest/python.rs index a400d4396..ad44714cd 100644 --- a/src/project/manifest/python.rs +++ b/src/project/manifest/python.rs @@ -222,6 +222,9 @@ impl From<PyPiRequirement> for toml_edit::Value { } match &val { + PyPiRequirement::Version { version, extras } if extras.is_empty() => { + toml_edit::Value::from(version.to_string()) + } PyPiRequirement::Version { version, extras } => { let mut table = toml_edit::Table::new().into_inline_table(); table.insert( @@ -308,7 +311,11 @@ impl From<pep508_rs::Requirement> for PyPiRequirement { if let Some(version_or_url) = req.version_or_url { match version_or_url { pep508_rs::VersionOrUrl::VersionSpecifier(v) => PyPiRequirement::Version { - version: VersionOrStar::Version(v), + version: if v.is_empty() { + VersionOrStar::Star + } else { + VersionOrStar::Version(v) + }, extras: req.extras, },
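+ // A bare requirement such as `numpy` (empty specifier) becomes `VersionOrStar::Star`, so together with the plain-string shorthand above it serializes as `numpy = "*"` in pixi.toml rather than as an empty version string.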
pep508_rs::VersionOrUrl::Url(u) => { diff --git a/src/project/manifest/target.rs b/src/project/manifest/target.rs index ea5f7fa51..0c035b35b 100644 --- a/src/project/manifest/target.rs +++ b/src/project/manifest/target.rs @@ -1,25 +1,27 @@ -use crate::project::manifest::activation::Activation; -use crate::project::manifest::python::PyPiPackageName; -use crate::task::TaskName; -use crate::utils::spanned::PixiSpanned; -use crate::{ - project::{manifest::PyPiRequirement, SpecType}, - task::Task, -}; -use indexmap::map::Entry; -use indexmap::IndexMap; +use std::{borrow::Cow, collections::HashMap, str::FromStr}; + +use indexmap::{map::Entry, IndexMap}; use itertools::Either; use rattler_conda_types::{NamelessMatchSpec, PackageName, Platform}; use serde::{Deserialize, Deserializer}; use serde_with::{serde_as, DisplayFromStr, PickFirst}; -use std::borrow::Cow; -use std::collections::HashMap; -use std::str::FromStr; use super::error::DependencyError; +use crate::{ + project::{ + manifest::{ + activation::Activation, python::PyPiPackageName, DependencyOverwriteBehavior, + PyPiRequirement, + }, + SpecType, + }, + task::{Task, TaskName}, + utils::spanned::PixiSpanned, +}; -/// A target describes the dependencies, activations and task available to a specific feature, in -/// a specific environment, and optionally for a specific platform. +/// A target describes the dependencies, activations and task available to a +/// specific feature, in a specific environment, and optionally for a specific +/// platform. #[derive(Default, Debug, Clone)] pub struct Target { /// Dependencies for this target. @@ -51,16 +53,17 @@ impl Target { self.dependencies.get(&SpecType::Build) } - /// Returns the dependencies to use for the given `spec_type`. If `None` is specified, the - /// combined dependencies are returned. + /// Returns the dependencies to use for the given `spec_type`. If `None` is + /// specified, the combined dependencies are returned. /// - /// The `build` dependencies overwrite the `host` dependencies which overwrite the `run` - /// dependencies. + /// The `build` dependencies overwrite the `host` dependencies which + /// overwrite the `run` dependencies. /// - /// This function returns `None` if no dependencies are specified for the given `spec_type`. + /// This function returns `None` if no dependencies are specified for the + /// given `spec_type`. /// - /// This function returns a `Cow` to avoid cloning the dependencies if they can be returned - /// directly from the underlying map. + /// This function returns a `Cow` to avoid cloning the dependencies if they + /// can be returned directly from the underlying map. pub fn dependencies( &self, spec_type: Option<SpecType>, @@ -74,13 +77,14 @@ /// Determines the combined set of dependencies. /// - /// The `build` dependencies overwrite the `host` dependencies which overwrite the `run` - /// dependencies. + /// The `build` dependencies overwrite the `host` dependencies which + /// overwrite the `run` dependencies. /// - /// This function returns `None` if no dependencies are specified for the given `spec_type`. + /// This function returns `None` if no dependencies are specified for the + /// given `spec_type`. /// - /// This function returns a `Cow` to avoid cloning the dependencies if they can be returned - /// directly from the underlying map. + /// This function returns a `Cow` to avoid cloning the dependencies if they + /// can be returned directly from the underlying map.
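+ /// (For instance, a `python` spec under `[host-dependencies]` shadows one under `[dependencies]` in the combined view.)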
fn combined_dependencies(&self) -> Option<Cow<'_, IndexMap<PackageName, NamelessMatchSpec>>> { let mut all_deps = None; for spec_type in [SpecType::Run, SpecType::Host, SpecType::Build] { @@ -166,12 +170,22 @@ impl Target { dep_name: &PackageName, spec: &NamelessMatchSpec, spec_type: SpecType, - ) -> Result<(), DependencyError> { - if self.has_dependency(dep_name, Some(spec_type), Some(spec)) { - return Err(DependencyError::Duplicate(dep_name.as_normalized().into())); + dependency_overwrite_behavior: DependencyOverwriteBehavior, + ) -> Result<bool, DependencyError> { + if self.has_dependency(dep_name, Some(spec_type), None) { + match dependency_overwrite_behavior { + DependencyOverwriteBehavior::OverwriteIfExplicit if spec.version.is_none() => { + return Ok(false) + } + DependencyOverwriteBehavior::IgnoreDuplicate => return Ok(false), + DependencyOverwriteBehavior::Error => { + return Err(DependencyError::Duplicate(dep_name.as_normalized().into())); + } + _ => {} + } } self.add_dependency(dep_name, spec, spec_type); - Ok(()) + Ok(true) } /// Checks if this target contains a specific pypi dependency @@ -232,16 +246,29 @@ &mut self, requirement: &pep508_rs::Requirement, editable: Option<bool>, - ) -> Result<(), DependencyError> { - if self.has_pypi_dependency(requirement, true) { - return Err(DependencyError::Duplicate(requirement.name.to_string())); + dependency_overwrite_behavior: DependencyOverwriteBehavior, + ) -> Result<bool, DependencyError> { + if self.has_pypi_dependency(requirement, false) { + match dependency_overwrite_behavior { + DependencyOverwriteBehavior::OverwriteIfExplicit + if requirement.version_or_url.is_none() => + { + return Ok(false) + } + DependencyOverwriteBehavior::IgnoreDuplicate => return Ok(false), + DependencyOverwriteBehavior::Error => { + return Err(DependencyError::Duplicate(requirement.name.to_string())); + } + _ => {} + } } self.add_pypi_dependency(requirement, editable); - Ok(()) + Ok(true) } } -/// Represents a target selector. Currently we only support explicit platform selection. +/// Represents a target selector. Currently we only support explicit platform +/// selection. #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub enum TargetSelector { // Platform specific configuration @@ -360,8 +387,8 @@ pub struct Targets { default_target: Target, - /// We use an [`IndexMap`] to preserve the order in which the items where defined in the - /// manifest. + /// We use an [`IndexMap`] to preserve the order in which the items were + /// defined in the manifest. targets: IndexMap<TargetSelector, Target>, /// The source location of the target selector in the manifest. @@ -369,7 +396,8 @@ } impl Targets { - /// Constructs a new [`Targets`] from a default target and additional user defined targets. + /// Constructs a new [`Targets`] from a default target and additional user + /// defined targets. pub fn from_default_and_user_defined( default_target: Target, user_defined_targets: IndexMap<PixiSpanned<TargetSelector>, Target>, @@ -400,11 +428,12 @@ &mut self.default_target } - /// Returns all the targets that apply for the given platform. If no platform is specified, only - /// the default target is returned. + /// Returns all the targets that apply for the given platform. If no + /// platform is specified, only the default target is returned. /// - /// Multiple selectors might match for a given platform.
This function + /// returns all of them in order, with the most specific selector first + /// and the default target last. /// /// This also always includes the default target. pub fn resolve( @@ -420,8 +449,9 @@ impl Targets { /// Returns all the targets that apply for the given platform. /// - /// Multiple selectors might match for a given platform. This function returns all of them in - /// order, with the most specific selector first and the default target last. + /// Multiple selectors might match for a given platform. This function + /// returns all of them in order, with the most specific selector first + /// and the default target last. /// /// This also always includes the default target. /// @@ -447,8 +477,8 @@ impl Targets { self.targets.get(target) } - /// Returns the target for the given target selector or the default target if the selector is - /// `None`. + /// Returns the target for the given target selector or the default target + /// if the selector is `None`. pub fn for_opt_target(&self, target: Option<&TargetSelector>) -> Option<&Target> { if let Some(sel) = target { self.targets.get(sel) @@ -457,8 +487,8 @@ impl Targets { } } - /// Returns the target for the given target selector or the default target if no target is - /// specified. + /// Returns the target for the given target selector or the default target + /// if no target is specified. pub fn for_opt_target_mut(&mut self, target: Option<&TargetSelector>) -> Option<&mut Target> { if let Some(sel) = target { self.targets.get_mut(sel) @@ -467,10 +497,11 @@ impl Targets { } } - /// Returns the target for the given target selector or the default target if no target is - /// specified. + /// Returns the target for the given target selector or the default target + /// if no target is specified. /// - /// If a target is specified and it does not exist the default target is returned instead. + /// If a target is specified and it does not exist the default target is + /// returned instead. pub fn for_opt_target_or_default(&self, target: Option<&TargetSelector>) -> &Target { if let Some(sel) = target { self.targets.get(sel).unwrap_or(&self.default_target) @@ -479,8 +510,8 @@ impl Targets { } } - /// Returns a mutable reference to the target for the given target selector or the default - /// target if no target is specified. + /// Returns a mutable reference to the target for the given target selector + /// or the default target if no target is specified. /// /// If a target is specified and it does not exist, it will be created. pub fn for_opt_target_or_default_mut( @@ -523,10 +554,12 @@ impl Targets { #[cfg(test)] mod tests { - use crate::Project; + use std::path::Path; + use insta::assert_snapshot; use itertools::Itertools; - use std::path::Path; + + use crate::Project; #[test] fn test_targets_overwrite_order() { diff --git a/tests/common/builders.rs b/tests/common/builders.rs index 141a81219..6d9250187 100644 --- a/tests/common/builders.rs +++ b/tests/common/builders.rs @@ -47,9 +47,18 @@ pub fn string_from_iter(iter: impl IntoIterator>) -> Vec< /// the CLI execute method and await the result at the same time. pub struct InitBuilder { pub args: init::Args, + pub no_fast_prefix: bool, } impl InitBuilder { + /// Disable using `https://fast.prefix.dev` as the default channel. 
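+ /// Tests that need to observe the stock `conda-forge` default (e.g. `default_channel` in `init_tests.rs`) set this to `true`.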
+ pub fn no_fast_prefix_overwrite(self, no_fast_prefix: bool) -> Self { + Self { + no_fast_prefix, + ..self + } + } + pub fn with_channel(mut self, channel: impl ToString) -> Self { self.args .channels @@ -73,7 +82,17 @@ impl IntoFuture for InitBuilder { type IntoFuture = Pin + 'static>>; fn into_future(self) -> Self::IntoFuture { - init::execute(self.args).boxed_local() + init::execute(init::Args { + channels: if !self.no_fast_prefix { + self.args + .channels + .or_else(|| Some(vec!["https://fast.prefix.dev/conda-forge".to_string()])) + } else { + self.args.channels + }, + ..self.args + }) + .boxed_local() } } diff --git a/tests/common/mod.rs b/tests/common/mod.rs index c0ad8442a..a4dd40dcc 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -199,6 +199,7 @@ impl PixiControl { /// `.await` on the return value. pub fn init(&self) -> InitBuilder { InitBuilder { + no_fast_prefix: false, args: init::Args { path: self.project_path().to_path_buf(), channels: None, @@ -214,6 +215,7 @@ impl PixiControl { /// `.await` on the return value. pub fn init_with_platforms(&self, platforms: Vec) -> InitBuilder { InitBuilder { + no_fast_prefix: false, args: init::Args { path: self.project_path().to_path_buf(), channels: None, diff --git a/tests/init_tests.rs b/tests/init_tests.rs index dc1007197..0c3c68679 100644 --- a/tests/init_tests.rs +++ b/tests/init_tests.rs @@ -1,9 +1,11 @@ mod common; -use crate::common::PixiControl; +use std::str::FromStr; + use pixi::{util::default_channel_config, HasFeatures}; use rattler_conda_types::{Channel, Version}; -use std::str::FromStr; + +use crate::common::PixiControl; #[tokio::test] async fn init_creates_project_manifest() { @@ -27,7 +29,8 @@ async fn init_creates_project_manifest() { ); } -/// Tests that when initializing an empty project with a custom channel it is actually used. +/// Tests that when initializing an empty project with a custom channel it is +/// actually used. #[tokio::test] async fn specific_channel() { let pixi = PixiControl::new().unwrap(); @@ -53,13 +56,14 @@ async fn specific_channel() { ) } -/// Tests that when initializing an empty project the default channel `conda-forge` is used. +/// Tests that when initializing an empty project the default channel +/// `conda-forge` is used. 
#[tokio::test] async fn default_channel() { let pixi = PixiControl::new().unwrap(); // Init a new project - pixi.init().await.unwrap(); + pixi.init().no_fast_prefix_overwrite(true).await.unwrap(); // Load the project let project = pixi.project().unwrap(); @@ -81,9 +85,9 @@ async fn default_channel() { // let mut pypi_config = PyPIConfig::default(); // pypi_config.index_url = Some(index_url.clone()); // pypi_config.extra_index_urls = vec![index_url.clone()]; -// // pypi_config.keyring_provider = Some(pixi::config::KeyringProvider::Subprocess); -// let mut config = Config::default(); -// config.pypi_config = pypi_config; +// // pypi_config.keyring_provider = +// Some(pixi::config::KeyringProvider::Subprocess); let mut config = +// Config::default(); config.pypi_config = pypi_config; // pixi.init().await.unwrap(); // // Load the project diff --git a/tests/install_tests.rs b/tests/install_tests.rs index 768cb29d3..31c042c37 100644 --- a/tests/install_tests.rs +++ b/tests/install_tests.rs @@ -1,25 +1,32 @@ mod common; -use std::fs::{create_dir_all, File}; -use std::io::Write; -use std::path::{Path, PathBuf}; -use std::str::FromStr; +use std::{ + fs::{create_dir_all, File}, + io::Write, + path::{Path, PathBuf}, + str::FromStr, +}; -use crate::common::builders::{string_from_iter, HasDependencyConfig}; -use crate::common::package_database::{Package, PackageDatabase}; use common::{LockFileExt, PixiControl}; -use pixi::cli::run::Args; -use pixi::cli::{run, LockFileUsageArgs}; -use pixi::config::{Config, DetachedEnvironments}; -use pixi::consts::{DEFAULT_ENVIRONMENT_NAME, PIXI_UV_INSTALLER}; -use pixi::{consts, FeatureName}; +use pixi::{ + cli::{run, run::Args, LockFileUsageArgs}, + config::{Config, DetachedEnvironments}, + consts, + consts::{DEFAULT_ENVIRONMENT_NAME, PIXI_UV_INSTALLER}, + FeatureName, +}; use rattler_conda_types::Platform; use serial_test::serial; use tempfile::TempDir; use uv_interpreter::PythonEnvironment; -/// Should add a python version to the environment and lock file that matches the specified version -/// and run it +use crate::common::{ + builders::{string_from_iter, HasDependencyConfig}, + package_database::{Package, PackageDatabase}, +}; + +/// Should add a python version to the environment and lock file that matches +/// the specified version and run it #[tokio::test] #[serial] #[cfg_attr(not(feature = "slow_integration_tests"), ignore)] @@ -59,14 +66,15 @@ async fn install_run_python() { /// This is a test to check that creating incremental lock files works. /// -/// It works by using a fake channel that contains two packages: `foo` and `bar`. `foo` depends on -/// `bar` so adding a dependency on `foo` pulls in `bar`. Initially only version `1` of both -/// packages is added and a project is created that depends on `foo >=1`. This select `foo@1` and -/// `bar@1`. -/// Next, version 2 for both packages is added and the requirement in the project is updated to -/// `foo >=2`, this should then select `foo@1` but `bar` should remain on version `1` even though -/// version `2` is available. This is because `bar` was previously locked to version `1` and it is -/// still a valid solution to keep using version `1` of bar. +/// It works by using a fake channel that contains two packages: `foo` and +/// `bar`. `foo` depends on `bar` so adding a dependency on `foo` pulls in +/// `bar`. Initially only version `1` of both packages is added and a project is +/// created that depends on `foo >=1`. This select `foo@1` and `bar@1`. 
+/// Next, version 2 for both packages is added and the requirement in the
+/// project is updated to `foo >=2`. This should then select `foo@2`, but `bar`
+/// should remain on version `1` even though version `2` is available. This is
+/// because `bar` was previously locked to version `1` and it is still a valid
+/// solution to keep using version `1` of `bar`.
 #[tokio::test]
 async fn test_incremental_lock_file() {
     let mut package_database = PackageDatabase::default();
@@ -114,8 +122,8 @@ async fn test_incremental_lock_file() {
         .await
         .unwrap();
 
-    // Force using version 2 of `foo`. This should force `foo` to version `2` but `bar` should still
-    // remaining on `1` because it was previously locked
+    // Force using version 2 of `foo`. This should force `foo` to version `2`, but
+    // `bar` should still remain on `1` because it was previously locked.
     pixi.add("foo >=2").await.unwrap();
 
     let lock = pixi.lock_file().await.unwrap();
@@ -155,7 +163,8 @@ async fn install_locked_with_config() {
         "python==3.10.0"
     } else if cfg!(target_os = "windows") {
         // Abusing this test to also test the `add` function of older version of python
-        // Before this wasn't possible because uv queried the python interpreter, even without pypi dependencies.
+        // Before this wasn't possible because uv queried the python interpreter, even
+        // without pypi dependencies.
         "python==3.6.0"
     } else {
         "python==2.7.15"
@@ -292,6 +301,11 @@ async fn pypi_reinstall_python() {
         .set_type(pixi::DependencyType::PypiDependency)
         .await
         .unwrap();
+    assert!(pixi.lock_file().await.unwrap().contains_match_spec(
+        DEFAULT_ENVIRONMENT_NAME,
+        Platform::current(),
+        "python==3.11"
+    ));
 
     let prefix = pixi.default_env_path().unwrap();
 
@@ -302,19 +316,25 @@ async fn pypi_reinstall_python() {
     let installed_311 = uv_installer::SitePackages::from_executable(&env).unwrap();
     assert!(installed_311.iter().count() > 0);
 
-    // sleep for a few seconds to make sure we can remove stuff (Windows file system issues)
+    // sleep for a few seconds to make sure we can remove stuff (Windows file system
+    // issues)
     #[cfg(target_os = "windows")]
     tokio::time::sleep(std::time::Duration::from_secs(2)).await;
 
     // Reinstall python
     pixi.add("python==3.12").with_install(true).await.unwrap();
+    assert!(pixi.lock_file().await.unwrap().contains_match_spec(
+        DEFAULT_ENVIRONMENT_NAME,
+        Platform::current(),
+        "python==3.12"
+    ));
 
     // Check if site-packages has entries, should be empty now
     let installed_312 = uv_installer::SitePackages::from_executable(&env).unwrap();
     if cfg!(not(target_os = "windows")) {
         // On non-windows the site-packages should be empty
-        assert!(installed_312.iter().count() == 0);
+        assert_eq!(installed_312.iter().count(), 0);
     } else {
         // Windows should still contain some packages
         // This is because the site-packages is not prefixed with the python version
@@ -365,7 +385,8 @@ async fn test_channels_changed() {
     package_database_a.add_package(Package::build("bar", "2").finish());
     let channel_a = package_database_a.into_channel().await.unwrap();
 
-    // Write another channel with a package `bar` with only one version but another one.
+    // Write another channel that also contains a single version of package `bar`,
+    // but a different one.
     let mut package_database_b = PackageDatabase::default();
     package_database_b.add_package(Package::build("bar", "1").finish());
     let channel_b = package_database_b.into_channel().await.unwrap();
@@ -385,7 +406,8 @@ async fn test_channels_changed() {
     ))
     .unwrap();
 
-    // Get an up-to-date lockfile and verify that bar version 2 was selected from channel `a`.
+    // Get an up-to-date lockfile and verify that bar version 2 was selected from
+    // channel `a`.
     let lock_file = pixi.up_to_date_lock_file().await.unwrap();
     assert!(lock_file.contains_match_spec(DEFAULT_ENVIRONMENT_NAME, platform, "bar ==2"));
 
@@ -405,7 +427,8 @@ async fn test_channels_changed() {
     ))
     .unwrap();
 
-    // Get an up-to-date lockfile and verify that bar version 1 was now selected from channel `b`.
+    // Get an up-to-date lockfile and verify that bar version 1 was now selected
+    // from channel `b`.
     let lock_file = pixi.up_to_date_lock_file().await.unwrap();
     assert!(lock_file.contains_match_spec(DEFAULT_ENVIRONMENT_NAME, platform, "bar ==1"));
 }
@@ -416,8 +439,7 @@ async fn test_channels_changed() {
 async fn install_conda_meta_history() {
     let pixi = PixiControl::new().unwrap();
     pixi.init().await.unwrap();
-    // Add and update lockfile with this version of python
-    pixi.add("python==3.11").with_install(true).await.unwrap();
+    pixi.install().await.unwrap();
 
     let prefix = pixi.default_env_path().unwrap();
     let conda_meta_history_file = prefix.join("conda-meta/history");
@@ -457,12 +479,11 @@ async fn minimal_lockfile_update_pypi() {
         .await
         .unwrap();
 
-    // Check the locked click dependencies to see if it was only minimally updated
+    // `click` should not be updated to a higher version.
    let lock = pixi.lock_file().await.unwrap();
     assert!(lock.contains_pep508_requirement(
         DEFAULT_ENVIRONMENT_NAME,
         Platform::current(),
-        // With a fresh solve this would be bumped to `>=8.0.0`
         pep508_rs::Requirement::from_str("click==7.1.2").unwrap()
     ));
 }
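
For context on how the new `InitBuilder` toggle is meant to be exercised, here is a
minimal, illustrative sketch of a test that opts out of the fast.prefix.dev default
channel. It assumes the `PixiControl` harness from `tests/common/mod.rs` shown in
the diff above; the test name and the final assertion are hypothetical and not part
of this patch:

mod common;

use pixi::{util::default_channel_config, HasFeatures};
use rattler_conda_types::Channel;

use crate::common::PixiControl;

#[tokio::test]
async fn init_uses_plain_conda_forge_when_opted_out() {
    let pixi = PixiControl::new().unwrap();

    // Without the toggle, `InitBuilder::into_future` injects
    // `https://fast.prefix.dev/conda-forge` whenever no channel was given;
    // opting out lets the test observe the plain `conda-forge` default.
    pixi.init().no_fast_prefix_overwrite(true).await.unwrap();

    // Hypothetical assertion mirroring `default_channel` above: the manifest
    // should now list the plain `conda-forge` channel.
    let project = pixi.project().unwrap();
    let expected = Channel::from_str("conda-forge", &default_channel_config()).unwrap();
    assert!(project.default_environment().channels().contains(&&expected));
}

Tests that assert on the default channel (such as `default_channel` above) need this
opt-out, since the builder would otherwise substitute the mirror URL and the
assertion against plain `conda-forge` would fail.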