From 85a7d458f9be9fbc67b5f3fa39075b9d46094c0d Mon Sep 17 00:00:00 2001 From: Mathieu Kniewallner Date: Mon, 11 Nov 2024 15:58:31 +0100 Subject: [PATCH 01/23] docs(tools): mention how to use extras (#8972) ## Summary Figured this could be helpful to mention in the documentation, as it might not be obvious that this is possible. ## Test Plan Tested the commands locally. --- docs/guides/tools.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/guides/tools.md b/docs/guides/tools.md index bd9d8c64489c..f6022940cb91 100644 --- a/docs/guides/tools.md +++ b/docs/guides/tools.md @@ -93,6 +93,20 @@ $ uvx --from 'ruff>0.2.0,<0.3.0' ruff check Note the `@` syntax cannot be used for anything other than an exact version. +## Requesting extras + +The `--from` option can be used to run a tool with extras: + +```console +$ uvx --from 'mypy[faster-cache,reports]' mypy --xml-report mypy_report +``` + +This can also be combined with version selection: + +```console +$ uvx --from 'mypy[faster-cache,reports]==1.13.0' mypy --xml-report mypy_report +``` + ## Requesting different sources The `--from` option can also be used to install from alternative sources. From cc5a0dfdbf555fd8ede4a422c6540e9a8d6db7df Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 10:29:39 -0600 Subject: [PATCH 02/23] Update pre-commit dependencies (#9003) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46470681dcc4..05c69c8b6270 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,12 +7,12 @@ exclude: | repos: - repo: https://github.com/abravalheri/validate-pyproject - rev: v0.22 + rev: v0.23 hooks: - id: validate-pyproject - repo: https://github.com/crate-ci/typos - rev: v1.27.0 + rev: v1.27.3 hooks: - id: typos @@ -42,7 +42,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 + rev: v0.7.3 hooks: - id: ruff-format - id: ruff From 760cf82ee38a004eacd51670d1a769c61f4bbaa5 Mon Sep 17 00:00:00 2001 From: konsti Date: Mon, 11 Nov 2024 18:38:47 +0100 Subject: [PATCH 03/23] Use Python syntax for `value_type` consistently (#9017) Spotted that when looking though `value_type =` declarations. --- crates/uv-settings/src/settings.rs | 4 ++-- docs/reference/settings.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index bab2d0d1c02a..e7e1e2090edd 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -527,7 +527,7 @@ pub struct ResolverInstallerOptions { /// are already installed. #[option( default = "[]", - value_type = "Vec", + value_type = "list[str]", example = r#" no-build-isolation-package = ["package1", "package2"] "# @@ -909,7 +909,7 @@ pub struct PipOptions { /// are already installed. #[option( default = "[]", - value_type = "Vec", + value_type = "list[str]", example = r#" no-build-isolation-package = ["package1", "package2"] "# diff --git a/docs/reference/settings.md b/docs/reference/settings.md index d8801f5598da..7e07811452fc 100644 --- a/docs/reference/settings.md +++ b/docs/reference/settings.md @@ -1001,7 +1001,7 @@ are already installed. **Default value**: `[]` -**Type**: `Vec` +**Type**: `list[str]` **Example usage**: @@ -2246,7 +2246,7 @@ are already installed. 
**Default value**: `[]` -**Type**: `Vec` +**Type**: `list[str]` **Example usage**: From 769afa96a4115dd710187eb22b6d3cfe6a534bfa Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 12:54:35 -0500 Subject: [PATCH 04/23] Avoid retraversing filesystem when testing exact glob matches (#9022) ## Summary When testing for exact inclusion, we can just test the glob directly. There's no need to re-traverse the filesystem to find it. --- crates/uv-workspace/src/workspace.rs | 34 ++++++++++------------------ 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index 7b2fba0f09ec..ea07f3757fb4 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -1312,17 +1312,12 @@ fn is_excluded_from_workspace( let absolute_glob = PathBuf::from(glob::Pattern::escape( workspace_root.simplified().to_string_lossy().as_ref(), )) - .join(exclude_glob.as_str()) - .to_string_lossy() - .to_string(); - for excluded_root in glob(&absolute_glob) - .map_err(|err| WorkspaceError::Pattern(absolute_glob.to_string(), err))? - { - let excluded_root = excluded_root - .map_err(|err| WorkspaceError::Glob(absolute_glob.to_string(), err))?; - if excluded_root == project_path.simplified() { - return Ok(true); - } + .join(exclude_glob.as_str()); + let absolute_glob = absolute_glob.to_string_lossy(); + let exclude_pattern = glob::Pattern::new(&absolute_glob) + .map_err(|err| WorkspaceError::Pattern(absolute_glob.to_string(), err))?; + if exclude_pattern.matches_path(project_path) { + return Ok(true); } } Ok(false) @@ -1338,17 +1333,12 @@ fn is_included_in_workspace( let absolute_glob = PathBuf::from(glob::Pattern::escape( workspace_root.simplified().to_string_lossy().as_ref(), )) - .join(member_glob.as_str()) - .to_string_lossy() - .to_string(); - for member_root in glob(&absolute_glob) - .map_err(|err| WorkspaceError::Pattern(absolute_glob.to_string(), err))? 
- { - let member_root = - member_root.map_err(|err| WorkspaceError::Glob(absolute_glob.to_string(), err))?; - if member_root == project_path { - return Ok(true); - } + .join(member_glob.as_str()); + let absolute_glob = absolute_glob.to_string_lossy(); + let include_pattern = glob::Pattern::new(&absolute_glob) + .map_err(|err| WorkspaceError::Pattern(absolute_glob.to_string(), err))?; + if include_pattern.matches_path(project_path) { + return Ok(true); } } Ok(false) From 58bc604b6e6f8bff3aade085c970f90aff370ba8 Mon Sep 17 00:00:00 2001 From: Zanie Blue Date: Mon, 11 Nov 2024 12:41:33 -0600 Subject: [PATCH 05/23] Update format of environment variable reference (#9018) - Sorts the variables - Separates `UV_` variables from others - Uses headings so the toc is available --- .../uv-dev/src/generate_env_vars_reference.rs | 43 +- docs/configuration/environment.md | 707 +++++++++++++----- 2 files changed, 541 insertions(+), 209 deletions(-) diff --git a/crates/uv-dev/src/generate_env_vars_reference.rs b/crates/uv-dev/src/generate_env_vars_reference.rs index 9e6467b01be5..0f702a2306cf 100644 --- a/crates/uv-dev/src/generate_env_vars_reference.rs +++ b/crates/uv-dev/src/generate_env_vars_reference.rs @@ -2,6 +2,7 @@ use anyhow::bail; use pretty_assertions::StrComparison; +use std::collections::BTreeSet; use std::path::PathBuf; use uv_static::EnvVars; @@ -71,30 +72,32 @@ fn generate() -> String { let mut output = String::new(); output.push_str("# Environment variables\n\n"); - output.push_str("uv respects the following environment variables:\n\n"); - - for (var, doc) in EnvVars::metadata() { - // Remove empty lines and ddd two spaces to the beginning from the second line. - let doc = doc - .lines() - .enumerate() - .filter(|(_, line)| !line.trim().is_empty()) - .map(|(i, line)| { - if i == 0 { - line.to_string() - } else { - format!(" {line}") - } - }) - .collect::>() - .join("\n"); - output.push_str(&format!( - "- [`{var}`](#{var}): {doc}\n" - )); + + // Partition and sort environment variables into UV_ and external variables. + let (uv_vars, external_vars): (BTreeSet<_>, BTreeSet<_>) = EnvVars::metadata() + .iter() + .partition(|(var, _)| var.starts_with("UV_")); + + output.push_str("uv defines and respects the following environment variables:\n\n"); + + for (var, doc) in uv_vars { + output.push_str(&render(var, doc)); + } + + output.push_str("\n\n## Externally defined variables\n\n"); + output.push_str("uv also reads the following externally defined environment variables:\n\n"); + + for (var, doc) in external_vars { + output.push_str(&render(var, doc)); } output } +/// Render an environment variable and its documentation. +fn render(var: &str, doc: &str) -> String { + format!("### `{var}`\n\n{doc}\n\n") +} + #[cfg(test)] mod tests; diff --git a/docs/configuration/environment.md b/docs/configuration/environment.md index 0d0c1ccd8017..b30ac7520dea 100644 --- a/docs/configuration/environment.md +++ b/docs/configuration/environment.md @@ -1,191 +1,520 @@ # Environment variables -uv respects the following environment variables: - -- [`UV_DEFAULT_INDEX`](#UV_DEFAULT_INDEX): Equivalent to the `--default-index` command-line argument. If set, uv will use - this URL as the default index when searching for packages. -- [`UV_INDEX`](#UV_INDEX): Equivalent to the `--index` command-line argument. If set, uv will use this - space-separated list of URLs as additional indexes when searching for packages. -- [`UV_INDEX_URL`](#UV_INDEX_URL): Equivalent to the `--index-url` command-line argument. 
If set, uv will use this - URL as the default index when searching for packages. - (Deprecated: use `UV_DEFAULT_INDEX` instead.) -- [`UV_EXTRA_INDEX_URL`](#UV_EXTRA_INDEX_URL): Equivalent to the `--extra-index-url` command-line argument. If set, uv will - use this space-separated list of URLs as additional indexes when searching for packages. - (Deprecated: use `UV_INDEX` instead.) -- [`UV_FIND_LINKS`](#UV_FIND_LINKS): Equivalent to the `--find-links` command-line argument. If set, uv will use this - comma-separated list of additional locations to search for packages. -- [`UV_CACHE_DIR`](#UV_CACHE_DIR): Equivalent to the `--cache-dir` command-line argument. If set, uv will use this - directory for caching instead of the default cache directory. -- [`UV_NO_CACHE`](#UV_NO_CACHE): Equivalent to the `--no-cache` command-line argument. If set, uv will not use the - cache for any operations. -- [`UV_RESOLUTION`](#UV_RESOLUTION): Equivalent to the `--resolution` command-line argument. For example, if set to - `lowest-direct`, uv will install the lowest compatible versions of all direct dependencies. -- [`UV_PRERELEASE`](#UV_PRERELEASE): Equivalent to the `--prerelease` command-line argument. For example, if set to - `allow`, uv will allow pre-release versions for all dependencies. -- [`UV_SYSTEM_PYTHON`](#UV_SYSTEM_PYTHON): Equivalent to the `--system` command-line argument. If set to `true`, uv will - use the first Python interpreter found in the system `PATH`. - WARNING: `UV_SYSTEM_PYTHON=true` is intended for use in continuous integration (CI) - or containerized environments and should be used with caution, as modifying the system - Python can lead to unexpected behavior. -- [`UV_PYTHON`](#UV_PYTHON): Equivalent to the `--python` command-line argument. If set to a path, uv will use - this Python interpreter for all operations. -- [`UV_BREAK_SYSTEM_PACKAGES`](#UV_BREAK_SYSTEM_PACKAGES): Equivalent to the `--break-system-packages` command-line argument. If set to `true`, - uv will allow the installation of packages that conflict with system-installed packages. - WARNING: `UV_BREAK_SYSTEM_PACKAGES=true` is intended for use in continuous integration - (CI) or containerized environments and should be used with caution, as modifying the system - Python can lead to unexpected behavior. -- [`UV_NATIVE_TLS`](#UV_NATIVE_TLS): Equivalent to the `--native-tls` command-line argument. If set to `true`, uv will - use the system's trust store instead of the bundled `webpki-roots` crate. -- [`UV_INDEX_STRATEGY`](#UV_INDEX_STRATEGY): Equivalent to the `--index-strategy` command-line argument. For example, if - set to `unsafe-any-match`, uv will consider versions of a given package available across all index - URLs, rather than limiting its search to the first index URL that contains the package. -- [`UV_REQUIRE_HASHES`](#UV_REQUIRE_HASHES): Equivalent to the `--require-hashes` command-line argument. If set to `true`, - uv will require that all dependencies have a hash specified in the requirements file. -- [`UV_CONSTRAINT`](#UV_CONSTRAINT): Equivalent to the `--constraint` command-line argument. If set, uv will use this - file as the constraints file. Uses space-separated list of files. -- [`UV_BUILD_CONSTRAINT`](#UV_BUILD_CONSTRAINT): Equivalent to the `--build-constraint` command-line argument. If set, uv will use this file - as constraints for any source distribution builds. Uses space-separated list of files. -- [`UV_OVERRIDE`](#UV_OVERRIDE): Equivalent to the `--override` command-line argument. 
If set, uv will use this file - as the overrides file. Uses space-separated list of files. -- [`UV_LINK_MODE`](#UV_LINK_MODE): Equivalent to the `--link-mode` command-line argument. If set, uv will use this as - a link mode. -- [`UV_NO_BUILD_ISOLATION`](#UV_NO_BUILD_ISOLATION): Equivalent to the `--no-build-isolation` command-line argument. If set, uv will - skip isolation when building source distributions. -- [`UV_CUSTOM_COMPILE_COMMAND`](#UV_CUSTOM_COMPILE_COMMAND): Equivalent to the `--custom-compile-command` command-line argument. - Used to override uv in the output header of the `requirements.txt` files generated by - `uv pip compile`. Intended for use-cases in which `uv pip compile` is called from within a wrapper - script, to include the name of the wrapper script in the output file. -- [`UV_KEYRING_PROVIDER`](#UV_KEYRING_PROVIDER): Equivalent to the `--keyring-provider` command-line argument. If set, uv - will use this value as the keyring provider. -- [`UV_CONFIG_FILE`](#UV_CONFIG_FILE): Equivalent to the `--config-file` command-line argument. Expects a path to a - local `uv.toml` file to use as the configuration file. -- [`UV_NO_CONFIG`](#UV_NO_CONFIG): Equivalent to the `--no-config` command-line argument. If set, uv will not read - any configuration files from the current directory, parent directories, or user configuration - directories. -- [`UV_EXCLUDE_NEWER`](#UV_EXCLUDE_NEWER): Equivalent to the `--exclude-newer` command-line argument. If set, uv will - exclude distributions published after the specified date. -- [`UV_PYTHON_PREFERENCE`](#UV_PYTHON_PREFERENCE): Equivalent to the `--python-preference` command-line argument. Whether uv - should prefer system or managed Python versions. -- [`UV_PYTHON_DOWNLOADS`](#UV_PYTHON_DOWNLOADS): Equivalent to the - [`python-downloads`](../reference/settings.md#python-downloads) setting and, when disabled, the - `--no-python-downloads` option. Whether uv should allow Python downloads. -- [`UV_COMPILE_BYTECODE`](#UV_COMPILE_BYTECODE): Equivalent to the `--compile-bytecode` command-line argument. If set, uv - will compile Python source files to bytecode after installation. -- [`UV_PUBLISH_URL`](#UV_PUBLISH_URL): Equivalent to the `--publish-url` command-line argument. The URL of the upload - endpoint of the index to use with `uv publish`. -- [`UV_PUBLISH_TOKEN`](#UV_PUBLISH_TOKEN): Equivalent to the `--token` command-line argument in `uv publish`. If set, uv - will use this token (with the username `__token__`) for publishing. -- [`UV_PUBLISH_USERNAME`](#UV_PUBLISH_USERNAME): Equivalent to the `--username` command-line argument in `uv publish`. If - set, uv will use this username for publishing. -- [`UV_PUBLISH_PASSWORD`](#UV_PUBLISH_PASSWORD): Equivalent to the `--password` command-line argument in `uv publish`. If - set, uv will use this password for publishing. -- [`UV_PUBLISH_CHECK_URL`](#UV_PUBLISH_CHECK_URL): Don't upload a file if it already exists on the index. The value is the URL of the index. -- [`UV_NO_SYNC`](#UV_NO_SYNC): Equivalent to the `--no-sync` command-line argument. If set, uv will skip updating - the environment. -- [`UV_LOCKED`](#UV_LOCKED): Equivalent to the `--locked` command-line argument. If set, uv will assert that the - `uv.lock` remains unchanged. -- [`UV_FROZEN`](#UV_FROZEN): Equivalent to the `--frozen` command-line argument. If set, uv will run without - updating the `uv.lock` file. -- [`UV_PREVIEW`](#UV_PREVIEW): Equivalent to the `--preview` argument. Enables preview mode. 
-- [`UV_GITHUB_TOKEN`](#UV_GITHUB_TOKEN): Equivalent to the `--token` argument for self update. A GitHub token for authentication. -- [`UV_VERIFY_HASHES`](#UV_VERIFY_HASHES): Equivalent to the `--verify-hashes` argument. Verifies included hashes. -- [`UV_INSECURE_HOST`](#UV_INSECURE_HOST): Equivalent to the `--allow-insecure-host` argument. -- [`UV_CONCURRENT_DOWNLOADS`](#UV_CONCURRENT_DOWNLOADS): Sets the maximum number of in-flight concurrent downloads that uv will - perform at any given time. -- [`UV_CONCURRENT_BUILDS`](#UV_CONCURRENT_BUILDS): Sets the maximum number of source distributions that uv will build - concurrently at any given time. -- [`UV_CONCURRENT_INSTALLS`](#UV_CONCURRENT_INSTALLS): Controls the number of threads used when installing and unzipping - packages. -- [`UV_NO_PROGRESS`](#UV_NO_PROGRESS): Disables all progress output. For example, spinners and progress bars. -- [`UV_TOOL_DIR`](#UV_TOOL_DIR): Specifies the directory where uv stores managed tools. -- [`UV_TOOL_BIN_DIR`](#UV_TOOL_BIN_DIR): Specifies the "bin" directory for installing tool executables. -- [`UV_PROJECT_ENVIRONMENT`](#UV_PROJECT_ENVIRONMENT): Specifies the path to the directory to use for a project virtual environment. - See the [project documentation](../concepts/projects.md#configuring-the-project-environment-path) - for more details. -- [`UV_PYTHON_BIN_DIR`](#UV_PYTHON_BIN_DIR): Specifies the directory to place links to installed, managed Python executables. -- [`UV_PYTHON_INSTALL_DIR`](#UV_PYTHON_INSTALL_DIR): Specifies the directory for storing managed Python installations. -- [`UV_PYTHON_INSTALL_MIRROR`](#UV_PYTHON_INSTALL_MIRROR): Managed Python installations are downloaded from - [`python-build-standalone`](https://github.com/indygreg/python-build-standalone). - This variable can be set to a mirror URL to use a different source for Python installations. - The provided URL will replace `https://github.com/indygreg/python-build-standalone/releases/download` in, e.g., - `https://github.com/indygreg/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`. - Distributions can be read from a local directory by using the `file://` URL scheme. -- [`UV_PYPY_INSTALL_MIRROR`](#UV_PYPY_INSTALL_MIRROR): Managed PyPy installations are downloaded from - [python.org](https://downloads.python.org/). This variable can be set to a mirror URL to use a - different source for PyPy installations. The provided URL will replace - `https://downloads.python.org/pypy` in, e.g., - `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`. - Distributions can be read from a local directory by using the `file://` URL scheme. -- [`UV_NO_WRAP`](#UV_NO_WRAP): Use to disable line wrapping for diagnostics. -- [`UV_STACK_SIZE`](#UV_STACK_SIZE): Use to control the stack size used by uv. Typically more relevant for Windows in debug mode. -- [`UV_INDEX_{name}_USERNAME`](#UV_INDEX_{name}_USERNAME): Generates the environment variable key for the HTTP Basic authentication username. -- [`UV_INDEX_{name}_PASSWORD`](#UV_INDEX_{name}_PASSWORD): Generates the environment variable key for the HTTP Basic authentication password. -- [`XDG_CONFIG_DIRS`](#XDG_CONFIG_DIRS): Path to system-level configuration directory on Unix systems. -- [`SYSTEMDRIVE`](#SYSTEMDRIVE): Path to system-level configuration directory on Windows systems. -- [`XDG_CONFIG_HOME`](#XDG_CONFIG_HOME): Path to user-level configuration directory on Unix systems. 
-- [`XDG_CACHE_HOME`](#XDG_CACHE_HOME): Path to cache directory on Unix systems. -- [`XDG_DATA_HOME`](#XDG_DATA_HOME): Path to directory for storing managed Python installations and tools. -- [`XDG_BIN_HOME`](#XDG_BIN_HOME): Path to directory where executables are installed. -- [`SSL_CERT_FILE`](#SSL_CERT_FILE): Custom certificate bundle file path for SSL connections. -- [`SSL_CLIENT_CERT`](#SSL_CLIENT_CERT): If set, uv will use this file for mTLS authentication. - This should be a single file containing both the certificate and the private key in PEM format. -- [`HTTP_PROXY`](#HTTP_PROXY): Proxy for HTTP requests. -- [`HTTPS_PROXY`](#HTTPS_PROXY): Proxy for HTTPS requests. -- [`ALL_PROXY`](#ALL_PROXY): General proxy for all network requests. -- [`UV_HTTP_TIMEOUT`](#UV_HTTP_TIMEOUT): Timeout (in seconds) for HTTP requests. (default: 30 s) -- [`UV_REQUEST_TIMEOUT`](#UV_REQUEST_TIMEOUT): Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. -- [`HTTP_TIMEOUT`](#HTTP_TIMEOUT): Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. -- [`PYC_INVALIDATION_MODE`](#PYC_INVALIDATION_MODE): The validation modes to use when run with `--compile`. - See [`PycInvalidationMode`](https://docs.python.org/3/library/py_compile.html#py_compile.PycInvalidationMode). -- [`VIRTUAL_ENV`](#VIRTUAL_ENV): Used to detect an activated virtual environment. -- [`CONDA_PREFIX`](#CONDA_PREFIX): Used to detect an activated Conda environment. -- [`CONDA_DEFAULT_ENV`](#CONDA_DEFAULT_ENV): Used to determine if an active Conda environment is the base environment or not. -- [`VIRTUAL_ENV_DISABLE_PROMPT`](#VIRTUAL_ENV_DISABLE_PROMPT): If set to `1` before a virtual environment is activated, then the - virtual environment name will not be prepended to the terminal prompt. -- [`PROMPT`](#PROMPT): Used to detect the use of the Windows Command Prompt (as opposed to PowerShell). -- [`NU_VERSION`](#NU_VERSION): Used to detect `NuShell` usage. -- [`FISH_VERSION`](#FISH_VERSION): Used to detect Fish shell usage. -- [`BASH_VERSION`](#BASH_VERSION): Used to detect Bash shell usage. -- [`ZSH_VERSION`](#ZSH_VERSION): Used to detect Zsh shell usage. -- [`ZDOTDIR`](#ZDOTDIR): Used to determine which `.zshenv` to use when Zsh is being used. -- [`KSH_VERSION`](#KSH_VERSION): Used to detect Ksh shell usage. -- [`MACOSX_DEPLOYMENT_TARGET`](#MACOSX_DEPLOYMENT_TARGET): Used with `--python-platform macos` and related variants to set the - deployment target (i.e., the minimum supported macOS version). - Defaults to `12.0`, the least-recent non-EOL macOS version at time of writing. -- [`NO_COLOR`](#NO_COLOR): Disables colored output (takes precedence over `FORCE_COLOR`). - See [no-color.org](https://no-color.org). -- [`FORCE_COLOR`](#FORCE_COLOR): Forces colored output regardless of terminal support. - See [force-color.org](https://force-color.org). -- [`CLICOLOR_FORCE`](#CLICOLOR_FORCE): Use to control color via `anstyle`. -- [`PATH`](#PATH): The standard `PATH` env var. -- [`HOME`](#HOME): The standard `HOME` env var. -- [`SHELL`](#SHELL): The standard `SHELL` posix env var. -- [`PWD`](#PWD): The standard `PWD` posix env var. -- [`LOCALAPPDATA`](#LOCALAPPDATA): Used to look for Microsoft Store Pythons installations. -- [`GITHUB_ACTIONS`](#GITHUB_ACTIONS): Used for trusted publishing via `uv publish`. -- [`ACTIONS_ID_TOKEN_REQUEST_URL`](#ACTIONS_ID_TOKEN_REQUEST_URL): Used for trusted publishing via `uv publish`. Contains the oidc token url. 
-- [`ACTIONS_ID_TOKEN_REQUEST_TOKEN`](#ACTIONS_ID_TOKEN_REQUEST_TOKEN): Used for trusted publishing via `uv publish`. Contains the oidc request token. -- [`PYTHONPATH`](#PYTHONPATH): Adds directories to Python module search path (e.g., `PYTHONPATH=/path/to/modules`). -- [`NETRC`](#NETRC): Use to set the .netrc file location. -- [`PAGER`](#PAGER): The standard `PAGER` posix env var. Used by `uv` to configure the appropriate pager. -- [`JPY_SESSION_NAME`](#JPY_SESSION_NAME): Used to detect when running inside a Jupyter notebook. -- [`TRACING_DURATIONS_FILE`](#TRACING_DURATIONS_FILE): Use to create the tracing durations file via the `tracing-durations-export` feature. -- [`RUST_LOG`](#RUST_LOG): If set, uv will use this value as the log level for its `--verbose` output. Accepts - any filter compatible with the `tracing_subscriber` crate. - For example: - * `RUST_LOG=uv=debug` is the equivalent of adding `--verbose` to the command line - * `RUST_LOG=trace` will enable trace-level logging. - See the [tracing documentation](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#example-syntax) - for more. -- [`UV_ENV_FILE`](#UV_ENV_FILE): `.env` files from which to load environment variables when executing `uv run` commands. -- [`UV_NO_ENV_FILE`](#UV_NO_ENV_FILE): Ignore `.env` files when executing `uv run` commands. -- [`UV_INSTALLER_GITHUB_BASE_URL`](#UV_INSTALLER_GITHUB_BASE_URL): The URL from which to download uv using the standalone installer and `self update` feature, - in lieu of the default GitHub URL. -- [`UV_INSTALLER_GHE_BASE_URL`](#UV_INSTALLER_GHE_BASE_URL): The URL from which to download uv using the standalone installer and `self update` feature, - in lieu of the default GitHub Enterprise URL. -- [`UV_INSTALL_DIR`](#UV_INSTALL_DIR): The directory in which to install uv using the standalone installer and `self update` feature. - Defaults to `~/.local/bin`. -- [`UV_UNMANAGED_INSTALL`](#UV_UNMANAGED_INSTALL): Used ephemeral environments like CI to install uv to a specific path while preventing - the installer from modifying shell profiles or environment variables. -- [`INSTALLER_NO_MODIFY_PATH`](#INSTALLER_NO_MODIFY_PATH): Avoid modifying the `PATH` environment variable when installing uv using the standalone - installer and `self update` feature. +uv defines and respects the following environment variables: + +### `UV_BREAK_SYSTEM_PACKAGES` + +Equivalent to the `--break-system-packages` command-line argument. If set to `true`, +uv will allow the installation of packages that conflict with system-installed packages. +WARNING: `UV_BREAK_SYSTEM_PACKAGES=true` is intended for use in continuous integration +(CI) or containerized environments and should be used with caution, as modifying the system +Python can lead to unexpected behavior. + +### `UV_BUILD_CONSTRAINT` + +Equivalent to the `--build-constraint` command-line argument. If set, uv will use this file +as constraints for any source distribution builds. Uses space-separated list of files. + +### `UV_CACHE_DIR` + +Equivalent to the `--cache-dir` command-line argument. If set, uv will use this +directory for caching instead of the default cache directory. + +### `UV_COMPILE_BYTECODE` + +Equivalent to the `--compile-bytecode` command-line argument. If set, uv +will compile Python source files to bytecode after installation. + +### `UV_CONCURRENT_BUILDS` + +Sets the maximum number of source distributions that uv will build +concurrently at any given time. 
+ +### `UV_CONCURRENT_DOWNLOADS` + +Sets the maximum number of in-flight concurrent downloads that uv will +perform at any given time. + +### `UV_CONCURRENT_INSTALLS` + +Controls the number of threads used when installing and unzipping +packages. + +### `UV_CONFIG_FILE` + +Equivalent to the `--config-file` command-line argument. Expects a path to a +local `uv.toml` file to use as the configuration file. + +### `UV_CONSTRAINT` + +Equivalent to the `--constraint` command-line argument. If set, uv will use this +file as the constraints file. Uses space-separated list of files. + +### `UV_CUSTOM_COMPILE_COMMAND` + +Equivalent to the `--custom-compile-command` command-line argument. +Used to override uv in the output header of the `requirements.txt` files generated by +`uv pip compile`. Intended for use-cases in which `uv pip compile` is called from within a wrapper +script, to include the name of the wrapper script in the output file. + +### `UV_DEFAULT_INDEX` + +Equivalent to the `--default-index` command-line argument. If set, uv will use +this URL as the default index when searching for packages. + +### `UV_ENV_FILE` + +`.env` files from which to load environment variables when executing `uv run` commands. + +### `UV_EXCLUDE_NEWER` + +Equivalent to the `--exclude-newer` command-line argument. If set, uv will +exclude distributions published after the specified date. + +### `UV_EXTRA_INDEX_URL` + +Equivalent to the `--extra-index-url` command-line argument. If set, uv will +use this space-separated list of URLs as additional indexes when searching for packages. +(Deprecated: use `UV_INDEX` instead.) + +### `UV_FIND_LINKS` + +Equivalent to the `--find-links` command-line argument. If set, uv will use this +comma-separated list of additional locations to search for packages. + +### `UV_FROZEN` + +Equivalent to the `--frozen` command-line argument. If set, uv will run without +updating the `uv.lock` file. + +### `UV_GITHUB_TOKEN` + +Equivalent to the `--token` argument for self update. A GitHub token for authentication. + +### `UV_HTTP_TIMEOUT` + +Timeout (in seconds) for HTTP requests. (default: 30 s) + +### `UV_INDEX` + +Equivalent to the `--index` command-line argument. If set, uv will use this +space-separated list of URLs as additional indexes when searching for packages. + +### `UV_INDEX_STRATEGY` + +Equivalent to the `--index-strategy` command-line argument. For example, if +set to `unsafe-any-match`, uv will consider versions of a given package available across all index +URLs, rather than limiting its search to the first index URL that contains the package. + +### `UV_INDEX_URL` + +Equivalent to the `--index-url` command-line argument. If set, uv will use this +URL as the default index when searching for packages. +(Deprecated: use `UV_DEFAULT_INDEX` instead.) + +### `UV_INDEX_{name}_PASSWORD` + +Generates the environment variable key for the HTTP Basic authentication password. + +### `UV_INDEX_{name}_USERNAME` + +Generates the environment variable key for the HTTP Basic authentication username. + +### `UV_INSECURE_HOST` + +Equivalent to the `--allow-insecure-host` argument. + +### `UV_INSTALLER_GHE_BASE_URL` + +The URL from which to download uv using the standalone installer and `self update` feature, +in lieu of the default GitHub Enterprise URL. + +### `UV_INSTALLER_GITHUB_BASE_URL` + +The URL from which to download uv using the standalone installer and `self update` feature, +in lieu of the default GitHub URL. 
+ +### `UV_INSTALL_DIR` + +The directory in which to install uv using the standalone installer and `self update` feature. +Defaults to `~/.local/bin`. + +### `UV_KEYRING_PROVIDER` + +Equivalent to the `--keyring-provider` command-line argument. If set, uv +will use this value as the keyring provider. + +### `UV_LINK_MODE` + +Equivalent to the `--link-mode` command-line argument. If set, uv will use this as +a link mode. + +### `UV_LOCKED` + +Equivalent to the `--locked` command-line argument. If set, uv will assert that the +`uv.lock` remains unchanged. + +### `UV_NATIVE_TLS` + +Equivalent to the `--native-tls` command-line argument. If set to `true`, uv will +use the system's trust store instead of the bundled `webpki-roots` crate. + +### `UV_NO_BUILD_ISOLATION` + +Equivalent to the `--no-build-isolation` command-line argument. If set, uv will +skip isolation when building source distributions. + +### `UV_NO_CACHE` + +Equivalent to the `--no-cache` command-line argument. If set, uv will not use the +cache for any operations. + +### `UV_NO_CONFIG` + +Equivalent to the `--no-config` command-line argument. If set, uv will not read +any configuration files from the current directory, parent directories, or user configuration +directories. + +### `UV_NO_ENV_FILE` + +Ignore `.env` files when executing `uv run` commands. + +### `UV_NO_PROGRESS` + +Disables all progress output. For example, spinners and progress bars. + +### `UV_NO_SYNC` + +Equivalent to the `--no-sync` command-line argument. If set, uv will skip updating +the environment. + +### `UV_NO_WRAP` + +Use to disable line wrapping for diagnostics. + +### `UV_OVERRIDE` + +Equivalent to the `--override` command-line argument. If set, uv will use this file +as the overrides file. Uses space-separated list of files. + +### `UV_PRERELEASE` + +Equivalent to the `--prerelease` command-line argument. For example, if set to +`allow`, uv will allow pre-release versions for all dependencies. + +### `UV_PREVIEW` + +Equivalent to the `--preview` argument. Enables preview mode. + +### `UV_PROJECT_ENVIRONMENT` + +Specifies the path to the directory to use for a project virtual environment. +See the [project documentation](../concepts/projects.md#configuring-the-project-environment-path) +for more details. + +### `UV_PUBLISH_CHECK_URL` + +Don't upload a file if it already exists on the index. The value is the URL of the index. + +### `UV_PUBLISH_PASSWORD` + +Equivalent to the `--password` command-line argument in `uv publish`. If +set, uv will use this password for publishing. + +### `UV_PUBLISH_TOKEN` + +Equivalent to the `--token` command-line argument in `uv publish`. If set, uv +will use this token (with the username `__token__`) for publishing. + +### `UV_PUBLISH_URL` + +Equivalent to the `--publish-url` command-line argument. The URL of the upload +endpoint of the index to use with `uv publish`. + +### `UV_PUBLISH_USERNAME` + +Equivalent to the `--username` command-line argument in `uv publish`. If +set, uv will use this username for publishing. + +### `UV_PYPY_INSTALL_MIRROR` + +Managed PyPy installations are downloaded from +[python.org](https://downloads.python.org/). This variable can be set to a mirror URL to use a +different source for PyPy installations. The provided URL will replace +`https://downloads.python.org/pypy` in, e.g., +`https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`. +Distributions can be read from a local directory by using the `file://` URL scheme. 
+ +### `UV_PYTHON` + +Equivalent to the `--python` command-line argument. If set to a path, uv will use +this Python interpreter for all operations. + +### `UV_PYTHON_BIN_DIR` + +Specifies the directory to place links to installed, managed Python executables. + +### `UV_PYTHON_DOWNLOADS` + +Equivalent to the +[`python-downloads`](../reference/settings.md#python-downloads) setting and, when disabled, the +`--no-python-downloads` option. Whether uv should allow Python downloads. + +### `UV_PYTHON_INSTALL_DIR` + +Specifies the directory for storing managed Python installations. + +### `UV_PYTHON_INSTALL_MIRROR` + +Managed Python installations are downloaded from +[`python-build-standalone`](https://github.com/indygreg/python-build-standalone). +This variable can be set to a mirror URL to use a different source for Python installations. +The provided URL will replace `https://github.com/indygreg/python-build-standalone/releases/download` in, e.g., +`https://github.com/indygreg/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`. +Distributions can be read from a local directory by using the `file://` URL scheme. + +### `UV_PYTHON_PREFERENCE` + +Equivalent to the `--python-preference` command-line argument. Whether uv +should prefer system or managed Python versions. + +### `UV_REQUEST_TIMEOUT` + +Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. + +### `UV_REQUIRE_HASHES` + +Equivalent to the `--require-hashes` command-line argument. If set to `true`, +uv will require that all dependencies have a hash specified in the requirements file. + +### `UV_RESOLUTION` + +Equivalent to the `--resolution` command-line argument. For example, if set to +`lowest-direct`, uv will install the lowest compatible versions of all direct dependencies. + +### `UV_STACK_SIZE` + +Use to control the stack size used by uv. Typically more relevant for Windows in debug mode. + +### `UV_SYSTEM_PYTHON` + +Equivalent to the `--system` command-line argument. If set to `true`, uv will +use the first Python interpreter found in the system `PATH`. +WARNING: `UV_SYSTEM_PYTHON=true` is intended for use in continuous integration (CI) +or containerized environments and should be used with caution, as modifying the system +Python can lead to unexpected behavior. + +### `UV_TOOL_BIN_DIR` + +Specifies the "bin" directory for installing tool executables. + +### `UV_TOOL_DIR` + +Specifies the directory where uv stores managed tools. + +### `UV_UNMANAGED_INSTALL` + +Used ephemeral environments like CI to install uv to a specific path while preventing +the installer from modifying shell profiles or environment variables. + +### `UV_VERIFY_HASHES` + +Equivalent to the `--verify-hashes` argument. Verifies included hashes. + + + +## Externally defined variables + +uv also reads the following externally defined environment variables: + +### `ACTIONS_ID_TOKEN_REQUEST_TOKEN` + +Used for trusted publishing via `uv publish`. Contains the oidc request token. + +### `ACTIONS_ID_TOKEN_REQUEST_URL` + +Used for trusted publishing via `uv publish`. Contains the oidc token url. + +### `ALL_PROXY` + +General proxy for all network requests. + +### `BASH_VERSION` + +Used to detect Bash shell usage. + +### `CLICOLOR_FORCE` + +Use to control color via `anstyle`. + +### `CONDA_DEFAULT_ENV` + +Used to determine if an active Conda environment is the base environment or not. + +### `CONDA_PREFIX` + +Used to detect an activated Conda environment. 
+ +### `FISH_VERSION` + +Used to detect Fish shell usage. + +### `FORCE_COLOR` + +Forces colored output regardless of terminal support. + +See [force-color.org](https://force-color.org). + +### `GITHUB_ACTIONS` + +Used for trusted publishing via `uv publish`. + +### `HOME` + +The standard `HOME` env var. + +### `HTTPS_PROXY` + +Proxy for HTTPS requests. + +### `HTTP_PROXY` + +Proxy for HTTP requests. + +### `HTTP_TIMEOUT` + +Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. + +### `INSTALLER_NO_MODIFY_PATH` + +Avoid modifying the `PATH` environment variable when installing uv using the standalone +installer and `self update` feature. + +### `JPY_SESSION_NAME` + +Used to detect when running inside a Jupyter notebook. + +### `KSH_VERSION` + +Used to detect Ksh shell usage. + +### `LOCALAPPDATA` + +Used to look for Microsoft Store Pythons installations. + +### `MACOSX_DEPLOYMENT_TARGET` + +Used with `--python-platform macos` and related variants to set the +deployment target (i.e., the minimum supported macOS version). + +Defaults to `12.0`, the least-recent non-EOL macOS version at time of writing. + +### `NETRC` + +Use to set the .netrc file location. + +### `NO_COLOR` + +Disables colored output (takes precedence over `FORCE_COLOR`). + +See [no-color.org](https://no-color.org). + +### `NU_VERSION` + +Used to detect `NuShell` usage. + +### `PAGER` + +The standard `PAGER` posix env var. Used by `uv` to configure the appropriate pager. + +### `PATH` + +The standard `PATH` env var. + +### `PROMPT` + +Used to detect the use of the Windows Command Prompt (as opposed to PowerShell). + +### `PWD` + +The standard `PWD` posix env var. + +### `PYC_INVALIDATION_MODE` + +The validation modes to use when run with `--compile`. + +See [`PycInvalidationMode`](https://docs.python.org/3/library/py_compile.html#py_compile.PycInvalidationMode). + +### `PYTHONPATH` + +Adds directories to Python module search path (e.g., `PYTHONPATH=/path/to/modules`). + +### `RUST_LOG` + +If set, uv will use this value as the log level for its `--verbose` output. Accepts +any filter compatible with the `tracing_subscriber` crate. +For example: +* `RUST_LOG=uv=debug` is the equivalent of adding `--verbose` to the command line +* `RUST_LOG=trace` will enable trace-level logging. + +See the [tracing documentation](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#example-syntax) +for more. + +### `SHELL` + +The standard `SHELL` posix env var. + +### `SSL_CERT_FILE` + +Custom certificate bundle file path for SSL connections. + +### `SSL_CLIENT_CERT` + +If set, uv will use this file for mTLS authentication. +This should be a single file containing both the certificate and the private key in PEM format. + +### `SYSTEMDRIVE` + +Path to system-level configuration directory on Windows systems. + +### `TRACING_DURATIONS_FILE` + +Use to create the tracing durations file via the `tracing-durations-export` feature. + +### `VIRTUAL_ENV` + +Used to detect an activated virtual environment. + +### `VIRTUAL_ENV_DISABLE_PROMPT` + +If set to `1` before a virtual environment is activated, then the +virtual environment name will not be prepended to the terminal prompt. + +### `XDG_BIN_HOME` + +Path to directory where executables are installed. + +### `XDG_CACHE_HOME` + +Path to cache directory on Unix systems. + +### `XDG_CONFIG_DIRS` + +Path to system-level configuration directory on Unix systems. + +### `XDG_CONFIG_HOME` + +Path to user-level configuration directory on Unix systems. 
+ +### `XDG_DATA_HOME` + +Path to directory for storing managed Python installations and tools. + +### `ZDOTDIR` + +Used to determine which `.zshenv` to use when Zsh is being used. + +### `ZSH_VERSION` + +Used to detect Zsh shell usage. + From a3c2a3642231ee6f59332666071e3be1fbdf56f6 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 13:52:22 -0500 Subject: [PATCH 06/23] Add an error case for `--no-binary` sync failures (#9025) --- crates/uv/tests/it/sync.rs | 48 ++++++++++++++++++++++++++++++++++---- 1 file changed, 44 insertions(+), 4 deletions(-) diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index d4792ccf2998..86d038722256 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -2857,6 +2857,8 @@ fn no_binary() -> Result<()> { "#, )?; + context.lock().assert().success(); + uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("iniconfig"), @r###" success: true exit_code: 0 @@ -2886,7 +2888,7 @@ fn no_binary_error() -> Result<()> { name = "project" version = "0.1.0" requires-python = ">=3.12" - dependencies = ["django_allauth==0.51.0"] + dependencies = ["odrive"] [build-system] requires = ["setuptools>=42"] @@ -2896,14 +2898,14 @@ fn no_binary_error() -> Result<()> { context.lock().assert().success(); - uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-binary-package").arg("odrive"), @r###" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - Resolved 19 packages in [TIME] - error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution + Resolved 31 packages in [TIME] + error: Distribution `odrive==0.6.8 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-binary` but has no source distribution "###); assert!(context.temp_dir.child("uv.lock").exists()); @@ -2930,6 +2932,8 @@ fn no_build() -> Result<()> { "#, )?; + context.lock().assert().success(); + uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("iniconfig"), @r###" success: true exit_code: 0 @@ -2948,6 +2952,42 @@ fn no_build() -> Result<()> { Ok(()) } +#[test] +fn no_build_error() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["django_allauth==0.51.0"] + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + context.lock().assert().success(); + + uv_snapshot!(context.filters(), context.sync().arg("--no-build-package").arg("django-allauth"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Resolved 19 packages in [TIME] + error: Distribution `django-allauth==0.51.0 @ registry+https://pypi.org/simple` can't be installed because it is marked as `--no-build` but has no binary distribution + "###); + + assert!(context.temp_dir.child("uv.lock").exists()); + + Ok(()) +} + #[test] fn sync_wheel_url_source_error() -> Result<()> { let context = TestContext::new("3.12"); From dd4b27bfbdacff33ebfcf9bccb971413240533b3 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 14:02:37 -0500 Subject: [PATCH 07/23] Allow `--no-build` to validate lock (#9024) ## Summary Just as 
we don't enforce tag compliance, we shouldn't enforce `--no-build` when validating the lockfile. If we end up building from source, the distribution database will correctly error. Closes https://github.com/astral-sh/uv/issues/9016. --- crates/uv-resolver/src/lock/mod.rs | 8 ++++++-- crates/uv/src/commands/project/lock.rs | 6 +----- crates/uv/tests/it/sync.rs | 4 +--- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 540a597a62be..577a047c7f56 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -974,7 +974,6 @@ impl Lock { overrides: &[Requirement], dependency_metadata: &DependencyMetadata, indexes: Option<&IndexLocations>, - build_options: &BuildOptions, tags: &Tags, hasher: &HashStrategy, index: &InMemoryIndex, @@ -1183,8 +1182,13 @@ impl Lock { // Get the metadata for the distribution. let dist = package.to_dist( workspace.install_path(), + // When validating, it's okay to use wheels that don't match the current platform. TagPolicy::Preferred(tags), - build_options, + // When validating, it's okay to use (e.g.) a source distribution with `--no-build`. + // We're just trying to determine whether the lockfile is up-to-date. If we end + // up needing to build a source distribution in order to do so, below, we'll error + // there. + &BuildOptions::default(), )?; // Fetch the metadata for the distribution. diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index f7ad9b9900a7..9e141d3df05e 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -11,8 +11,7 @@ use tracing::debug; use uv_cache::Cache; use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - BuildOptions, Concurrency, Constraints, ExtrasSpecification, LowerBound, Reinstall, - TrustedHost, Upgrade, + Concurrency, Constraints, ExtrasSpecification, LowerBound, Reinstall, TrustedHost, Upgrade, }; use uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; @@ -531,7 +530,6 @@ async fn do_lock( interpreter, &requires_python, index_locations, - build_options, upgrade, &options, &hasher, @@ -714,7 +712,6 @@ impl ValidatedLock { interpreter: &Interpreter, requires_python: &RequiresPython, index_locations: &IndexLocations, - build_options: &BuildOptions, upgrade: &Upgrade, options: &Options, hasher: &HashStrategy, @@ -841,7 +838,6 @@ impl ValidatedLock { overrides, dependency_metadata, indexes, - build_options, interpreter.tags()?, hasher, index, diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 86d038722256..f95f9d4d1455 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -1997,20 +1997,18 @@ fn no_install_project_no_build() -> Result<()> { ----- stdout ----- ----- stderr ----- - warning: Failed to validate existing lockfile: Distribution `project==0.1.0 @ editable+.` can't be installed because it is marked as `--no-build` but has no binary distribution Resolved 4 packages in [TIME] error: Distribution `project==0.1.0 @ editable+.` can't be installed because it is marked as `--no-build` but has no binary distribution "###); // But it's fine to combine `--no-install-project` with `--no-build`. We shouldn't error, since // we aren't building the project. 
- uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-install-project").arg("--no-build").arg("--locked"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - warning: Failed to validate existing lockfile: Distribution `project==0.1.0 @ editable+.` can't be installed because it is marked as `--no-build` but has no binary distribution Resolved 4 packages in [TIME] Prepared 3 packages in [TIME] Installed 3 packages in [TIME] From 59f3ec2b6c92e5f1503b813859a70317645a6c6c Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 15:30:00 -0500 Subject: [PATCH 08/23] Remove duplicate `Lock::to_resolution` (#9029) ## Summary This got moved to `InstallTarget`! Must've been an oversight not to delete. I verified that no code was changed here since the date that we moved it to `InstallTarget`. --- crates/uv-resolver/src/lock/mod.rs | 139 +---------------------------- 1 file changed, 2 insertions(+), 137 deletions(-) diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 577a047c7f56..205ad043c904 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -1,4 +1,3 @@ -use either::Either; use itertools::Itertools; use petgraph::graph::NodeIndex; use petgraph::visit::EdgeRef; @@ -25,14 +24,14 @@ use crate::{ ResolutionMode, }; use uv_cache_key::RepositoryUrl; -use uv_configuration::{BuildOptions, DevGroupsManifest, ExtrasSpecification, InstallOptions}; +use uv_configuration::BuildOptions; use uv_distribution::DistributionDatabase; use uv_distribution_filename::{DistExtension, ExtensionError, SourceDistExtension, WheelFilename}; use uv_distribution_types::{ BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, - RemoteSource, Resolution, ResolvedDist, StaticMetadata, ToUrlError, UrlString, + RemoteSource, ResolvedDist, StaticMetadata, ToUrlError, UrlString, }; use uv_fs::{relative_to, PortablePath, PortablePathBuf}; use uv_git::{GitReference, GitSha, RepositoryReference, ResolvedRepositoryReference}; @@ -42,7 +41,6 @@ use uv_pep508::{split_scheme, MarkerEnvironment, MarkerTree, VerbatimUrl, Verbat use uv_platform_tags::{TagCompatibility, TagPriority, Tags}; use uv_pypi_types::{ redact_credentials, HashDigest, ParsedArchiveUrl, ParsedGitUrl, Requirement, RequirementSource, - ResolverMarkerEnvironment, }; use uv_types::{BuildContext, HashStrategy}; use uv_workspace::dependency_groups::DependencyGroupError; @@ -587,139 +585,6 @@ impl Lock { self.fork_markers.as_slice() } - /// Convert the [`Lock`] to a [`Resolution`] using the given marker environment, tags, and root. - pub fn to_resolution( - &self, - target: InstallTarget<'_>, - marker_env: &ResolverMarkerEnvironment, - tags: &Tags, - extras: &ExtrasSpecification, - dev: &DevGroupsManifest, - build_options: &BuildOptions, - install_options: &InstallOptions, - ) -> Result { - let mut queue: VecDeque<(&Package, Option<&ExtraName>)> = VecDeque::new(); - let mut seen = FxHashSet::default(); - - // Add the workspace packages to the queue. - for root_name in target.packages() { - let root = self - .find_by_name(root_name) - .map_err(|_| LockErrorKind::MultipleRootPackages { - name: root_name.clone(), - })? 
- .ok_or_else(|| LockErrorKind::MissingRootPackage { - name: root_name.clone(), - })?; - - if dev.prod() { - // Add the base package. - queue.push_back((root, None)); - - // Add any extras. - match extras { - ExtrasSpecification::None => {} - ExtrasSpecification::All => { - for extra in root.optional_dependencies.keys() { - queue.push_back((root, Some(extra))); - } - } - ExtrasSpecification::Some(extras) => { - for extra in extras { - queue.push_back((root, Some(extra))); - } - } - } - } - - // Add any dev dependencies. - for group in dev.iter() { - for dep in root.dependency_groups.get(group).into_iter().flatten() { - if dep.complexified_marker.evaluate(marker_env, &[]) { - let dep_dist = self.find_by_id(&dep.package_id); - if seen.insert((&dep.package_id, None)) { - queue.push_back((dep_dist, None)); - } - for extra in &dep.extra { - if seen.insert((&dep.package_id, Some(extra))) { - queue.push_back((dep_dist, Some(extra))); - } - } - } - } - } - } - - // Add any dependency groups that are exclusive to the workspace root (e.g., dev - // dependencies in (legacy) non-project workspace roots). - let groups = target - .groups() - .map_err(|err| LockErrorKind::DependencyGroup { err })?; - for group in dev.iter() { - for dependency in groups.get(group).into_iter().flatten() { - if dependency.marker.evaluate(marker_env, &[]) { - let root_name = &dependency.name; - let root = self - .find_by_markers(root_name, marker_env) - .map_err(|_| LockErrorKind::MultipleRootPackages { - name: root_name.clone(), - })? - .ok_or_else(|| LockErrorKind::MissingRootPackage { - name: root_name.clone(), - })?; - - // Add the base package. - queue.push_back((root, None)); - - // Add any extras. - for extra in &dependency.extras { - queue.push_back((root, Some(extra))); - } - } - } - } - - let mut map = BTreeMap::default(); - let mut hashes = BTreeMap::default(); - while let Some((dist, extra)) = queue.pop_front() { - let deps = if let Some(extra) = extra { - Either::Left(dist.optional_dependencies.get(extra).into_iter().flatten()) - } else { - Either::Right(dist.dependencies.iter()) - }; - for dep in deps { - if dep.complexified_marker.evaluate(marker_env, &[]) { - let dep_dist = self.find_by_id(&dep.package_id); - if seen.insert((&dep.package_id, None)) { - queue.push_back((dep_dist, None)); - } - for extra in &dep.extra { - if seen.insert((&dep.package_id, Some(extra))) { - queue.push_back((dep_dist, Some(extra))); - } - } - } - } - if install_options.include_package( - &dist.id.name, - target.project_name(), - &self.manifest.members, - ) { - map.insert( - dist.id.name.clone(), - ResolvedDist::Installable(dist.to_dist( - target.workspace().install_path(), - TagPolicy::Required(tags), - build_options, - )?), - ); - hashes.insert(dist.id.name.clone(), dist.hashes()); - } - } - let diagnostics = vec![]; - Ok(Resolution::new(map, hashes, diagnostics)) - } - /// Returns the TOML representation of this lockfile. pub fn to_toml(&self) -> anyhow::Result { // We construct a TOML document manually instead of going through Serde to enable From e059c25bd6a0cd086e4770ecf700e38157af1bc7 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 21:19:38 -0500 Subject: [PATCH 09/23] Consistently box distributions in fetch and build errors (#9038) ## Summary This is useful for pattern-matching. 
--- Cargo.lock | 1 - crates/uv-installer/Cargo.toml | 1 - crates/uv-installer/src/preparer.rs | 34 +++++++++---------------- crates/uv-requirements/src/extras.rs | 8 +++--- crates/uv-requirements/src/lib.rs | 6 ++--- crates/uv-requirements/src/lookahead.rs | 22 ++++++++-------- 6 files changed, 30 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bebfd16a77bc..fdd98f432bfe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5016,7 +5016,6 @@ dependencies = [ "uv-configuration", "uv-distribution", "uv-distribution-types", - "uv-extract", "uv-fs", "uv-install-wheel", "uv-normalize", diff --git a/crates/uv-installer/Cargo.toml b/crates/uv-installer/Cargo.toml index 12a1114b7f41..e00ab8daef6f 100644 --- a/crates/uv-installer/Cargo.toml +++ b/crates/uv-installer/Cargo.toml @@ -22,7 +22,6 @@ uv-cache-key = { workspace = true } uv-configuration = { workspace = true } uv-distribution = { workspace = true } uv-distribution-types = { workspace = true } -uv-extract = { workspace = true } uv-fs = { workspace = true } uv-install-wheel = { workspace = true, default-features = false } uv-normalize = { workspace = true } diff --git a/crates/uv-installer/src/preparer.rs b/crates/uv-installer/src/preparer.rs index de9cd4b48cae..146776e79dfa 100644 --- a/crates/uv-installer/src/preparer.rs +++ b/crates/uv-installer/src/preparer.rs @@ -2,10 +2,8 @@ use std::cmp::Reverse; use std::sync::Arc; use futures::{stream::FuturesUnordered, FutureExt, Stream, TryFutureExt, TryStreamExt}; -use tokio::task::JoinError; use tracing::{debug, instrument}; use url::Url; -use uv_pep508::PackageName; use uv_cache::Cache; use uv_configuration::BuildOptions; @@ -14,6 +12,7 @@ use uv_distribution_types::{ BuildableSource, BuiltDist, CachedDist, Dist, Hashed, Identifier, Name, RemoteSource, SourceDist, }; +use uv_pep508::PackageName; use uv_platform_tags::Tags; use uv_types::{BuildContext, HashStrategy, InFlight}; @@ -23,21 +22,12 @@ pub enum Error { NoBuild(PackageName), #[error("Using pre-built wheels is disabled, but attempted to use `{0}`")] NoBinary(PackageName), - #[error("Failed to unzip wheel: {0}")] - Unzip(Dist, #[source] Box), #[error("Failed to download `{0}`")] - Fetch(BuiltDist, #[source] Box), + Fetch(Box, #[source] Box), #[error("Failed to download and build `{0}`")] - FetchAndBuild(SourceDist, #[source] Box), + FetchAndBuild(Box, #[source] Box), #[error("Failed to build `{0}`")] - Build(SourceDist, #[source] Box), - /// Should not occur; only seen when another task panicked. 
- #[error("The task executor is broken, did some other task panic?")] - Join(#[from] JoinError), - #[error(transparent)] - Editable(#[from] Box), - #[error("Failed to write to the client cache")] - CacheWrite(#[source] std::io::Error), + Build(Box, #[source] Box), #[error("Unzip failed in another thread: {0}")] Thread(String), } @@ -155,13 +145,13 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { .database .get_or_build_wheel(&dist, self.tags, policy) .boxed_local() - .map_err(|err| match &dist { - Dist::Built(dist) => Error::Fetch(dist.clone(), Box::new(err)), + .map_err(|err| match dist.clone() { + Dist::Built(dist) => Error::Fetch(Box::new(dist), Box::new(err)), Dist::Source(dist) => { if dist.is_local() { - Error::Build(dist.clone(), Box::new(err)) + Error::Build(Box::new(dist), Box::new(err)) } else { - Error::FetchAndBuild(dist.clone(), Box::new(err)) + Error::FetchAndBuild(Box::new(dist), Box::new(err)) } } }) @@ -175,13 +165,13 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { policy.digests(), wheel.hashes(), ); - Err(match &dist { - Dist::Built(dist) => Error::Fetch(dist.clone(), Box::new(err)), + Err(match dist { + Dist::Built(dist) => Error::Fetch(Box::new(dist), Box::new(err)), Dist::Source(dist) => { if dist.is_local() { - Error::Build(dist.clone(), Box::new(err)) + Error::Build(Box::new(dist), Box::new(err)) } else { - Error::FetchAndBuild(dist.clone(), Box::new(err)) + Error::FetchAndBuild(Box::new(dist), Box::new(err)) } } }) diff --git a/crates/uv-requirements/src/extras.rs b/crates/uv-requirements/src/extras.rs index 50a7e3f93337..bca7fe87da68 100644 --- a/crates/uv-requirements/src/extras.rs +++ b/crates/uv-requirements/src/extras.rs @@ -100,13 +100,13 @@ impl<'a, Context: BuildContext> ExtrasResolver<'a, Context> { let archive = database .get_or_build_wheel_metadata(&dist, hasher.get(&dist)) .await - .map_err(|err| match &dist { - Dist::Built(built) => Error::Download(built.clone(), err), + .map_err(|err| match dist { + Dist::Built(built) => Error::Download(Box::new(built), err), Dist::Source(source) => { if source.is_local() { - Error::Build(source.clone(), err) + Error::Build(Box::new(source), err) } else { - Error::DownloadAndBuild(source.clone(), err) + Error::DownloadAndBuild(Box::new(source), err) } } })?; diff --git a/crates/uv-requirements/src/lib.rs b/crates/uv-requirements/src/lib.rs index a72656332c66..04bac27ec6d5 100644 --- a/crates/uv-requirements/src/lib.rs +++ b/crates/uv-requirements/src/lib.rs @@ -20,13 +20,13 @@ pub mod upgrade; #[derive(Debug, thiserror::Error)] pub enum Error { #[error("Failed to download `{0}`")] - Download(BuiltDist, #[source] uv_distribution::Error), + Download(Box, #[source] uv_distribution::Error), #[error("Failed to download and build `{0}`")] - DownloadAndBuild(SourceDist, #[source] uv_distribution::Error), + DownloadAndBuild(Box, #[source] uv_distribution::Error), #[error("Failed to build `{0}`")] - Build(SourceDist, #[source] uv_distribution::Error), + Build(Box, #[source] uv_distribution::Error), #[error(transparent)] Distribution(#[from] uv_distribution::Error), diff --git a/crates/uv-requirements/src/lookahead.rs b/crates/uv-requirements/src/lookahead.rs index 1f6c7cc64fe0..0192c6313845 100644 --- a/crates/uv-requirements/src/lookahead.rs +++ b/crates/uv-requirements/src/lookahead.rs @@ -143,6 +143,13 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> { return Ok(None); }; + // Consider the dependencies to be "direct" if the requirement is a local source tree. 
+ let direct = if let Dist::Source(source_dist) = &dist { + source_dist.as_path().is_some_and(std::path::Path::is_dir) + } else { + false + }; + // Fetch the metadata for the distribution. let metadata = { let id = dist.version_id(); @@ -167,13 +174,13 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> { .database .get_or_build_wheel_metadata(&dist, self.hasher.get(&dist)) .await - .map_err(|err| match &dist { - Dist::Built(built) => Error::Download(built.clone(), err), + .map_err(|err| match dist { + Dist::Built(built) => Error::Download(Box::new(built), err), Dist::Source(source) => { if source.is_local() { - Error::Build(source.clone(), err) + Error::Build(Box::new(source), err) } else { - Error::DownloadAndBuild(source.clone(), err) + Error::DownloadAndBuild(Box::new(source), err) } } })?; @@ -218,13 +225,6 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> { }) .collect(); - // Consider the dependencies to be "direct" if the requirement is a local source tree. - let direct = if let Dist::Source(source_dist) = &dist { - source_dist.as_path().is_some_and(std::path::Path::is_dir) - } else { - false - }; - // Return the requirements from the metadata. Ok(Some(RequestedRequirements::new( requirement.extras, From b3c660c58af9a74759a4acf2c92076eb32f4c912 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 21:30:20 -0500 Subject: [PATCH 10/23] Rename `Fetch` to `Download` in build errors (#9039) ## Summary We're inconsistent with these -- sometimes it's `Error::Fetch` and sometimes it's `Error::Download`. The message says download, so let's just use that? --- crates/uv-installer/src/preparer.rs | 12 ++++++------ crates/uv-resolver/src/error.rs | 4 ++-- crates/uv-resolver/src/resolver/mod.rs | 13 +++++++++---- crates/uv/src/commands/diagnostics.rs | 2 +- crates/uv/src/commands/pip/compile.rs | 2 +- crates/uv/src/commands/pip/install.rs | 2 +- crates/uv/src/commands/pip/sync.rs | 2 +- crates/uv/src/commands/project/add.rs | 2 +- crates/uv/src/commands/project/export.rs | 2 +- crates/uv/src/commands/project/lock.rs | 2 +- crates/uv/src/commands/project/run.rs | 6 +++--- crates/uv/src/commands/project/sync.rs | 2 +- 12 files changed, 28 insertions(+), 23 deletions(-) diff --git a/crates/uv-installer/src/preparer.rs b/crates/uv-installer/src/preparer.rs index 146776e79dfa..f75f085def5e 100644 --- a/crates/uv-installer/src/preparer.rs +++ b/crates/uv-installer/src/preparer.rs @@ -23,9 +23,9 @@ pub enum Error { #[error("Using pre-built wheels is disabled, but attempted to use `{0}`")] NoBinary(PackageName), #[error("Failed to download `{0}`")] - Fetch(Box, #[source] Box), + Download(Box, #[source] Box), #[error("Failed to download and build `{0}`")] - FetchAndBuild(Box, #[source] Box), + DownloadAndBuild(Box, #[source] Box), #[error("Failed to build `{0}`")] Build(Box, #[source] Box), #[error("Unzip failed in another thread: {0}")] @@ -146,12 +146,12 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { .get_or_build_wheel(&dist, self.tags, policy) .boxed_local() .map_err(|err| match dist.clone() { - Dist::Built(dist) => Error::Fetch(Box::new(dist), Box::new(err)), + Dist::Built(dist) => Error::Download(Box::new(dist), Box::new(err)), Dist::Source(dist) => { if dist.is_local() { Error::Build(Box::new(dist), Box::new(err)) } else { - Error::FetchAndBuild(Box::new(dist), Box::new(err)) + Error::DownloadAndBuild(Box::new(dist), Box::new(err)) } } }) @@ -166,12 +166,12 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { wheel.hashes(), ); 
Err(match dist { - Dist::Built(dist) => Error::Fetch(Box::new(dist), Box::new(err)), + Dist::Built(dist) => Error::Download(Box::new(dist), Box::new(err)), Dist::Source(dist) => { if dist.is_local() { Error::Build(Box::new(dist), Box::new(err)) } else { - Error::FetchAndBuild(Box::new(dist), Box::new(err)) + Error::DownloadAndBuild(Box::new(dist), Box::new(err)) } } }) diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index 6c60f69a693f..a9db58e7fa03 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -87,10 +87,10 @@ pub enum ResolveError { ParsedUrl(#[from] uv_pypi_types::ParsedUrlError), #[error("Failed to download `{0}`")] - Fetch(Box, #[source] uv_distribution::Error), + Download(Box, #[source] uv_distribution::Error), #[error("Failed to download and build `{0}`")] - FetchAndBuild(Box, #[source] uv_distribution::Error), + DownloadAndBuild(Box, #[source] uv_distribution::Error), #[error("Failed to read `{0}`")] Read(Box, #[source] uv_distribution::Error), diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index e1e0c6c6d073..7f72e6fd85db 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -1783,12 +1783,14 @@ impl ResolverState { ResolveError::Build(Box::new(source_dist), err) } - Dist::Built(built_dist) => ResolveError::Fetch(Box::new(built_dist), err), + Dist::Built(built_dist) => { + ResolveError::Download(Box::new(built_dist), err) + } Dist::Source(source_dist) => { if source_dist.is_local() { ResolveError::Build(Box::new(source_dist), err) } else { - ResolveError::FetchAndBuild(Box::new(source_dist), err) + ResolveError::DownloadAndBuild(Box::new(source_dist), err) } } })?; @@ -1919,13 +1921,16 @@ impl ResolverState { - ResolveError::Fetch(Box::new(built_dist), err) + ResolveError::Download(Box::new(built_dist), err) } Dist::Source(source_dist) => { if source_dist.is_local() { ResolveError::Build(Box::new(source_dist), err) } else { - ResolveError::FetchAndBuild(Box::new(source_dist), err) + ResolveError::DownloadAndBuild( + Box::new(source_dist), + err, + ) } } })?; diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index ce317a8d34c0..befe12ea0bd8 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -20,7 +20,7 @@ static SUGGESTIONS: LazyLock> = LazyLock::ne .collect() }); -/// Render a [`uv_resolver::ResolveError::FetchAndBuild`] with a help message. +/// Render a [`uv_resolver::ResolveError::DownloadAndBuild`] with a help message. 
pub(crate) fn fetch_and_build(sdist: Box, cause: uv_distribution::Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] #[error("Failed to download and build `{sdist}`")] diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 59ba8a26b97c..8906273d4e8b 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -410,7 +410,7 @@ pub(crate) async fn pip_compile( diagnostics::no_solution(&err); return Ok(ExitStatus::Failure); } - Err(operations::Error::Resolve(uv_resolver::ResolveError::FetchAndBuild(dist, err))) => { + Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); } diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 13610f7ef98d..1934038c981b 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -416,7 +416,7 @@ pub(crate) async fn pip_install( diagnostics::no_solution(&err); return Ok(ExitStatus::Failure); } - Err(operations::Error::Resolve(uv_resolver::ResolveError::FetchAndBuild(dist, err))) => { + Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); } diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 26b8175658d0..9bdd7b1d0a54 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -360,7 +360,7 @@ pub(crate) async fn pip_sync( diagnostics::no_solution(&err); return Ok(ExitStatus::Failure); } - Err(operations::Error::Resolve(uv_resolver::ResolveError::FetchAndBuild(dist, err))) => { + Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); } diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index 4f0d501df74c..d8412214a293 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -680,7 +680,7 @@ pub(crate) async fn add( Ok(ExitStatus::Failure) } ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), )) => { diagnostics::fetch_and_build(dist, err); Ok(ExitStatus::Failure) diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index b5aec86affdd..54b843a11a1f 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -149,7 +149,7 @@ pub(crate) async fn export( return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 9e141d3df05e..4edf2058b63a 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -170,7 +170,7 @@ pub(crate) async fn lock( Ok(ExitStatus::Failure) } Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { 
diagnostics::fetch_and_build(dist, err); Ok(ExitStatus::Failure) diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index fc8651a91507..76c759cb9c55 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -306,7 +306,7 @@ pub(crate) async fn run( return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); @@ -635,7 +635,7 @@ pub(crate) async fn run( return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); @@ -838,7 +838,7 @@ pub(crate) async fn run( return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index eac0f4761ad3..4bac3189fb5a 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -161,7 +161,7 @@ pub(crate) async fn sync( return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::FetchAndBuild(dist, err), + uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { diagnostics::fetch_and_build(dist, err); return Ok(ExitStatus::Failure); From 5187f330c195aab6f952b206189eaa782764c35d Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 21:32:31 -0500 Subject: [PATCH 11/23] Hide `--no-system` from `uv pip tree` CLI (#9040) ## Summary This is hidden from all other commands, so it looks like an oversight. Closes #9035. --- crates/uv-cli/src/lib.rs | 2 +- docs/reference/cli.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 25a3e8dc3cd4..668bdc906ebf 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -2111,7 +2111,7 @@ pub struct PipTreeArgs { )] pub system: bool, - #[arg(long, overrides_with("system"))] + #[arg(long, overrides_with("system"), hide = true)] pub no_system: bool, #[command(flatten)] diff --git a/docs/reference/cli.md b/docs/reference/cli.md index c7585ae21435..4b5c4cb61880 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -7033,7 +7033,7 @@ uv pip tree [OPTIONS]

May also be set with the UV_NO_PROGRESS environment variable.

--no-python-downloads

Disable automatic downloads of Python.

- --no-system
+ --offline

Disable network access.

When disabled, uv will only use locally cached data and locally available files.
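Note that `hide = true` only affects the generated help output; the flag is still parsed and still participates in `overrides_with`. A minimal sketch of the same clap pattern, assuming `clap` with the `derive` feature and using a placeholder struct rather than the real `PipTreeArgs`:

```rust
use clap::Parser;

// Placeholder argument struct; the field names mirror the pattern above but
// this is not the real `PipTreeArgs` definition.
#[derive(Parser, Debug)]
struct Args {
    /// Listed in `--help`.
    #[arg(long)]
    system: bool,

    /// Still accepted on the command line, but omitted from `--help`.
    #[arg(long, overrides_with("system"), hide = true)]
    no_system: bool,
}

fn main() {
    let args = Args::parse();
    println!("system: {}, no-system: {}", args.system, args.no_system);
}
```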

From 052b4e77a60847de0ccd275f4c7fc241378c9bc0 Mon Sep 17 00:00:00 2001 From: Shane Kennedy Date: Tue, 12 Nov 2024 03:48:21 +0100 Subject: [PATCH 12/23] Handle sigterm calls, fixes #6724 (#8933) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary This PR builds off of https://github.com/astral-sh/uv/pull/6738 to fix #6724 (sorry for the new PR @charliermarsh I didn't want to push to your branch, not even sure if I could). The reason the original PR doesn't fix the issue described in #6724 is because the fastapi is ran in the project context (as I assume a lot of use cases are). This PR adds an extra commit to handle the signals in the project/run.rs file ~It also addresses the comment [here](https://github.com/astral-sh/uv/pull/6738/files#r1734757548) to not use the tokio ctrl-c method since we are now handling SIGINT ourselves~ update, tokio handles SIGINT in a platform agnostic way, intercepting this ouselves makes the logic more complicated with windows, decided to leave the tokio ctrl-c handler ~[This comment](https://github.com/astral-sh/uv/pull/6738/files#r1743510140) remains unaddressed, however, the Child process does not have any other methods besides kill() so I don't see how we can "preserve" the interrupt call :/ I tried looking around but no luck.~ updated, this PR is reduced to only handling SIGTERM propagation on unix machines, and the sigterm call to the child is preserved by making use of the nix package, instead of relying on tokio which only allowed for `kill()` on a child process ## Test Plan I tested this by building the docker container locally with these changes and tagging it "myuv", and then using that as the base image in uv-docker-example, (and ofc following the rest of the repro issues in #6724. 
In my tests I see that ctrl-c in the docker-compose up command exits the process almost immediately 👍 --------- Co-authored-by: Charlie Marsh --- Cargo.lock | 3 ++- Cargo.toml | 5 ++-- crates/uv/Cargo.toml | 3 +++ crates/uv/src/commands/project/run.rs | 34 +++++++++++++++++++++++++-- crates/uv/src/commands/tool/run.rs | 34 +++++++++++++++++++++++++-- 5 files changed, 72 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fdd98f432bfe..6d886e80fd27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4376,6 +4376,7 @@ dependencies = [ "itertools 0.13.0", "jiff", "miette", + "nix", "owo-colors", "petgraph", "predicates", @@ -5798,7 +5799,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 3075f679313e..f055ca98e4c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -120,15 +120,15 @@ md-5 = { version = "0.10.6" } memchr = { version = "2.7.4" } miette = { version = "7.2.0" } nanoid = { version = "0.4.0" } +nix = { version = "0.29.0" } owo-colors = { version = "4.1.0" } path-slash = { version = "0.2.1" } pathdiff = { version = "0.2.1" } petgraph = { version = "0.6.5" } platform-info = { version = "2.0.3" } -procfs = { version = "0.17.0", default-features = false, features = ["flate2"] } proc-macro2 = { version = "1.0.86" } +procfs = { version = "0.17.0", default-features = false, features = ["flate2"] } pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "95e1390399cdddee986b658be19587eb1fdb2d79" } -version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "95e1390399cdddee986b658be19587eb1fdb2d79" } quote = { version = "1.0.37" } rayon = { version = "1.10.0" } reflink-copy = { version = "0.1.19" } @@ -172,6 +172,7 @@ unicode-width = { version = "0.1.13" } unscanny = { version = "0.1.0" } url = { version = "2.5.2" } urlencoding = { version = "2.1.3" } +version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "95e1390399cdddee986b658be19587eb1fdb2d79" } walkdir = { version = "2.5.0" } which = { version = "7.0.0", features = ["regex"] } windows-registry = { version = "0.3.0" } diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index b2be62851772..1d0af552e2d7 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -115,6 +115,9 @@ similar = { version = "2.6.0" } tempfile = { workspace = true } zip = { workspace = true } +[target.'cfg(unix)'.dependencies] +nix = { workspace = true } + [package.metadata.cargo-shear] ignored = [ "flate2", diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 76c759cb9c55..6ce8bcbcca5d 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -996,9 +996,30 @@ pub(crate) async fn run( // signal handlers after the command completes. let _handler = tokio::spawn(async { while tokio::signal::ctrl_c().await.is_ok() {} }); - let status = handle.wait().await.context("Child process disappeared")?; + // Exit based on the result of the command. + #[cfg(unix)] + let status = { + use tokio::select; + use tokio::signal::unix::{signal, SignalKind}; + + let mut term_signal = signal(SignalKind::terminate())?; + loop { + select! 
{ + result = handle.wait() => { + break result; + }, + + // `SIGTERM` + _ = term_signal.recv() => { + let _ = terminate_process(&mut handle); + } + }; + } + }?; + + #[cfg(not(unix))] + let status = handle.wait().await?; - // Exit based on the result of the command if let Some(code) = status.code() { debug!("Command exited with code: {code}"); if let Ok(code) = u8::try_from(code) { @@ -1017,6 +1038,15 @@ pub(crate) async fn run( } } +#[cfg(unix)] +fn terminate_process(child: &mut tokio::process::Child) -> anyhow::Result<()> { + use nix::sys::signal::{self, Signal}; + use nix::unistd::Pid; + + let pid = child.id().context("Failed to get child process ID")?; + signal::kill(Pid::from_raw(pid.try_into()?), Signal::SIGTERM).context("Failed to send SIGTERM") +} + /// Returns `true` if we can skip creating an additional ephemeral environment in `uv run`. fn can_skip_ephemeral( spec: Option<&RequirementsSpecification>, diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 43816a5d748c..16062ccc9756 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -236,9 +236,30 @@ pub(crate) async fn run( // signal handlers after the command completes. let _handler = tokio::spawn(async { while tokio::signal::ctrl_c().await.is_ok() {} }); - let status = handle.wait().await.context("Child process disappeared")?; + // Exit based on the result of the command. + #[cfg(unix)] + let status = { + use tokio::select; + use tokio::signal::unix::{signal, SignalKind}; + + let mut term_signal = signal(SignalKind::terminate())?; + loop { + select! { + result = handle.wait() => { + break result; + }, + + // `SIGTERM` + _ = term_signal.recv() => { + let _ = terminate_process(&mut handle); + } + }; + } + }?; + + #[cfg(not(unix))] + let status = handle.wait().await?; - // Exit based on the result of the command if let Some(code) = status.code() { debug!("Command exited with code: {code}"); if let Ok(code) = u8::try_from(code) { @@ -257,6 +278,15 @@ pub(crate) async fn run( } } +#[cfg(unix)] +fn terminate_process(child: &mut tokio::process::Child) -> anyhow::Result<()> { + use nix::sys::signal::{self, Signal}; + use nix::unistd::Pid; + + let pid = child.id().context("Failed to get child process ID")?; + signal::kill(Pid::from_raw(pid.try_into()?), Signal::SIGTERM).context("Failed to send SIGTERM") +} + /// Return the entry points for the specified package. fn get_entrypoints( from: &PackageName, From a147ed0b5514821bc2640dabbb98916587337450 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 22:12:38 -0500 Subject: [PATCH 13/23] Use rich diagnostic formatting for early build failures (#9041) ## Summary This addresses cases like URL distributions, which are built before we get to the resolver. 
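The rich rendering comes from the `miette`/`thiserror` combination used in `diagnostics.rs`. A minimal, self-contained sketch of that general pattern, assuming `miette` (with its `fancy` feature) and `thiserror` as dependencies and a placeholder error type rather than uv's real ones:

```rust
use miette::Diagnostic;
use thiserror::Error;

// Placeholder error type; uv's real diagnostics wrap its own distribution and
// build errors rather than `std::io::Error`.
#[derive(Debug, Error, Diagnostic)]
#[error("Failed to download and build `{sdist}`")]
#[diagnostic(help("this often means a build dependency is missing"))]
struct BuildFailure {
    sdist: String,
    #[source]
    cause: std::io::Error,
}

fn main() {
    let err = BuildFailure {
        sdist: "example-1.0.0.tar.gz".to_string(),
        cause: std::io::Error::new(
            std::io::ErrorKind::Other,
            "build backend exited with status 1",
        ),
    };
    // With the "fancy" feature enabled, the `Debug` rendering of a
    // `miette::Report` produces the `×` / `├─▶` / `╰─▶` chain seen in the
    // updated test snapshots.
    let report = miette::Report::new(err);
    eprintln!("{report:?}");
}
```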
--- crates/uv/src/commands/pip/compile.rs | 11 ++ crates/uv/src/commands/pip/install.rs | 11 ++ crates/uv/src/commands/pip/sync.rs | 11 ++ crates/uv/src/commands/project/add.rs | 12 ++ crates/uv/src/commands/project/export.rs | 12 ++ crates/uv/src/commands/project/lock.rs | 13 +- crates/uv/src/commands/project/run.rs | 36 +++++ crates/uv/src/commands/project/sync.rs | 12 ++ crates/uv/tests/it/lock.rs | 19 ++- crates/uv/tests/it/pip_compile.rs | 23 ++- crates/uv/tests/it/pip_install.rs | 197 +++++++++++------------ 11 files changed, 232 insertions(+), 125 deletions(-) diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 8906273d4e8b..63f772e2c67e 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -418,6 +418,17 @@ pub(crate) async fn pip_compile( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( + dist, + err, + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 1934038c981b..bc1a49002679 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -424,6 +424,17 @@ pub(crate) async fn pip_install( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( + dist, + err, + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 9bdd7b1d0a54..7dde9dadb405 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -368,6 +368,17 @@ pub(crate) async fn pip_sync( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( + dist, + err, + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index d8412214a293..ebf0736ce217 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -691,6 +691,18 @@ pub(crate) async fn add( diagnostics::build(dist, err); Ok(ExitStatus::Failure) } + ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + )) => { + diagnostics::fetch_and_build(dist, err); + Ok(ExitStatus::Failure) + } + ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + )) => { + diagnostics::build(dist, err); + Ok(ExitStatus::Failure) + } err => { // Revert the changes to the `pyproject.toml`, if necessary. 
if modified { diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index 54b843a11a1f..c2d13de2148f 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -160,6 +160,18 @@ pub(crate) async fn export( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 4edf2058b63a..d652ebc8d845 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -181,7 +181,18 @@ pub(crate) async fn lock( diagnostics::build(dist, err); Ok(ExitStatus::Failure) } - + Err(ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + Ok(ExitStatus::Failure) + } + Err(ProjectError::Operation(pip::operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + Ok(ExitStatus::Failure) + } Err(err) => Err(err.into()), } } diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 6ce8bcbcca5d..c48f2d50e5bb 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -317,6 +317,18 @@ pub(crate) async fn run( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; @@ -646,6 +658,18 @@ pub(crate) async fn run( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; @@ -849,6 +873,18 @@ pub(crate) async fn run( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(ProjectError::Operation(operations::Error::Requirements(err))) => { let err = miette::Report::msg(format!("{err}")) .context("Invalid `--with` requirement"); 
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 4bac3189fb5a..03243d8623f1 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -172,6 +172,18 @@ pub(crate) async fn sync( diagnostics::build(dist, err); return Ok(ExitStatus::Failure); } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::fetch_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index f361bbfb1e5b..3c609c66fce6 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -15666,20 +15666,19 @@ fn lock_invalid_project_table() -> Result<()> { uv_snapshot!(context.filters(), context.lock().current_dir(context.temp_dir.join("a")), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - error: Failed to build `b @ file://[TEMP_DIR]/b` - Caused by: Failed to extract static metadata from `pyproject.toml` - Caused by: `pyproject.toml` is using the `[project]` table, but the required `project.name` field is not set. - Caused by: TOML parse error at line 2, column 10 - | - 2 | [project.urls] - | ^^^^^^^ - missing field `name` - + × Failed to build `b @ file://[TEMP_DIR]/b` + ├─▶ Failed to extract static metadata from `pyproject.toml` + ├─▶ `pyproject.toml` is using the `[project]` table, but the required `project.name` field is not set. 
+ ╰─▶ TOML parse error at line 2, column 10 + | + 2 | [project.urls] + | ^^^^^^^ + missing field `name` "###); Ok(()) diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index f12650a1d2df..d625b476040a 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -1871,12 +1871,12 @@ fn compile_git_mismatched_name() -> Result<()> { uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `dask @ git+https://github.com/pallets/flask.git@3.0.0` - Caused by: Package metadata name `flask` does not match given name `dask` + × Failed to download and build `dask @ git+https://github.com/pallets/flask.git@3.0.0` + ╰─▶ Package metadata name `flask` does not match given name `dask` "### ); @@ -12017,18 +12017,17 @@ fn git_source_missing_tag() -> Result<()> { uv_snapshot!(filters, context.pip_compile() .arg("pyproject.toml"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@missing` - Caused by: Git operation failed - Caused by: failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 - Caused by: failed to fetch tag `missing` - Caused by: process didn't exit successfully: `git fetch --force --update-head-ok 'https://github.com/astral-test/uv-public-pypackage' '+refs/tags/missing:refs/remotes/origin/tags/missing'` (exit status: 128) - --- stderr - fatal: couldn't find remote ref refs/tags/missing - + × Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@missing` + ├─▶ Git operation failed + ├─▶ failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 + ├─▶ failed to fetch tag `missing` + ╰─▶ process didn't exit successfully: `git fetch --force --update-head-ok 'https://github.com/astral-test/uv-public-pypackage' '+refs/tags/missing:refs/remotes/origin/tags/missing'` (exit status: 128) + --- stderr + fatal: couldn't find remote ref refs/tags/missing "###); Ok(()) diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index 47e2f8d0b8d1..a1f41379cf2f 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -253,64 +253,63 @@ dependencies = ["flask==1.0.x"] .arg("-r") .arg("requirements.txt"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/path_dep` - Caused by: Build backend failed to determine requirements with `build_wheel()` (exit status: 1) + × Failed to build `project @ file://[TEMP_DIR]/path_dep` + ╰─▶ Build backend failed to determine requirements with `build_wheel()` (exit status: 1) - [stdout] - configuration error: `project.dependencies[0]` must be pep508 - DESCRIPTION: - Project dependency specification according to PEP 508 + [stdout] + configuration error: `project.dependencies[0]` must be pep508 + DESCRIPTION: + Project dependency specification according to PEP 508 - GIVEN VALUE: - "flask==1.0.x" + GIVEN VALUE: + "flask==1.0.x" - OFFENDING RULE: 'format' + OFFENDING RULE: 'format' - DEFINITION: - { - "$id": "#/definitions/dependency", - "title": "Dependency", - "type": "string", - "format": "pep508" - } - - [stderr] - Traceback (most recent call last): - File "", line 14, in - File 
"[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel - return self._get_build_requires(config_settings, requirements=['wheel']) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires - self.run_setup() - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup - super().run_setup(setup_script=setup_script) - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup - exec(code, locals()) - File "", line 1, in - File "[CACHE_DIR]/builds-v0/[TMP]/__init__.py", line 104, in setup - return distutils.core.setup(**attrs) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/core.py", line 159, in setup - dist.parse_config_files() - File "[CACHE_DIR]/builds-v0/[TMP]/_virtualenv.py", line 20, in parse_config_files - result = old_parse_config_files(self, *args, **kwargs) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/dist.py", line 631, in parse_config_files - pyprojecttoml.apply_configuration(self, filename, ignore_option_errors) - File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration - config = read_configuration(filepath, True, ignore_option_errors, dist) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration - validate(subset, filepath) - File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 57, in validate - raise ValueError(f"{error}/n{summary}") from None - ValueError: invalid pyproject.toml config: `project.dependencies[0]`. - configuration error: `project.dependencies[0]` must be pep508 + DEFINITION: + { + "$id": "#/definitions/dependency", + "title": "Dependency", + "type": "string", + "format": "pep508" + } + [stderr] + Traceback (most recent call last): + File "", line 14, in + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel + return self._get_build_requires(config_settings, requirements=['wheel']) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires + self.run_setup() + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup + super().run_setup(setup_script=setup_script) + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup + exec(code, locals()) + File "", line 1, in + File "[CACHE_DIR]/builds-v0/[TMP]/__init__.py", line 104, in setup + return distutils.core.setup(**attrs) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/core.py", line 159, in setup + dist.parse_config_files() + File "[CACHE_DIR]/builds-v0/[TMP]/_virtualenv.py", line 20, in parse_config_files + result = old_parse_config_files(self, *args, **kwargs) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/dist.py", line 631, in parse_config_files + pyprojecttoml.apply_configuration(self, filename, ignore_option_errors) + File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration + config = read_configuration(filepath, True, ignore_option_errors, dist) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration + validate(subset, filepath) + File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 57, in validate + raise 
ValueError(f"{error}/n{summary}") from None + ValueError: invalid pyproject.toml config: `project.dependencies[0]`. + configuration error: `project.dependencies[0]` must be pep508 "### ); @@ -1623,18 +1622,17 @@ fn install_git_public_https_missing_branch_or_tag() { // 2.0.0 does not exist .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@2.0.0"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@2.0.0` - Caused by: Git operation failed - Caused by: failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 - Caused by: failed to fetch branch or tag `2.0.0` - Caused by: process didn't exit successfully: `git fetch [...]` (exit code: 128) - --- stderr - fatal: couldn't find remote ref refs/tags/2.0.0 - + × Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@2.0.0` + ├─▶ Git operation failed + ├─▶ failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 + ├─▶ failed to fetch branch or tag `2.0.0` + ╰─▶ process didn't exit successfully: `git fetch [...]` (exit code: 128) + --- stderr + fatal: couldn't find remote ref refs/tags/2.0.0 "###); } @@ -1660,18 +1658,17 @@ fn install_git_public_https_missing_commit() { .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b") , @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b` - Caused by: Git operation failed - Caused by: failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 - Caused by: failed to fetch commit `79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b` - Caused by: process didn't exit successfully: `git fetch [...]` (exit code: 128) - --- stderr - fatal: remote error: upload-pack: not our ref 79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b - + × Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b` + ├─▶ Git operation failed + ├─▶ failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566 + ├─▶ failed to fetch commit `79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b` + ╰─▶ process didn't exit successfully: `git fetch [...]` (exit code: 128) + --- stderr + fatal: remote error: upload-pack: not our ref 79a935a7a1a0ad6d0bdf72dce0e16cb0a24a1b3b "###); } @@ -1872,19 +1869,18 @@ fn install_git_private_https_pat_not_authorized() { .arg(format!("uv-private-pypackage @ git+https://git:{token}@github.com/astral-test/uv-private-pypackage")) , @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `uv-private-pypackage @ git+https://git:***@github.com/astral-test/uv-private-pypackage` - Caused by: Git operation failed - Caused by: failed to clone into: [CACHE_DIR]/git-v0/db/8401f5508e3e612d - Caused by: process didn't exit successfully: `git fetch --force --update-head-ok 'https://git:***@github.com/astral-test/uv-private-pypackage' '+HEAD:refs/remotes/origin/HEAD'` (exit status: 128) - --- stderr - remote: Support for password authentication was removed on August 13, 2021. 
- remote: Please see https://docs.github.com/get-started/getting-started-with-git/about-remote-repositories#cloning-with-https-urls for information on currently recommended modes of authentication. - fatal: Authentication failed for 'https://github.com/astral-test/uv-private-pypackage/' - + × Failed to download and build `uv-private-pypackage @ git+https://git:***@github.com/astral-test/uv-private-pypackage` + ├─▶ Git operation failed + ├─▶ failed to clone into: [CACHE_DIR]/git-v0/db/8401f5508e3e612d + ╰─▶ process didn't exit successfully: `git fetch --force --update-head-ok 'https://git:***@github.com/astral-test/uv-private-pypackage' '+HEAD:refs/remotes/origin/HEAD'` (exit status: 128) + --- stderr + remote: Support for password authentication was removed on August 13, 2021. + remote: Please see https://docs.github.com/get-started/getting-started-with-git/about-remote-repositories#cloning-with-https-urls for information on currently recommended modes of authentication. + fatal: Authentication failed for 'https://github.com/astral-test/uv-private-pypackage/' "###); } @@ -4011,18 +4007,17 @@ fn no_build_isolation() -> Result<()> { .arg("requirements.in") .arg("--no-build-isolation"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz` - Caused by: Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'setuptools' + × Failed to download and build `anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz` + ╰─▶ Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'setuptools' "### ); @@ -4080,18 +4075,17 @@ fn respect_no_build_isolation_env_var() -> Result<()> { .arg("requirements.in") .env(EnvVars::UV_NO_BUILD_ISOLATION, "yes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz` - Caused by: Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'setuptools' + × Failed to download and build `anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz` + ╰─▶ Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'setuptools' "### ); @@ -7041,18 +7035,17 @@ fn install_build_isolation_package() -> Result<()> { .arg("iniconfig") .arg(package.path()), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to download and build `iniconfig @ 
https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz` - Caused by: Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'hatchling' + × Failed to download and build `iniconfig @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz` + ╰─▶ Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' "### ); From de9dc398a849173d131270aad7837e3641dd58f5 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 22:23:29 -0500 Subject: [PATCH 14/23] Rename `fetch_and_build` to `download_and_build` (#9042) --- crates/uv/src/commands/diagnostics.rs | 6 +++--- crates/uv/src/commands/pip/compile.rs | 4 ++-- crates/uv/src/commands/pip/install.rs | 4 ++-- crates/uv/src/commands/pip/sync.rs | 4 ++-- crates/uv/src/commands/project/add.rs | 4 ++-- crates/uv/src/commands/project/export.rs | 4 ++-- crates/uv/src/commands/project/lock.rs | 4 ++-- crates/uv/src/commands/project/run.rs | 12 ++++++------ crates/uv/src/commands/project/sync.rs | 4 ++-- 9 files changed, 23 insertions(+), 23 deletions(-) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index befe12ea0bd8..9a63542845cc 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -20,8 +20,8 @@ static SUGGESTIONS: LazyLock> = LazyLock::ne .collect() }); -/// Render a [`uv_resolver::ResolveError::DownloadAndBuild`] with a help message. -pub(crate) fn fetch_and_build(sdist: Box, cause: uv_distribution::Error) { +/// Render a remote source distribution build failure with a help message. +pub(crate) fn download_and_build(sdist: Box, cause: uv_distribution::Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] #[error("Failed to download and build `{sdist}`")] #[diagnostic()] @@ -48,7 +48,7 @@ pub(crate) fn fetch_and_build(sdist: Box, cause: uv_distribution::Er anstream::eprint!("{report:?}"); } -/// Render a [`uv_resolver::ResolveError::Build`] with a help message. +/// Render a local source distribution build failure with a help message. 
pub(crate) fn build(sdist: Box, cause: uv_distribution::Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] #[error("Failed to build `{sdist}`")] diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 63f772e2c67e..17a7c34f10bb 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -411,7 +411,7 @@ pub(crate) async fn pip_compile( return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { @@ -422,7 +422,7 @@ pub(crate) async fn pip_compile( dist, err, ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index bc1a49002679..1e146e9f3c5c 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -417,7 +417,7 @@ pub(crate) async fn pip_install( return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { @@ -428,7 +428,7 @@ pub(crate) async fn pip_install( dist, err, ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 7dde9dadb405..10118692b876 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -361,7 +361,7 @@ pub(crate) async fn pip_sync( return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { @@ -372,7 +372,7 @@ pub(crate) async fn pip_sync( dist, err, ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index ebf0736ce217..05b57ff1509c 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -682,7 +682,7 @@ pub(crate) async fn add( ProjectError::Operation(pip::operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), )) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); Ok(ExitStatus::Failure) } ProjectError::Operation(pip::operations::Error::Resolve( @@ -694,7 +694,7 @@ pub(crate) async fn add( ProjectError::Operation(pip::operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), )) => { - diagnostics::fetch_and_build(dist, 
err); + diagnostics::download_and_build(dist, err); Ok(ExitStatus::Failure) } ProjectError::Operation(pip::operations::Error::Requirements( diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index c2d13de2148f..36504040848b 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -151,7 +151,7 @@ pub(crate) async fn export( Err(ProjectError::Operation(pip::operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(pip::operations::Error::Resolve( @@ -163,7 +163,7 @@ pub(crate) async fn export( Err(ProjectError::Operation(pip::operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(pip::operations::Error::Requirements( diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index d652ebc8d845..3f3866b87b58 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -172,7 +172,7 @@ pub(crate) async fn lock( Err(ProjectError::Operation(pip::operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); Ok(ExitStatus::Failure) } Err(ProjectError::Operation(pip::operations::Error::Resolve( @@ -184,7 +184,7 @@ pub(crate) async fn lock( Err(ProjectError::Operation(pip::operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); Ok(ExitStatus::Failure) } Err(ProjectError::Operation(pip::operations::Error::Requirements( diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index c48f2d50e5bb..95ab47f077b1 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -308,7 +308,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( @@ -320,7 +320,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Requirements( @@ -649,7 +649,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( @@ -661,7 +661,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return 
Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Requirements( @@ -864,7 +864,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( @@ -876,7 +876,7 @@ pub(crate) async fn run( Err(ProjectError::Operation(operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Requirements( diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 03243d8623f1..339763252054 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -163,7 +163,7 @@ pub(crate) async fn sync( Err(ProjectError::Operation(operations::Error::Resolve( uv_resolver::ResolveError::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Resolve( @@ -175,7 +175,7 @@ pub(crate) async fn sync( Err(ProjectError::Operation(operations::Error::Requirements( uv_requirements::Error::DownloadAndBuild(dist, err), ))) => { - diagnostics::fetch_and_build(dist, err); + diagnostics::download_and_build(dist, err); return Ok(ExitStatus::Failure); } Err(ProjectError::Operation(operations::Error::Requirements( From 00bf69be282d5105f4e8311d9655816c43a8deb7 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 22:38:05 -0500 Subject: [PATCH 15/23] Revert `uv.lock` changes when `uv add` fails (#9030) ## Summary If a `uv add` fails at the sync stage, we need to clean up the changes to the `uv.lock`, since it might've been edited during in the lock stage (which, by necessity, succeeded). As-is, we revert the `pyproject.toml` but not the `uv.lock`, so the two are out-of-sync. Closes https://github.com/astral-sh/uv/issues/9028. Closes https://github.com/astral-sh/uv/issues/7992. --- crates/uv/src/commands/project/add.rs | 58 ++++++--- crates/uv/src/commands/project/lock.rs | 11 ++ crates/uv/tests/it/edit.rs | 172 ++++++++++++++++++++++--- 3 files changed, 204 insertions(+), 37 deletions(-) diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index 05b57ff1509c..db76cc2e7a3c 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -1,5 +1,6 @@ use std::collections::hash_map::Entry; use std::fmt::Write; +use std::io; use std::path::{Path, PathBuf}; use anyhow::{bail, Context, Result}; @@ -44,7 +45,7 @@ use crate::commands::pip::loggers::{ use crate::commands::pip::operations::Modifications; use crate::commands::project::lock::LockMode; use crate::commands::project::{ - init_script_python_requirement, validate_script_requires_python, ProjectError, + init_script_python_requirement, lock, validate_script_requires_python, ProjectError, ProjectInterpreter, ScriptPython, }; use crate::commands::reporters::{PythonDownloadReporter, ResolverReporter}; @@ -618,8 +619,10 @@ pub(crate) async fn add( } // Store the content prior to any modifications. 
- let existing = project.pyproject_toml().as_ref().to_vec(); - let root = project.root().to_path_buf(); + let project_root = project.root().to_path_buf(); + let workspace_root = project.workspace().install_path().clone(); + let existing_pyproject_toml = project.pyproject_toml().as_ref().to_vec(); + let existing_uv_lock = lock::read_bytes(project.workspace()).await?; // Update the `pypackage.toml` in-memory. let project = project @@ -628,12 +631,18 @@ pub(crate) async fn add( // Set the Ctrl-C handler to revert changes on exit. let _ = ctrlc::set_handler({ - let root = root.clone(); - let existing = existing.clone(); + let project_root = project_root.clone(); + let workspace_root = workspace_root.clone(); + let existing_pyproject_toml = existing_pyproject_toml.clone(); + let existing_uv_lock = existing_uv_lock.clone(); move || { - // Revert the changes to the `pyproject.toml`, if necessary. if modified { - let _ = fs_err::write(root.join("pyproject.toml"), &existing); + let _ = revert( + &project_root, + &workspace_root, + &existing_pyproject_toml, + existing_uv_lock.as_deref(), + ); } #[allow(clippy::exit, clippy::cast_possible_wrap)] @@ -667,9 +676,13 @@ pub(crate) async fn add( { Ok(()) => Ok(ExitStatus::Success), Err(err) => { - // Revert the changes to the `pyproject.toml`, if necessary. if modified { - fs_err::write(root.join("pyproject.toml"), &existing)?; + let _ = revert( + &project_root, + &workspace_root, + &existing_pyproject_toml, + existing_uv_lock.as_deref(), + ); } match err { @@ -703,13 +716,7 @@ pub(crate) async fn add( diagnostics::build(dist, err); Ok(ExitStatus::Failure) } - err => { - // Revert the changes to the `pyproject.toml`, if necessary. - if modified { - fs_err::write(root.join("pyproject.toml"), &existing)?; - } - Err(err.into()) - } + err => Err(err.into()), } } } @@ -943,6 +950,25 @@ async fn lock_and_sync( Ok(()) } +/// Revert the changes to the `pyproject.toml` and `uv.lock`, if necessary. +fn revert( + project_root: &Path, + workspace_root: &Path, + pyproject_toml: &[u8], + uv_lock: Option<&[u8]>, +) -> Result<(), io::Error> { + debug!("Reverting changes to `pyproject.toml`"); + let () = fs_err::write(project_root.join("pyproject.toml"), pyproject_toml)?; + if let Some(uv_lock) = uv_lock.as_ref() { + debug!("Reverting changes to `uv.lock`"); + let () = fs_err::write(workspace_root.join("uv.lock"), uv_lock)?; + } else { + debug!("Removing `uv.lock`"); + let () = fs_err::remove_file(workspace_root.join("uv.lock"))?; + } + Ok(()) +} + /// Augment a user-provided requirement by attaching any specification data that was provided /// separately from the requirement itself (e.g., `--branch main`). fn augment_requirement( diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 3f3866b87b58..01a5833566f7 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -1009,6 +1009,17 @@ pub(crate) async fn read(workspace: &Workspace) -> Result, ProjectE } } +/// Read the lockfile from the workspace as bytes. +/// +/// Returns `Ok(None)` if the lockfile does not exist. +pub(crate) async fn read_bytes(workspace: &Workspace) -> Result>, ProjectError> { + match fs_err::tokio::read(&workspace.install_path().join("uv.lock")).await { + Ok(encoded) => Ok(Some(encoded)), + Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None), + Err(err) => Err(err.into()), + } +} + /// Reports on the versions that were upgraded in the new lockfile. 
/// /// Returns `true` if any upgrades were reported. diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index 04f25d4ad4f1..622771e44260 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -5506,57 +5506,77 @@ fn add_git_to_script() -> Result<()> { Ok(()) } -/// Revert changes to a `pyproject.toml` the `add` fails. +/// Revert changes to the `pyproject.toml` and `uv.lock` when the `add` operation fails. #[test] fn fail_to_add_revert_project() -> Result<()> { let context = TestContext::new("3.12"); - let pyproject_toml = context.temp_dir.child("pyproject.toml"); - pyproject_toml.write_str(indoc! {r#" + context + .temp_dir + .child("pyproject.toml") + .write_str(indoc! {r#" [project] - name = "project" + name = "parent" version = "0.1.0" requires-python = ">=3.12" dependencies = [] + "#})?; + + // Add a dependency on a package that declares static metadata (so can always resolve), but + // can't be installed. + let pyproject_toml = context.temp_dir.child("child/pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig"] [build-system] requires = ["setuptools>=42"] build-backend = "setuptools.build_meta" "#})?; + context + .temp_dir + .child("src") + .child("child") + .child("__init__.py") + .touch()?; + context + .temp_dir + .child("child") + .child("setup.py") + .write_str("1/0")?; - // Adding `pytorch==1.0.2` should produce an error let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::>(); - uv_snapshot!(filters, context.add().arg("pytorch==1.0.2"), @r###" + uv_snapshot!(filters, context.add().arg("./child"), @r###" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - Resolved 2 packages in [TIME] + Resolved 3 packages in [TIME] error: Failed to prepare distributions - Caused by: Failed to download and build `pytorch==1.0.2` - Caused by: Build backend failed to build wheel through `build_wheel` (exit status: 1) + Caused by: Failed to build `child @ file://[TEMP_DIR]/child` + Caused by: Build backend failed to determine requirements with `build_wheel()` (exit status: 1) [stderr] Traceback (most recent call last): - File "", line 11, in - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 410, in build_wheel - return self._build_with_temp_dir( - ^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 395, in _build_with_temp_dir + File "", line 14, in + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel + return self._get_build_requires(config_settings, requirements=['wheel']) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires self.run_setup() - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup - super().run_setup(setup_script=setup_script) File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup exec(code, locals()) - File "", line 15, in - Exception: You tried to install "pytorch". 
The package named for PyTorch is "torch" - + File "", line 1, in + ZeroDivisionError: division by zero "###); - let pyproject_toml = context.read("pyproject.toml"); + let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?; insta::with_settings!({ filters => context.filters(), @@ -5564,18 +5584,128 @@ fn fail_to_add_revert_project() -> Result<()> { assert_snapshot!( pyproject_toml, @r###" [project] - name = "project" + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "### + ); + }); + + // The lockfile should not exist, even though resolution succeeded. + assert!(!context.temp_dir.join("uv.lock").exists()); + + Ok(()) +} + +/// Revert changes to the `pyproject.toml` and `uv.lock` when the `add` operation fails. +/// +/// In this case, the project has an existing lockfile. +#[test] +fn fail_to_edit_revert_project() -> Result<()> { + let context = TestContext::new("3.12"); + + context + .temp_dir + .child("pyproject.toml") + .write_str(indoc! {r#" + [project] + name = "parent" version = "0.1.0" requires-python = ">=3.12" dependencies = [] + "#})?; + + uv_snapshot!(context.filters(), context.add().arg("iniconfig"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "###); + + let before = fs_err::read_to_string(context.temp_dir.join("uv.lock"))?; + + // Add a dependency on a package that declares static metadata (so can always resolve), but + // can't be installed. + let pyproject_toml = context.temp_dir.child("child/pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig"] [build-system] requires = ["setuptools>=42"] build-backend = "setuptools.build_meta" + "#})?; + context + .temp_dir + .child("src") + .child("child") + .child("__init__.py") + .touch()?; + context + .temp_dir + .child("child") + .child("setup.py") + .write_str("1/0")?; + + let filters = std::iter::once((r"exit code: 1", "exit status: 1")) + .chain(context.filters()) + .collect::>(); + uv_snapshot!(filters, context.add().arg("./child"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + error: Failed to prepare distributions + Caused by: Failed to build `child @ file://[TEMP_DIR]/child` + Caused by: Build backend failed to determine requirements with `build_wheel()` (exit status: 1) + + [stderr] + Traceback (most recent call last): + File "", line 14, in + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel + return self._get_build_requires(config_settings, requirements=['wheel']) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires + self.run_setup() + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup + exec(code, locals()) + File "", line 1, in + ZeroDivisionError: division by zero + "###); + + let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?; + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + pyproject_toml, @r###" + [project] + name = "parent" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [ + "iniconfig>=2.0.0", + ] "### ); }); + // The lockfile should exist, but be unchanged. 
+ let after = fs_err::read_to_string(context.temp_dir.join("uv.lock"))?; + assert_eq!(before, after); + Ok(()) } From c5caf92edf539a9ebf24d375871178f8f8a0ab93 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Mon, 11 Nov 2024 22:54:30 -0500 Subject: [PATCH 16/23] Use rich diagnostic formatting for install failures (#9043) ## Summary Shows similar diagnostics for failures that happen at install time, rather than resolve time. This will ultimately feed into https://github.com/astral-sh/uv/issues/8962 since we'll now have consolidated handling for these kinds of failures. --- crates/uv-dispatch/src/lib.rs | 5 +- crates/uv-installer/src/lib.rs | 2 +- crates/uv-installer/src/preparer.rs | 18 +- crates/uv/src/commands/diagnostics.rs | 30 ++- crates/uv/src/commands/pip/install.rs | 23 +- crates/uv/src/commands/pip/operations.rs | 8 +- crates/uv/src/commands/pip/sync.rs | 23 +- crates/uv/src/commands/project/add.rs | 18 ++ crates/uv/src/commands/project/remove.rs | 70 +++++- crates/uv/src/commands/project/run.rs | 26 +- crates/uv/src/commands/project/sync.rs | 26 +- crates/uv/tests/it/build.rs | 1 - crates/uv/tests/it/edit.rs | 66 +++-- crates/uv/tests/it/lock.rs | 44 ++-- crates/uv/tests/it/pip_install.rs | 45 ++-- crates/uv/tests/it/pip_sync.rs | 300 +++++++++++------------ crates/uv/tests/it/sync.rs | 48 ++-- 17 files changed, 449 insertions(+), 304 deletions(-) diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index 6003e45b1d93..c07f48f5ee2c 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -275,10 +275,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { remote.iter().map(ToString::to_string).join(", ") ); - preparer - .prepare(remote, self.in_flight) - .await - .context("Failed to prepare distributions")? + preparer.prepare(remote, self.in_flight).await? }; // Remove any unnecessary packages. 
diff --git a/crates/uv-installer/src/lib.rs b/crates/uv-installer/src/lib.rs index a13419723e49..1f5b333bd48b 100644 --- a/crates/uv-installer/src/lib.rs +++ b/crates/uv-installer/src/lib.rs @@ -1,7 +1,7 @@ pub use compile::{compile_tree, CompileError}; pub use installer::{Installer, Reporter as InstallReporter}; pub use plan::{Plan, Planner}; -pub use preparer::{Preparer, Reporter as PrepareReporter}; +pub use preparer::{Error as PrepareError, Preparer, Reporter as PrepareReporter}; pub use site_packages::{SatisfiesResult, SitePackages, SitePackagesDiagnostic}; pub use uninstall::{uninstall, UninstallError}; diff --git a/crates/uv-installer/src/preparer.rs b/crates/uv-installer/src/preparer.rs index f75f085def5e..3d05ecda4996 100644 --- a/crates/uv-installer/src/preparer.rs +++ b/crates/uv-installer/src/preparer.rs @@ -23,11 +23,11 @@ pub enum Error { #[error("Using pre-built wheels is disabled, but attempted to use `{0}`")] NoBinary(PackageName), #[error("Failed to download `{0}`")] - Download(Box, #[source] Box), + Download(Box, #[source] uv_distribution::Error), #[error("Failed to download and build `{0}`")] - DownloadAndBuild(Box, #[source] Box), + DownloadAndBuild(Box, #[source] uv_distribution::Error), #[error("Failed to build `{0}`")] - Build(Box, #[source] Box), + Build(Box, #[source] uv_distribution::Error), #[error("Unzip failed in another thread: {0}")] Thread(String), } @@ -146,12 +146,12 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { .get_or_build_wheel(&dist, self.tags, policy) .boxed_local() .map_err(|err| match dist.clone() { - Dist::Built(dist) => Error::Download(Box::new(dist), Box::new(err)), + Dist::Built(dist) => Error::Download(Box::new(dist), err), Dist::Source(dist) => { if dist.is_local() { - Error::Build(Box::new(dist), Box::new(err)) + Error::Build(Box::new(dist), err) } else { - Error::DownloadAndBuild(Box::new(dist), Box::new(err)) + Error::DownloadAndBuild(Box::new(dist), err) } } }) @@ -166,12 +166,12 @@ impl<'a, Context: BuildContext> Preparer<'a, Context> { wheel.hashes(), ); Err(match dist { - Dist::Built(dist) => Error::Download(Box::new(dist), Box::new(err)), + Dist::Built(dist) => Error::Download(Box::new(dist), err), Dist::Source(dist) => { if dist.is_local() { - Error::Build(Box::new(dist), Box::new(err)) + Error::Build(Box::new(dist), err) } else { - Error::DownloadAndBuild(Box::new(dist), Box::new(err)) + Error::DownloadAndBuild(Box::new(dist), err) } } }) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index 9a63542845cc..391948d66d75 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -2,7 +2,7 @@ use owo_colors::OwoColorize; use rustc_hash::FxHashMap; use std::str::FromStr; use std::sync::LazyLock; -use uv_distribution_types::{Name, SourceDist}; +use uv_distribution_types::{BuiltDist, Name, SourceDist}; use uv_normalize::PackageName; /// Static map of common package name typos or misconfigurations to their correct package names. @@ -48,6 +48,34 @@ pub(crate) fn download_and_build(sdist: Box, cause: uv_distribution: anstream::eprint!("{report:?}"); } +/// Render a remote binary distribution download failure with a help message. 
+pub(crate) fn download(sdist: Box, cause: uv_distribution::Error) { + #[derive(Debug, miette::Diagnostic, thiserror::Error)] + #[error("Failed to download `{sdist}`")] + #[diagnostic()] + struct Error { + sdist: Box, + #[source] + cause: uv_distribution::Error, + #[help] + help: Option, + } + + let report = miette::Report::new(Error { + help: SUGGESTIONS.get(sdist.name()).map(|suggestion| { + format!( + "`{}` is often confused for `{}` Did you mean to install `{}` instead?", + sdist.name().cyan(), + suggestion.cyan(), + suggestion.cyan(), + ) + }), + sdist, + cause, + }); + anstream::eprint!("{report:?}"); +} + /// Render a local source distribution build failure with a help message. pub(crate) fn build(sdist: Box, cause: uv_distribution::Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 1e146e9f3c5c..8dfb6490e756 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -439,7 +439,7 @@ pub(crate) async fn pip_install( }; // Sync the environment. - operations::install( + match operations::install( &resolution, site_packages, modifications, @@ -461,7 +461,26 @@ pub(crate) async fn pip_install( dry_run, printer, ) - .await?; + .await + { + Ok(_) => {} + Err(operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( + dist, + err, + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err))) => { + diagnostics::download(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + } // Notify the user of any resolution diagnostics. operations::diagnose_resolution(resolution.diagnostics(), printer)?; diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 88d06a2be6f3..d954c73ee4d1 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -457,10 +457,7 @@ pub(crate) async fn install( ) .with_reporter(PrepareReporter::from(printer).with_length(remote.len() as u64)); - let wheels = preparer - .prepare(remote.clone(), in_flight) - .await - .context("Failed to prepare distributions")?; + let wheels = preparer.prepare(remote.clone(), in_flight).await?; logger.on_prepare(wheels.len(), start, printer)?; @@ -751,6 +748,9 @@ pub(crate) fn diagnose_environment( #[derive(thiserror::Error, Debug)] pub(crate) enum Error { + #[error("Failed to prepare distributions")] + Prepare(#[from] uv_installer::PrepareError), + #[error(transparent)] Resolve(#[from] uv_resolver::ResolveError), diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 10118692b876..551ad6798664 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -383,7 +383,7 @@ pub(crate) async fn pip_sync( }; // Sync the environment. 
- operations::install( + match operations::install( &resolution, site_packages, Modifications::Exact, @@ -405,7 +405,26 @@ pub(crate) async fn pip_sync( dry_run, printer, ) - .await?; + .await + { + Ok(_) => {} + Err(operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( + dist, + err, + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err))) => { + diagnostics::download(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + } // Notify the user of any resolution diagnostics. operations::diagnose_resolution(resolution.diagnostics(), printer)?; diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index db76cc2e7a3c..ac6b5c8a15df 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -716,6 +716,24 @@ pub(crate) async fn add( diagnostics::build(dist, err); Ok(ExitStatus::Failure) } + ProjectError::Operation(pip::operations::Error::Prepare( + uv_installer::PrepareError::Build(dist, err), + )) => { + diagnostics::build(dist, err); + Ok(ExitStatus::Failure) + } + ProjectError::Operation(pip::operations::Error::Prepare( + uv_installer::PrepareError::DownloadAndBuild(dist, err), + )) => { + diagnostics::download_and_build(dist, err); + Ok(ExitStatus::Failure) + } + ProjectError::Operation(pip::operations::Error::Prepare( + uv_installer::PrepareError::Download(dist, err), + )) => { + diagnostics::download(dist, err); + Ok(ExitStatus::Failure) + } err => Err(err.into()), } } diff --git a/crates/uv/src/commands/project/remove.rs b/crates/uv/src/commands/project/remove.rs index 736827447ee5..e68e94b35fa0 100644 --- a/crates/uv/src/commands/project/remove.rs +++ b/crates/uv/src/commands/project/remove.rs @@ -21,10 +21,11 @@ use uv_workspace::pyproject_mut::{DependencyTarget, PyProjectTomlMut}; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; +use crate::commands::pip::operations; use crate::commands::pip::operations::Modifications; -use crate::commands::project::default_dependency_groups; use crate::commands::project::lock::LockMode; -use crate::commands::{project, ExitStatus, SharedState}; +use crate::commands::project::{default_dependency_groups, ProjectError}; +use crate::commands::{diagnostics, project, ExitStatus, SharedState}; use crate::printer::Printer; use crate::settings::ResolverInstallerSettings; @@ -213,7 +214,7 @@ pub(crate) async fn remove( let state = SharedState::default(); // Lock and sync the environment, if necessary. - let lock = project::lock::do_safe_lock( + let lock = match project::lock::do_safe_lock( mode, project.workspace(), settings.as_ref().into(), @@ -227,8 +228,41 @@ pub(crate) async fn remove( cache, printer, ) - .await? 
- .into_lock(); + .await + { + Ok(result) => result.into_lock(), + Err(ProjectError::Operation(operations::Error::Resolve( + uv_resolver::ResolveError::NoSolution(err), + ))) => { + diagnostics::no_solution(&err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Resolve( + uv_resolver::ResolveError::DownloadAndBuild(dist, err), + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Resolve( + uv_resolver::ResolveError::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::DownloadAndBuild(dist, err), + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Requirements( + uv_requirements::Error::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + }; if no_sync { return Ok(ExitStatus::Success); @@ -255,7 +289,7 @@ pub(crate) async fn remove( }, }; - project::sync::do_sync( + match project::sync::do_sync( target, &venv, &extras, @@ -272,7 +306,29 @@ pub(crate) async fn remove( cache, printer, ) - .await?; + .await + { + Ok(()) => {} + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::DownloadAndBuild(dist, err), + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Download(dist, err), + ))) => { + diagnostics::download(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + } Ok(ExitStatus::Success) } diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 95ab47f077b1..91988de94de3 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -697,7 +697,7 @@ pub(crate) async fn run( let install_options = InstallOptions::default(); - project::sync::do_sync( + match project::sync::do_sync( target, &venv, &extras, @@ -718,7 +718,29 @@ pub(crate) async fn run( cache, printer, ) - .await?; + .await + { + Ok(()) => {} + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::DownloadAndBuild(dist, err), + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Download(dist, err), + ))) => { + diagnostics::download(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + } lock = Some(result.into_lock()); } diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 339763252054..76f363a8ab26 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -213,7 +213,7 @@ pub(crate) async fn sync( }; // Perform the sync operation. 
- do_sync( + match do_sync( target, &venv, &extras, @@ -230,7 +230,29 @@ pub(crate) async fn sync( cache, printer, ) - .await?; + .await + { + Ok(()) => {} + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Build(dist, err), + ))) => { + diagnostics::build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::DownloadAndBuild(dist, err), + ))) => { + diagnostics::download_and_build(dist, err); + return Ok(ExitStatus::Failure); + } + Err(ProjectError::Operation(operations::Error::Prepare( + uv_installer::PrepareError::Download(dist, err), + ))) => { + diagnostics::download(dist, err); + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + } Ok(ExitStatus::Success) } diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs index 8f8b5e30a0ae..d5404191c942 100644 --- a/crates/uv/tests/it/build.rs +++ b/crates/uv/tests/it/build.rs @@ -1552,7 +1552,6 @@ fn sha() -> Result<()> { ----- stderr ----- Building source distribution... error: Failed to install requirements from `build-system.requires` - Caused by: Failed to prepare distributions Caused by: Failed to download `setuptools==68.2.2` Caused by: Hash mismatch for `setuptools==68.2.2` diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index 622771e44260..0a3b43baf311 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -5553,27 +5553,26 @@ fn fail_to_add_revert_project() -> Result<()> { .collect::>(); uv_snapshot!(filters, context.add().arg("./child"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 3 packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to build `child @ file://[TEMP_DIR]/child` - Caused by: Build backend failed to determine requirements with `build_wheel()` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 14, in - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel - return self._get_build_requires(config_settings, requirements=['wheel']) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires - self.run_setup() - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup - exec(code, locals()) - File "", line 1, in - ZeroDivisionError: division by zero + × Failed to build `child @ file://[TEMP_DIR]/child` + ╰─▶ Build backend failed to determine requirements with `build_wheel()` (exit status: 1) + + [stderr] + Traceback (most recent call last): + File "", line 14, in + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel + return self._get_build_requires(config_settings, requirements=['wheel']) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires + self.run_setup() + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup + exec(code, locals()) + File "", line 1, in + ZeroDivisionError: division by zero "###); let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?; @@ -5661,27 +5660,26 @@ fn fail_to_edit_revert_project() -> Result<()> { .collect::>(); uv_snapshot!(filters, context.add().arg("./child"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr 
----- Resolved 3 packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to build `child @ file://[TEMP_DIR]/child` - Caused by: Build backend failed to determine requirements with `build_wheel()` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 14, in - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel - return self._get_build_requires(config_settings, requirements=['wheel']) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires - self.run_setup() - File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup - exec(code, locals()) - File "", line 1, in - ZeroDivisionError: division by zero + × Failed to build `child @ file://[TEMP_DIR]/child` + ╰─▶ Build backend failed to determine requirements with `build_wheel()` (exit status: 1) + + [stderr] + Traceback (most recent call last): + File "", line 14, in + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel + return self._get_build_requires(config_settings, requirements=['wheel']) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires + self.run_setup() + File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup + exec(code, locals()) + File "", line 1, in + ZeroDivisionError: division by zero "###); let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?; diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 3c609c66fce6..ba5f92360dd3 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -5549,20 +5549,19 @@ fn lock_invalid_hash() -> Result<()> { // Install from the lockfile. uv_snapshot!(context.filters(), context.sync().arg("--frozen").env_remove(EnvVars::UV_EXCLUDE_NEWER), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to prepare distributions - Caused by: Failed to download `idna==3.6` - Caused by: Hash mismatch for `idna==3.6` + × Failed to download `idna==3.6` + ╰─▶ Hash mismatch for `idna==3.6` - Expected: - sha256:aecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca - sha256:d05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f + Expected: + sha256:aecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca + sha256:d05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f - Computed: - sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f + Computed: + sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f "###); Ok(()) @@ -6394,27 +6393,25 @@ fn lock_redact_https() -> Result<()> { // when installing `iniconfig`, rather than when building `foo`. 
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--index-url").arg("https://pypi-proxy.fly.dev/basic-auth/simple").arg("--no-install-project"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to prepare distributions - Caused by: Failed to download `iniconfig==2.0.0` - Caused by: Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` - Caused by: HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) + × Failed to download `iniconfig==2.0.0` + ├─▶ Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` + ╰─▶ HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) "###); // Installing from the lockfile should fail without an index. uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--no-install-project"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to prepare distributions - Caused by: Failed to download `iniconfig==2.0.0` - Caused by: Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` - Caused by: HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) + × Failed to download `iniconfig==2.0.0` + ├─▶ Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` + ╰─▶ HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) "###); // Installing from the lockfile should succeed when credentials are included on the command-line. @@ -6447,14 +6444,13 @@ fn lock_redact_https() -> Result<()> { // Installing without credentials will fail without a cache. 
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--reinstall").arg("--no-cache").arg("--no-install-project"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to prepare distributions - Caused by: Failed to download `iniconfig==2.0.0` - Caused by: Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` - Caused by: HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) + × Failed to download `iniconfig==2.0.0` + ├─▶ Failed to fetch: `https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl` + ╰─▶ HTTP status client error (401 Unauthorized) for url (https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl) "###); // Installing with credentials from with `UV_INDEX_URL` should succeed. diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index a1f41379cf2f..92cd4135665f 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -2347,16 +2347,15 @@ fn no_prerelease_hint_source_builds() -> Result<()> { uv_snapshot!(context.filters(), context.pip_install().arg(".").env(EnvVars::UV_EXCLUDE_NEWER, "2018-10-08"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to resolve requirements from `setup.py` build - Caused by: No solution found when resolving: `setuptools>=40.8.0` - Caused by: Because only setuptools<40.8.0 is available and you require setuptools>=40.8.0, we can conclude that your requirements are unsatisfiable. + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to resolve requirements from `setup.py` build + ├─▶ No solution found when resolving: `setuptools>=40.8.0` + ╰─▶ Because only setuptools<40.8.0 is available and you require setuptools>=40.8.0, we can conclude that your requirements are unsatisfiable. 
"### ); @@ -5741,21 +5740,20 @@ fn require_hashes_mismatch() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 3 packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -6226,21 +6224,20 @@ fn verify_hashes_mismatch() -> Result<()> { .arg("requirements.txt") .arg("--verify-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 3 packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + sha256:a7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs index bdfab930f3da..26e740ed33d6 100644 --- a/crates/uv/tests/it/pip_sync.rs +++ b/crates/uv/tests/it/pip_sync.rs @@ -3565,20 +3565,19 @@ fn require_hashes_wheel_no_binary() -> Result<()> { .arg(":all:") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download and build `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a + Computed: + sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a "### ); @@ -3659,20 +3658,19 @@ fn require_hashes_source_only_binary() -> Result<()> { .arg(":all:") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a + Expected: + sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a - Computed: - 
sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -3692,20 +3690,19 @@ fn require_hashes_wrong_digest() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -3725,20 +3722,19 @@ fn require_hashes_wrong_algorithm() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha512:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha512:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha512:f30761c1e8725b49c498273b90dba4b05c0fd157811994c806183062cb6647e773364ce45f0e1ff0b10e32fe6d0232ea5ad39476ccf37109d6b49603a09c11c2 + Computed: + sha512:f30761c1e8725b49c498273b90dba4b05c0fd157811994c806183062cb6647e773364ce45f0e1ff0b10e32fe6d0232ea5ad39476ccf37109d6b49603a09c11c2 "### ); @@ -3898,20 +3894,19 @@ fn require_hashes_wheel_url() -> Result<()> { .arg("--reinstall") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Caused by: Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + × Failed to download `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + ╰─▶ Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -3953,20 +3948,19 @@ fn require_hashes_wheel_url_mismatch() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download 
`anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Caused by: Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + × Failed to download `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + ╰─▶ Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -4062,20 +4056,19 @@ fn require_hashes_re_download() -> Result<()> { .arg("--reinstall") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio==4.0.0` - Caused by: Hash mismatch for `anyio==4.0.0` + × Failed to download `anyio==4.0.0` + ╰─▶ Hash mismatch for `anyio==4.0.0` - Expected: - sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:afdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Computed: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f "### ); @@ -4154,20 +4147,19 @@ fn require_hashes_wheel_path_mismatch() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `tqdm @ file://[WORKSPACE]/scripts/links/tqdm-1000.0.0-py3-none-any.whl` - Caused by: Hash mismatch for `tqdm @ file://[WORKSPACE]/scripts/links/tqdm-1000.0.0-py3-none-any.whl` + × Failed to download `tqdm @ file://[WORKSPACE]/scripts/links/tqdm-1000.0.0-py3-none-any.whl` + ╰─▶ Hash mismatch for `tqdm @ file://[WORKSPACE]/scripts/links/tqdm-1000.0.0-py3-none-any.whl` - Expected: - sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f + Expected: + sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f - Computed: - sha256:a34996d4bd5abb2336e14ff0a2d22b92cfd0f0ed344e6883041ce01953276a13 + Computed: + sha256:a34996d4bd5abb2336e14ff0a2d22b92cfd0f0ed344e6883041ce01953276a13 "### ); @@ -4431,20 +4423,19 @@ fn require_hashes_repeated_hash() -> Result<()> { .arg("--require-hashes") .arg("--reinstall"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Caused by: Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + × Failed to download `anyio @ 
https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` + ╰─▶ Hash mismatch for `anyio @ https://files.pythonhosted.org/packages/36/55/ad4de788d84a630656ece71059665e01ca793c04294c463fd84132f40fe6/anyio-4.0.0-py3-none-any.whl` - Expected: - md5:520d85e19168705cdf0223621b18831a + Expected: + md5:520d85e19168705cdf0223621b18831a - Computed: - md5:420d85e19168705cdf0223621b18831a + Computed: + md5:420d85e19168705cdf0223621b18831a "### ); @@ -4563,20 +4554,19 @@ fn require_hashes_find_links_no_hash() -> Result<()> { .arg("--find-links") .arg("https://raw.githubusercontent.com/astral-test/astral-test-hash/main/no-hash/simple-html/example-a-961b4c22/index.html"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:123 + Expected: + sha256:123 - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -4593,20 +4583,19 @@ fn require_hashes_find_links_no_hash() -> Result<()> { .arg("--find-links") .arg("https://raw.githubusercontent.com/astral-test/astral-test-hash/main/no-hash/simple-html/example-a-961b4c22/index.html"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 + Expected: + sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -4684,20 +4673,19 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { .arg("--find-links") .arg("https://raw.githubusercontent.com/astral-test/astral-test-hash/main/invalid-hash/simple-html/example-a-961b4c22/index.html"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:123 + Expected: + sha256:123 - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -4713,20 +4701,19 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { .arg("--find-links") .arg("https://raw.githubusercontent.com/astral-test/astral-test-hash/main/invalid-hash/simple-html/example-a-961b4c22/index.html"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare 
distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea + Expected: + sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -4794,21 +4781,20 @@ fn require_hashes_find_links_invalid_hash() -> Result<()> { .arg("--find-links") .arg("https://raw.githubusercontent.com/astral-test/astral-test-hash/main/invalid-hash/simple-html/example-a-961b4c22/index.html"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download and build `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e - sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee + Expected: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee - Computed: - sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 + Computed: + sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 "### ); @@ -4890,20 +4876,19 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { .arg("--index-url") .arg("https://astral-test.github.io/astral-test-hash/invalid-hash/simple-html/"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:123 + Expected: + sha256:123 - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -4920,20 +4905,19 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { .arg("--index-url") .arg("https://astral-test.github.io/astral-test-hash/invalid-hash/simple-html/"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea + Expected: + sha256:8838f9d005ff0432b258ba648d9cabb1cbdf06ac29d14f788b02edae544032ea - Computed: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + Computed: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e "### ); @@ -5004,21 +4988,20 @@ fn require_hashes_registry_invalid_hash() -> Result<()> { .arg("--index-url") 
.arg("https://astral-test.github.io/astral-test-hash/invalid-hash/simple-html/"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `example-a-961b4c22==1.0.0` - Caused by: Hash mismatch for `example-a-961b4c22==1.0.0` + × Failed to download and build `example-a-961b4c22==1.0.0` + ╰─▶ Hash mismatch for `example-a-961b4c22==1.0.0` - Expected: - sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e - sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee + Expected: + sha256:5d69f0b590514103234f0c3526563856f04d044d8d0ea1073a843ae429b3187e + sha256:a3cf07a05aac526131a2e8b6e4375ee6c6eaac8add05b88035e960ac6cd999ee - Computed: - sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 + Computed: + sha256:294e788dbe500fdc39e8b88e82652ab67409a1dc9dd06543d0fe0ae31b713eb3 "### ); @@ -5092,20 +5075,19 @@ fn require_hashes_url_invalid() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` - Caused by: Hash mismatch for `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` + × Failed to download `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` + ╰─▶ Hash mismatch for `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` - Expected: - sha256:c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + Expected: + sha256:c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 - Computed: - sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + Computed: + sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 "### ); @@ -5126,20 +5108,19 @@ fn require_hashes_url_ignore() -> Result<()> { .arg("requirements.txt") .arg("--require-hashes"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` - Caused by: Hash mismatch for `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` + × Failed to download `iniconfig @ 
https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` + ╰─▶ Hash mismatch for `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374` - Expected: - sha256:c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + Expected: + sha256:c6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 - Computed: - sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + Computed: + sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 "### ); @@ -5487,16 +5468,15 @@ fn incompatible_build_constraint() -> Result<()> { .arg("--build-constraint") .arg("build_constraints.txt"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `requests==1.2.0` - Caused by: Failed to resolve requirements from `setup.py` build - Caused by: No solution found when resolving: `setuptools>=40.8.0` - Caused by: Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable. + × Failed to download and build `requests==1.2.0` + ├─▶ Failed to resolve requirements from `setup.py` build + ├─▶ No solution found when resolving: `setuptools>=40.8.0` + ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable. "### ); diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index f95f9d4d1455..1e0b9a21a1a4 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -688,20 +688,18 @@ fn sync_build_isolation_package() -> Result<()> { .collect::>(); uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` - Caused by: Build backend failed to build wheel through `build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'hatchling' + × Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` + ╰─▶ Build backend failed to build wheel through `build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' "###); // Install `hatchling` for `source-distribution`. 
@@ -779,39 +777,35 @@ fn sync_build_isolation_extra() -> Result<()> { .collect::>(); uv_snapshot!(&filters, context.sync().arg("--extra").arg("compile"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved [N] packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` - Caused by: Build backend failed to build wheel through `build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'hatchling' + × Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` + ╰─▶ Build backend failed to build wheel through `build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' "###); // Running `uv sync` with `--all-extras` should also fail. uv_snapshot!(&filters, context.sync().arg("--all-extras"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved [N] packages in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` - Caused by: Build backend failed to build wheel through `build_wheel` (exit status: 1) - - [stderr] - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'hatchling' + × Failed to download and build `source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz` + ╰─▶ Build backend failed to build wheel through `build_wheel` (exit status: 1) + [stderr] + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' "###); // Install the build dependencies. From 5248dff2dc3bb6ddf06a7e8d7fd50ba508a9cf7b Mon Sep 17 00:00:00 2001 From: konsti Date: Tue, 12 Nov 2024 14:45:22 +0100 Subject: [PATCH 17/23] Typo fixes (#9057) --- crates/uv-pep440/src/version_specifier.rs | 2 +- docs/reference/resolver-internals.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/uv-pep440/src/version_specifier.rs b/crates/uv-pep440/src/version_specifier.rs index 2a9440254cdf..9ab5b41b1a34 100644 --- a/crates/uv-pep440/src/version_specifier.rs +++ b/crates/uv-pep440/src/version_specifier.rs @@ -51,7 +51,7 @@ impl VersionSpecifiers { self.iter().all(|specifier| specifier.contains(version)) } - /// Returns `true` if the specifiers are empty is empty. + /// Returns `true` if there are no specifiers. pub fn is_empty(&self) -> bool { self.0.is_empty() } diff --git a/docs/reference/resolver-internals.md b/docs/reference/resolver-internals.md index a0b83c4458d5..5381ac04898d 100644 --- a/docs/reference/resolver-internals.md +++ b/docs/reference/resolver-internals.md @@ -14,9 +14,9 @@ in the worst case you have to try all possible combinations of all versions of a there are no general, fast algorithms. 
In practice, this is misleading for a number of reasons: - The slowest part of resolution in uv is loading package and version metadata, even if it's cached. -- There are many possible solutions, but some are preferable than others. For example we generally +- There are many possible solutions, but some are preferable to others. For example, we generally prefer using the latest version of packages. -- Package's dependencies are complex, e.g., there are contiguous versions ranges — not arbitrary +- Package dependencies are complex, e.g., there are contiguous versions ranges — not arbitrary boolean inclusion/exclusions of versions, adjacent releases often have the same or similar requirements, etc. - For most resolutions, the resolver doesn't need to backtrack, picking versions iteratively is From 828045cd27fb6812a03f6b374234aa95ec9dd092 Mon Sep 17 00:00:00 2001 From: konsti Date: Tue, 12 Nov 2024 14:57:47 +0100 Subject: [PATCH 18/23] Use crates.io reqwest-middleware (#9058) Thanks to https://github.com/TrueLayer/reqwest-middleware/pull/198, we can now remove the git dependency and switch back to a crates.io dependency. --- Cargo.lock | 14 ++++++++------ Cargo.toml | 9 +++------ 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d886e80fd27..f6a11d62d8e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -183,9 +183,9 @@ dependencies = [ [[package]] name = "async_http_range_reader" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4015e7130cf870da1c64a9c7ba474f4b3772a530edbeb05f8358bc9a02f8e505" +checksum = "2b537c00269e3f943e06f5d7cabf8ccd281b800fd0c7f111dd82f77154334197" dependencies = [ "bisection", "futures", @@ -2950,8 +2950,9 @@ dependencies = [ [[package]] name = "reqwest-middleware" -version = "0.3.3" -source = "git+https://github.com/TrueLayer/reqwest-middleware?rev=d95ec5a99fcc9a4339e1850d40378bbfe55ab121#d95ec5a99fcc9a4339e1850d40378bbfe55ab121" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1ccd3b55e711f91a9885a2fa6fbbb2e39db1776420b062efc058c6410f7e5e3" dependencies = [ "anyhow", "async-trait", @@ -2964,8 +2965,9 @@ dependencies = [ [[package]] name = "reqwest-retry" -version = "0.7.1" -source = "git+https://github.com/TrueLayer/reqwest-middleware?rev=d95ec5a99fcc9a4339e1850d40378bbfe55ab121#d95ec5a99fcc9a4339e1850d40378bbfe55ab121" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c73e4195a6bfbcb174b790d9b3407ab90646976c55de58a6515da25d851178" dependencies = [ "anyhow", "async-trait", diff --git a/Cargo.toml b/Cargo.toml index f055ca98e4c3..ff333acaa47f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -73,7 +73,7 @@ anyhow = { version = "1.0.89" } async-channel = { version = "2.3.1" } async-compression = { version = "0.4.12" } async-trait = { version = "0.1.82" } -async_http_range_reader = { version = "0.9.0" } +async_http_range_reader = { version = "0.9.1" } async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "011b24604fa7bc223daaad7712c0694bac8f0a87", features = ["deflate", "tokio"] } axoupdater = { version = "0.8.0", default-features = false } backoff = { version = "0.4.0" } @@ -134,8 +134,8 @@ rayon = { version = "1.10.0" } reflink-copy = { version = "0.1.19" } regex = { version = "1.10.6" } reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", 
"http2"] } -reqwest-middleware = { git = "https://github.com/TrueLayer/reqwest-middleware", rev = "d95ec5a99fcc9a4339e1850d40378bbfe55ab121", features = ["multipart"] } -reqwest-retry = { git = "https://github.com/TrueLayer/reqwest-middleware", rev = "d95ec5a99fcc9a4339e1850d40378bbfe55ab121" } +reqwest-middleware = { version = "0.4.0", features = ["multipart"] } +reqwest-retry = { version = "0.7.0" } rkyv = { version = "0.8.8", features = ["bytecheck"] } rmp-serde = { version = "1.3.0" } rust-netrc = { version = "0.1.2" } @@ -187,9 +187,6 @@ zip = { version = "0.6.6", default-features = false, features = ["deflate"] } [workspace.metadata.cargo-shear] ignored = ["flate2", "xz2"] -[patch.crates-io] -reqwest-middleware = { git = "https://github.com/TrueLayer/reqwest-middleware", rev = "d95ec5a99fcc9a4339e1850d40378bbfe55ab121" } - [workspace.lints.rust] unsafe_code = "warn" unreachable_pub = "warn" From e0f657ed37d8be271ee2b2b6eed3ef56f0addbfb Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Tue, 12 Nov 2024 09:46:03 -0500 Subject: [PATCH 19/23] DRY up diagnostic reporting for resolution failures (#9044) ## Summary Not thrilled with this but helps for now. I feel like this error-handling should happen at the top-level, rather than on all these individual commands. But we don't have a unified result type at the top-level of the CLI -- all these commands return `anyhow::Result`. --- crates/uv/src/commands/diagnostics.rs | 106 +++++++++++++++++- crates/uv/src/commands/pip/compile.rs | 27 +---- crates/uv/src/commands/pip/install.rs | 46 ++------ crates/uv/src/commands/pip/sync.rs | 46 ++------ crates/uv/src/commands/project/add.rs | 54 +--------- crates/uv/src/commands/project/export.rs | 35 +----- crates/uv/src/commands/project/lock.rs | 36 +------ crates/uv/src/commands/project/mod.rs | 4 +- crates/uv/src/commands/project/remove.rs | 55 ++-------- crates/uv/src/commands/project/run.rs | 130 +++-------------------- crates/uv/src/commands/project/sync.rs | 54 ++-------- crates/uv/src/commands/project/tree.rs | 20 ++-- crates/uv/src/commands/tool/install.rs | 45 ++++++-- crates/uv/src/commands/tool/run.rs | 13 ++- crates/uv/tests/it/export.rs | 8 +- crates/uv/tests/it/lock.rs | 52 ++++----- crates/uv/tests/it/run.rs | 2 +- crates/uv/tests/it/tool_install.rs | 38 ++++--- crates/uv/tests/it/tool_run.rs | 2 +- crates/uv/tests/it/workspace.rs | 8 +- 20 files changed, 283 insertions(+), 498 deletions(-) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index 391948d66d75..db6668635dbf 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -1,10 +1,14 @@ -use owo_colors::OwoColorize; -use rustc_hash::FxHashMap; use std::str::FromStr; use std::sync::LazyLock; + +use owo_colors::OwoColorize; +use rustc_hash::FxHashMap; + use uv_distribution_types::{BuiltDist, Name, SourceDist}; use uv_normalize::PackageName; +use crate::commands::pip; + /// Static map of common package name typos or misconfigurations to their correct package names. static SUGGESTIONS: LazyLock> = LazyLock::new(|| { let suggestions: Vec<(String, String)> = @@ -20,6 +24,104 @@ static SUGGESTIONS: LazyLock> = LazyLock::ne .collect() }); +/// A rich reporter for operational diagnostics, i.e., errors that occur during resolution and +/// installation. +#[derive(Debug, Default)] +pub(crate) struct OperationDiagnostic { + /// The hint to display to the user upon resolution failure. 
+ pub(crate) hint: Option, + /// The context to display to the user upon resolution failure. + pub(crate) context: Option<&'static str>, +} + +impl OperationDiagnostic { + /// Set the hint to display to the user upon resolution failure. + #[must_use] + pub(crate) fn with_hint(hint: String) -> Self { + Self { + hint: Some(hint), + context: None, + } + } + + /// Set the context to display to the user upon resolution failure. + #[must_use] + pub(crate) fn with_context(context: &'static str) -> Self { + Self { + hint: None, + context: Some(context), + } + } + + /// Attempt to report an error with rich diagnostic context. + /// + /// Returns `Some` if the error was not handled. + pub(crate) fn report(self, err: pip::operations::Error) -> Option { + match err { + pip::operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err)) => { + if let Some(context) = self.context { + no_solution_context(&err, context); + } else if let Some(hint) = self.hint { + // TODO(charlie): The `hint` should be shown on all diagnostics, not just + // `NoSolutionError`. + no_solution_hint(err, hint); + } else { + no_solution(&err); + } + None + } + pip::operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild( + dist, + err, + )) => { + download_and_build(dist, err); + None + } + pip::operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err)) => { + build(dist, err); + None + } + pip::operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( + dist, + err, + )) => { + download_and_build(dist, err); + None + } + pip::operations::Error::Requirements(uv_requirements::Error::Build(dist, err)) => { + build(dist, err); + None + } + pip::operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err)) => { + build(dist, err); + None + } + pip::operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( + dist, + err, + )) => { + download_and_build(dist, err); + None + } + pip::operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err)) => { + download(dist, err); + None + } + pip::operations::Error::Requirements(err) => { + if let Some(context) = self.context { + let err = miette::Report::msg(format!("{err}")) + .context(format!("Failed to resolve {context} requirement")); + anstream::eprint!("{err:?}"); + None + } else { + Some(pip::operations::Error::Requirements(err)) + } + } + err => Some(err), + } + } +} + /// Render a remote source distribution build failure with a help message. 
pub(crate) fn download_and_build(sdist: Box, cause: uv_distribution::Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 17a7c34f10bb..9ddbfd1cf19e 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -406,30 +406,11 @@ pub(crate) async fn pip_compile( .await { Ok(resolution) => resolution, - Err(operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); + Err(err) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } - Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( - dist, - err, - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(err) => return Err(err.into()), }; // Write the resolved dependencies to the output channel. diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 8dfb6490e756..8dc2438ec626 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -412,30 +412,11 @@ pub(crate) async fn pip_install( .await { Ok(resolution) => Resolution::from(resolution), - Err(operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); + Err(err) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } - Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( - dist, - err, - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(err) => return Err(err.into()), }; // Sync the environment. 
@@ -464,22 +445,11 @@ pub(crate) async fn pip_install( .await { Ok(_) => {} - Err(operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( - dist, - err, - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err))) => { - diagnostics::download(dist, err); - return Ok(ExitStatus::Failure); + Err(err) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } - Err(err) => return Err(err.into()), } // Notify the user of any resolution diagnostics. diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 551ad6798664..ca3de945d772 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -356,30 +356,11 @@ pub(crate) async fn pip_sync( .await { Ok(resolution) => Resolution::from(resolution), - Err(operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(err))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); + Err(err) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } - Err(operations::Error::Resolve(uv_resolver::ResolveError::DownloadAndBuild(dist, err))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( - dist, - err, - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Requirements(uv_requirements::Error::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(err) => return Err(err.into()), }; // Sync the environment. @@ -408,22 +389,11 @@ pub(crate) async fn pip_sync( .await { Ok(_) => {} - Err(operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( - dist, - err, - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err))) => { - diagnostics::download(dist, err); - return Ok(ExitStatus::Failure); + Err(err) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } - Err(err) => return Err(err.into()), } // Notify the user of any resolution diagnostics. 
diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index ac6b5c8a15df..00606ba65c33 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -49,7 +49,7 @@ use crate::commands::project::{ ProjectInterpreter, ScriptPython, }; use crate::commands::reporters::{PythonDownloadReporter, ResolverReporter}; -use crate::commands::{diagnostics, pip, project, ExitStatus, SharedState}; +use crate::commands::{diagnostics, project, ExitStatus, SharedState}; use crate::printer::Printer; use crate::settings::{ResolverInstallerSettings, ResolverInstallerSettingsRef}; @@ -684,56 +684,10 @@ pub(crate) async fn add( existing_uv_lock.as_deref(), ); } - match err { - ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - )) => { - diagnostics::no_solution_hint(err, format!("If you want to add the package regardless of the failed resolution, provide the `{}` flag to skip locking and syncing.", "--frozen".green())); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - )) => { - diagnostics::download_and_build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - )) => { - diagnostics::build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - )) => { - diagnostics::download_and_build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - )) => { - diagnostics::build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Prepare( - uv_installer::PrepareError::Build(dist, err), - )) => { - diagnostics::build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Prepare( - uv_installer::PrepareError::DownloadAndBuild(dist, err), - )) => { - diagnostics::download_and_build(dist, err); - Ok(ExitStatus::Failure) - } - ProjectError::Operation(pip::operations::Error::Prepare( - uv_installer::PrepareError::Download(dist, err), - )) => { - diagnostics::download(dist, err); - Ok(ExitStatus::Failure) - } + ProjectError::Operation(err) => diagnostics::OperationDiagnostic::with_hint(format!("If you want to add the package regardless of the failed resolution, provide the `{}` flag to skip locking and syncing.", "--frozen".green())) + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())), err => Err(err.into()), } } diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index 36504040848b..09b1f98aa363 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -21,7 +21,7 @@ use crate::commands::project::lock::{do_safe_lock, LockMode}; use crate::commands::project::{ default_dependency_groups, DependencyGroupsTarget, ProjectError, ProjectInterpreter, }; -use crate::commands::{diagnostics, pip, ExitStatus, OutputWriter, SharedState}; +use crate::commands::{diagnostics, ExitStatus, OutputWriter, SharedState}; use crate::printer::Printer; use crate::settings::ResolverSettings; @@ -142,35 +142,10 @@ pub(crate) async fn export( .await { Ok(result) => result.into_lock(), - Err(ProjectError::Operation(pip::operations::Error::Resolve( - 
uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 01a5833566f7..e09fe5cb74df 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -163,36 +163,9 @@ pub(crate) async fn lock( Ok(ExitStatus::Success) } - Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution(&err); - Ok(ExitStatus::Failure) - } - Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - Ok(ExitStatus::Failure) - } - Err(ProjectError::Operation(pip::operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - Ok(ExitStatus::Failure) - } - Err(ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - Ok(ExitStatus::Failure) - } - Err(ProjectError::Operation(pip::operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - Ok(ExitStatus::Failure) - } + Err(ProjectError::Operation(err)) => diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())), Err(err) => Err(err.into()), } } @@ -625,7 +598,8 @@ async fn do_lock( ExtrasResolver::new(&hasher, &state.index, database) .with_reporter(ResolverReporter::from(printer)) .resolve(workspace.members_requirements()) - .await? + .await + .map_err(|err| ProjectError::Operation(err.into()))? 
.into_iter() .chain(requirements.iter().cloned()) .map(UnresolvedRequirementSpecification::from) diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 75b0260410b2..5ee669151a68 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -1116,7 +1116,7 @@ pub(crate) async fn sync_environment( allow_insecure_host: &[TrustedHost], cache: &Cache, printer: Printer, -) -> anyhow::Result { +) -> Result { let InstallerSettingsRef { index_locations, index_strategy, @@ -1269,7 +1269,7 @@ pub(crate) async fn update_environment( allow_insecure_host: &[TrustedHost], cache: &Cache, printer: Printer, -) -> anyhow::Result { +) -> Result { warn_on_requirements_txt_setting(&spec, settings.as_ref().into()); let ResolverInstallerSettings { diff --git a/crates/uv/src/commands/project/remove.rs b/crates/uv/src/commands/project/remove.rs index e68e94b35fa0..2c02c7322cf6 100644 --- a/crates/uv/src/commands/project/remove.rs +++ b/crates/uv/src/commands/project/remove.rs @@ -21,7 +21,6 @@ use uv_workspace::pyproject_mut::{DependencyTarget, PyProjectTomlMut}; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; -use crate::commands::pip::operations; use crate::commands::pip::operations::Modifications; use crate::commands::project::lock::LockMode; use crate::commands::project::{default_dependency_groups, ProjectError}; @@ -231,35 +230,10 @@ pub(crate) async fn remove( .await { Ok(result) => result.into_lock(), - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; @@ -309,23 +283,10 @@ pub(crate) async fn remove( .await { Ok(()) => {} - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Download(dist, err), - ))) => { - diagnostics::download(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + 
.map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), } diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 91988de94de3..4bf0dc019b72 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -5,7 +5,6 @@ use std::fmt::Write; use std::io::Read; use std::path::{Path, PathBuf}; -use anstream::eprint; use anyhow::{anyhow, bail, Context}; use futures::StreamExt; use itertools::Itertools; @@ -13,6 +12,7 @@ use owo_colors::OwoColorize; use tokio::process::Command; use tracing::{debug, warn}; use url::Url; + use uv_cache::Cache; use uv_cli::ExternalCommand; use uv_client::{BaseClientBuilder, Connectivity}; @@ -25,7 +25,6 @@ use uv_fs::which::is_executable; use uv_fs::{PythonExt, Simplified}; use uv_installer::{SatisfiesResult, SitePackages}; use uv_normalize::PackageName; - use uv_python::{ EnvironmentPreference, Interpreter, PythonDownloads, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, PythonVersionFile, VersionFileDiscoveryOptions, @@ -40,7 +39,6 @@ use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceError}; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, }; -use crate::commands::pip::operations; use crate::commands::pip::operations::Modifications; use crate::commands::project::environment::CachedEnvironment; use crate::commands::project::lock::LockMode; @@ -299,35 +297,10 @@ pub(crate) async fn run( let environment = match result { Ok(resolution) => resolution, - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution_context(&err, "script"); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::with_context("script") + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; @@ -640,35 +613,10 @@ pub(crate) async fn run( .await { Ok(result) => result, - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - 
Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; @@ -721,23 +669,10 @@ pub(crate) async fn run( .await { Ok(()) => {} - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Download(dist, err), - ))) => { - diagnostics::download(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), } @@ -877,41 +812,10 @@ pub(crate) async fn run( let environment = match result { Ok(resolution) => resolution, - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution_context(&err, "`--with`"); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements(err))) => { - let err = miette::Report::msg(format!("{err}")) - .context("Invalid `--with` requirement"); - eprint!("{err:?}"); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::with_context("`--with`") + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 76f363a8ab26..3367eb9084d5 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -154,35 +154,10 @@ pub(crate) async fn sync( .await { Ok(result) => result.into_lock(), - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution(&err); - return Ok(ExitStatus::Failure); - } - 
Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Requirements( - uv_requirements::Error::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), }; @@ -233,23 +208,10 @@ pub(crate) async fn sync( .await { Ok(()) => {} - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Build(dist, err), - ))) => { - diagnostics::build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::DownloadAndBuild(dist, err), - ))) => { - diagnostics::download_and_build(dist, err); - return Ok(ExitStatus::Failure); - } - Err(ProjectError::Operation(operations::Error::Prepare( - uv_installer::PrepareError::Download(dist, err), - ))) => { - diagnostics::download(dist, err); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(err) => return Err(err.into()), } diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index b1da0fc457fe..1556157d31cc 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -20,11 +20,11 @@ use uv_workspace::{DiscoveryOptions, Workspace}; use crate::commands::pip::latest::LatestClient; use crate::commands::pip::loggers::DefaultResolveLogger; use crate::commands::pip::resolution_markers; -use crate::commands::project::lock::LockMode; +use crate::commands::project::lock::{do_safe_lock, LockMode}; use crate::commands::project::{ - default_dependency_groups, DependencyGroupsTarget, ProjectInterpreter, + default_dependency_groups, DependencyGroupsTarget, ProjectError, ProjectInterpreter, }; -use crate::commands::{project, ExitStatus, SharedState}; +use crate::commands::{diagnostics, ExitStatus, SharedState}; use crate::printer::Printer; use crate::settings::ResolverSettings; @@ -104,7 +104,7 @@ pub(crate) async fn tree( let state = SharedState::default(); // Update the lockfile, if necessary. - let lock = project::lock::do_safe_lock( + let lock = match do_safe_lock( mode, &workspace, settings.as_ref(), @@ -118,8 +118,16 @@ pub(crate) async fn tree( cache, printer, ) - .await? - .into_lock(); + .await + { + Ok(result) => result.into_lock(), + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) + } + Err(err) => return Err(err.into()), + }; // Determine the markers to use for resolution. 
let markers = (!universal).then(|| { diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs index c838366f13a6..0ec7a92aee17 100644 --- a/crates/uv/src/commands/tool/install.rs +++ b/crates/uv/src/commands/tool/install.rs @@ -25,11 +25,13 @@ use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; use crate::commands::project::{ resolve_environment, resolve_names, sync_environment, update_environment, - EnvironmentSpecification, + EnvironmentSpecification, ProjectError, }; use crate::commands::tool::common::remove_entrypoints; use crate::commands::tool::Target; -use crate::commands::{reporters::PythonDownloadReporter, tool::common::install_executables}; +use crate::commands::{ + diagnostics, reporters::PythonDownloadReporter, tool::common::install_executables, +}; use crate::commands::{ExitStatus, SharedState}; use crate::printer::Printer; use crate::settings::ResolverInstallerSettings; @@ -348,7 +350,7 @@ pub(crate) async fn install( // entrypoints always contain an absolute path to the relevant Python interpreter, which would // be invalidated by moving the environment. let environment = if let Some(environment) = existing_environment { - let environment = update_environment( + let environment = match update_environment( environment, spec, &settings, @@ -362,8 +364,16 @@ pub(crate) async fn install( &cache, printer, ) - .await? - .into_environment(); + .await + { + Ok(update) => update.into_environment(), + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) + } + Err(err) => return Err(err.into()), + }; // At this point, we updated the existing environment, so we should remove any of its // existing executables. @@ -375,7 +385,7 @@ pub(crate) async fn install( } else { // If we're creating a new environment, ensure that we can resolve the requirements prior // to removing any existing tools. - let resolution = resolve_environment( + let resolution = match resolve_environment( EnvironmentSpecification::from(spec), &interpreter, settings.as_ref().into(), @@ -388,7 +398,16 @@ pub(crate) async fn install( &cache, printer, ) - .await?; + .await + { + Ok(resolution) => resolution, + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) + } + Err(err) => return Err(err.into()), + }; let environment = installed_tools.create_environment(&from.name, interpreter)?; @@ -399,7 +418,7 @@ pub(crate) async fn install( } // Sync the environment with the resolved requirements. - sync_environment( + match sync_environment( environment, &resolution.into(), settings.as_ref().into(), @@ -417,7 +436,15 @@ pub(crate) async fn install( // If we failed to sync, remove the newly created environment. debug!("Failed to sync environment; removing `{}`", from.name); let _ = installed_tools.remove_environment(&from.name); - })? 
+ }) { + Ok(environment) => environment, + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::default() + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) + } + Err(err) => return Err(err.into()), + } }; install_executables( diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 16062ccc9756..be811a406444 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -33,7 +33,6 @@ use uv_warnings::warn_user; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, }; -use crate::commands::pip::operations; use crate::commands::project::{resolve_names, EnvironmentSpecification, ProjectError}; use crate::commands::reporters::PythonDownloadReporter; use crate::commands::tool::Target; @@ -127,14 +126,14 @@ pub(crate) async fn run( let (from, environment) = match result { Ok(resolution) => resolution, - Err(ProjectError::Operation(operations::Error::Resolve( - uv_resolver::ResolveError::NoSolution(err), - ))) => { - diagnostics::no_solution_context(&err, "tool"); - return Ok(ExitStatus::Failure); + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::with_context("tool") + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())) } Err(ProjectError::Requirements(err)) => { - let err = miette::Report::msg(format!("{err}")).context("Invalid `--with` requirement"); + let err = miette::Report::msg(format!("{err}")) + .context("Failed to resolve `--with` requirement"); eprint!("{err:?}"); return Ok(ExitStatus::Failure); } diff --git a/crates/uv/tests/it/export.rs b/crates/uv/tests/it/export.rs index 2526d04eb1aa..6e03023ca4a5 100644 --- a/crates/uv/tests/it/export.rs +++ b/crates/uv/tests/it/export.rs @@ -620,13 +620,13 @@ fn frozen() -> Result<()> { uv_snapshot!(context.filters(), context.export().arg("--all-packages"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to parse entry: `child` - Caused by: Package is not included as workspace package in `tool.uv.workspace` + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to parse entry: `child` + ╰─▶ Package is not included as workspace package in `tool.uv.workspace` "###); uv_snapshot!(context.filters(), context.export().arg("--all-packages").arg("--frozen"), @r###" diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index ba5f92360dd3..ce48a30fa1c9 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -9431,13 +9431,13 @@ fn lock_mismatched_sources() -> Result<()> { uv_snapshot!(context.filters(), context.lock(), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to parse entry: `uv-public-pypackage` - Caused by: Can't combine URLs from both `project.dependencies` and `tool.uv.sources` + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to parse entry: `uv-public-pypackage` + ╰─▶ Can't combine URLs from both `project.dependencies` and `tool.uv.sources` "###); Ok(()) @@ -13597,13 +13597,13 @@ fn lock_named_index_cli() -> Result<()> { // The package references a non-existent index. 
uv_snapshot!(context.filters(), context.lock().env_remove(EnvVars::UV_EXCLUDE_NEWER), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to parse entry: `jinja2` - Caused by: Package `jinja2` references an undeclared index: `pytorch` + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to parse entry: `jinja2` + ╰─▶ Package `jinja2` references an undeclared index: `pytorch` "###); // But it's fine if it comes from the CLI. @@ -17286,12 +17286,12 @@ fn lock_group_include_cycle() -> Result<()> { uv_snapshot!(context.filters(), context.lock(), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Detected a cycle in `dependency-groups`: `bar` -> `foobar` -> `foo` -> `bar` + × Failed to build `project @ file://[TEMP_DIR]/` + ╰─▶ Detected a cycle in `dependency-groups`: `bar` -> `foobar` -> `foo` -> `bar` "###); Ok(()) @@ -17317,12 +17317,12 @@ fn lock_group_include_missing() -> Result<()> { uv_snapshot!(context.filters(), context.lock(), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to find group `bar` included by `foo` + × Failed to build `project @ file://[TEMP_DIR]/` + ╰─▶ Failed to find group `bar` included by `foo` "###); Ok(()) @@ -17348,28 +17348,28 @@ fn lock_group_invalid_entry_package() -> Result<()> { uv_snapshot!(context.filters(), context.lock(), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to parse entry in group `foo`: `invalid!` - Caused by: no such comparison operator "!", must be one of ~= == != <= >= < > === - invalid! - ^ + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to parse entry in group `foo`: `invalid!` + ╰─▶ no such comparison operator "!", must be one of ~= == != <= >= < > === + invalid! + ^ "###); uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Failed to build `project @ file://[TEMP_DIR]/` - Caused by: Failed to parse entry in group `foo`: `invalid!` - Caused by: no such comparison operator "!", must be one of ~= == != <= >= < > === - invalid! - ^ + × Failed to build `project @ file://[TEMP_DIR]/` + ├─▶ Failed to parse entry in group `foo`: `invalid!` + ╰─▶ no such comparison operator "!", must be one of ~= == != <= >= < > === + invalid! 
+ ^ "###); Ok(()) diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index 36056fec2ef3..8a2a04fffa99 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -1105,7 +1105,7 @@ fn run_with_editable() -> Result<()> { ----- stderr ----- Resolved 3 packages in [TIME] Audited 3 packages in [TIME] - × Invalid `--with` requirement + × Failed to resolve `--with` requirement ╰─▶ Distribution not found at: file://[TEMP_DIR]/foo "###); diff --git a/crates/uv/tests/it/tool_install.rs b/crates/uv/tests/it/tool_install.rs index 380e3c43238e..d5db82e56355 100644 --- a/crates/uv/tests/it/tool_install.rs +++ b/crates/uv/tests/it/tool_install.rs @@ -1460,31 +1460,28 @@ fn tool_install_uninstallable() { .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()) .env(EnvVars::PATH, bin_dir.as_os_str()), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] - error: Failed to prepare distributions - Caused by: Failed to download and build `pyenv==0.0.1` - Caused by: Build backend failed to build wheel through `build_wheel` (exit status: 1) + × Failed to download and build `pyenv==0.0.1` + ╰─▶ Build backend failed to build wheel through `build_wheel` (exit status: 1) - [stdout] - running bdist_wheel - running build - installing to build/bdist.linux-x86_64/wheel - running install + [stdout] + running bdist_wheel + running build + installing to build/bdist.linux-x86_64/wheel + running install - [stderr] - # NOTE # - We are sorry, but this package is not installable with pip. + [stderr] + # NOTE # + We are sorry, but this package is not installable with pip. - Please read the installation instructions at: + Please read the installation instructions at: - https://github.com/pyenv/pyenv#installation - # - - + https://github.com/pyenv/pyenv#installation + # "###); // Ensure the tool environment is not created. @@ -2439,12 +2436,13 @@ fn tool_install_preserve_environment() { .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()) .env(EnvVars::PATH, bin_dir.as_os_str()), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- - error: Because black==24.1.1 depends on packaging>=22.0 and you require black==24.1.1, we can conclude that you require packaging>=22.0. - And because you require packaging==0.0.1, we can conclude that your requirements are unsatisfiable. + × No solution found when resolving dependencies: + ╰─▶ Because black==24.1.1 depends on packaging>=22.0 and you require black==24.1.1, we can conclude that you require packaging>=22.0. + And because you require packaging==0.0.1, we can conclude that your requirements are unsatisfiable. "###); // Install `black`. The tool should already be installed, since we didn't remove the environment. diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs index 7426a4ebc850..94f62fe53f7b 100644 --- a/crates/uv/tests/it/tool_run.rs +++ b/crates/uv/tests/it/tool_run.rs @@ -1277,7 +1277,7 @@ fn tool_run_with_editable() -> anyhow::Result<()> { ----- stdout ----- ----- stderr ----- - × Invalid `--with` requirement + × Failed to resolve `--with` requirement ╰─▶ Distribution not found at: file://[TEMP_DIR]/foo "###); diff --git a/crates/uv/tests/it/workspace.rs b/crates/uv/tests/it/workspace.rs index b2f31b672e0d..4fc770213b32 100644 --- a/crates/uv/tests/it/workspace.rs +++ b/crates/uv/tests/it/workspace.rs @@ -1690,14 +1690,14 @@ fn workspace_member_name_shadows_dependencies() -> Result<()> { // TODO(zanieb): This error message is bad? 
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - error: Failed to build `foo @ file://[TEMP_DIR]/workspace/packages/foo` - Caused by: Failed to parse entry: `anyio` - Caused by: Package is not included as workspace package in `tool.uv.workspace` + × Failed to build `foo @ file://[TEMP_DIR]/workspace/packages/foo` + ├─▶ Failed to parse entry: `anyio` + ╰─▶ Package is not included as workspace package in `tool.uv.workspace` "### ); From b780155400392f0e71e8d40efdee8e2dba3db8e3 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Tue, 12 Nov 2024 10:01:38 -0500 Subject: [PATCH 20/23] Make diagnostic methods dynamic on error (#9037) ## Summary I need these to accept "any error". --- crates/uv/src/commands/diagnostics.rs | 40 ++++++++++++++------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/crates/uv/src/commands/diagnostics.rs b/crates/uv/src/commands/diagnostics.rs index db6668635dbf..df5e1eb09abe 100644 --- a/crates/uv/src/commands/diagnostics.rs +++ b/crates/uv/src/commands/diagnostics.rs @@ -9,6 +9,8 @@ use uv_normalize::PackageName; use crate::commands::pip; +type Error = Box; + /// Static map of common package name typos or misconfigurations to their correct package names. static SUGGESTIONS: LazyLock> = LazyLock::new(|| { let suggestions: Vec<(String, String)> = @@ -74,37 +76,37 @@ impl OperationDiagnostic { dist, err, )) => { - download_and_build(dist, err); + download_and_build(dist, Box::new(err)); None } pip::operations::Error::Resolve(uv_resolver::ResolveError::Build(dist, err)) => { - build(dist, err); + build(dist, Box::new(err)); None } pip::operations::Error::Requirements(uv_requirements::Error::DownloadAndBuild( dist, err, )) => { - download_and_build(dist, err); + download_and_build(dist, Box::new(err)); None } pip::operations::Error::Requirements(uv_requirements::Error::Build(dist, err)) => { - build(dist, err); + build(dist, Box::new(err)); None } pip::operations::Error::Prepare(uv_installer::PrepareError::Build(dist, err)) => { - build(dist, err); + build(dist, Box::new(err)); None } pip::operations::Error::Prepare(uv_installer::PrepareError::DownloadAndBuild( dist, err, )) => { - download_and_build(dist, err); + download_and_build(dist, Box::new(err)); None } pip::operations::Error::Prepare(uv_installer::PrepareError::Download(dist, err)) => { - download(dist, err); + download(dist, Box::new(err)); None } pip::operations::Error::Requirements(err) => { @@ -123,19 +125,19 @@ impl OperationDiagnostic { } /// Render a remote source distribution build failure with a help message. -pub(crate) fn download_and_build(sdist: Box, cause: uv_distribution::Error) { +pub(crate) fn download_and_build(sdist: Box, cause: Error) { #[derive(Debug, miette::Diagnostic, thiserror::Error)] #[error("Failed to download and build `{sdist}`")] #[diagnostic()] - struct Error { + struct Diagnostic { sdist: Box, #[source] - cause: uv_distribution::Error, + cause: Error, #[help] help: Option, } - let report = miette::Report::new(Error { + let report = miette::Report::new(Diagnostic { help: SUGGESTIONS.get(sdist.name()).map(|suggestion| { format!( "`{}` is often confused for `{}` Did you mean to install `{}` instead?", @@ -151,19 +153,19 @@ pub(crate) fn download_and_build(sdist: Box, cause: uv_distribution: } /// Render a remote binary distribution download failure with a help message. 
-pub(crate) fn download(sdist: Box<BuiltDist>, cause: uv_distribution::Error) {
+pub(crate) fn download(sdist: Box<BuiltDist>, cause: Error) {
     #[derive(Debug, miette::Diagnostic, thiserror::Error)]
     #[error("Failed to download `{sdist}`")]
     #[diagnostic()]
-    struct Error {
+    struct Diagnostic {
         sdist: Box<BuiltDist>,
         #[source]
-        cause: uv_distribution::Error,
+        cause: Error,
         #[help]
         help: Option<String>,
     }
 
-    let report = miette::Report::new(Error {
+    let report = miette::Report::new(Diagnostic {
         help: SUGGESTIONS.get(sdist.name()).map(|suggestion| {
             format!(
                 "`{}` is often confused for `{}` Did you mean to install `{}` instead?",
@@ -179,19 +181,19 @@ pub(crate) fn download(sdist: Box<BuiltDist>, cause: uv_distribution::Error) {
 }
 
 /// Render a local source distribution build failure with a help message.
-pub(crate) fn build(sdist: Box<SourceDist>, cause: uv_distribution::Error) {
+pub(crate) fn build(sdist: Box<SourceDist>, cause: Error) {
     #[derive(Debug, miette::Diagnostic, thiserror::Error)]
     #[error("Failed to build `{sdist}`")]
     #[diagnostic()]
-    struct Error {
+    struct Diagnostic {
         sdist: Box<SourceDist>,
         #[source]
-        cause: uv_distribution::Error,
+        cause: Error,
         #[help]
         help: Option<String>,
     }
 
-    let report = miette::Report::new(Error {
+    let report = miette::Report::new(Diagnostic {
         help: SUGGESTIONS.get(sdist.name()).map(|suggestion| {
             format!(
                 "`{}` is often confused for `{}` Did you mean to install `{}` instead?",

From 720a47755b66d00e536342e65d695741fdc71fa9 Mon Sep 17 00:00:00 2001
From: RafaelWO <38643099+RafaelWO@users.noreply.github.com>
Date: Tue, 12 Nov 2024 18:18:43 +0100
Subject: [PATCH 21/23] Fix typo in GitLab integration docs (#9047)

## Summary

This PR changes the mentioned file name `requirement.txt` to `requirements.txt`
(add "s") to be consistent.

---
 docs/guides/integration/gitlab.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/guides/integration/gitlab.md b/docs/guides/integration/gitlab.md
index 5f4e5af0a9f5..b83f6615d72b 100644
--- a/docs/guides/integration/gitlab.md
+++ b/docs/guides/integration/gitlab.md
@@ -66,5 +66,5 @@ variables:
 
 To opt-out again, the `--no-system` flag can be used in any uv invocation.
 
-When persisting the cache, you may want to use `requirement.txt` or `pyproject.toml` as
+When persisting the cache, you may want to use `requirements.txt` or `pyproject.toml` as
 your cache key files instead of `uv.lock`.

From 8ed37eeab8e8a233d3b8ac336583eb1c6440aef1 Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Tue, 12 Nov 2024 12:54:58 -0500
Subject: [PATCH 22/23] Fix `.env` file example in docs (#9064)

## Summary

The current example is broken since `$MY_ENV_VAR` gets evaluated by the shell.

---
 docs/configuration/files.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/configuration/files.md b/docs/configuration/files.md
index 3ad6d3bc72b4..8c987a7000dc 100644
--- a/docs/configuration/files.md
+++ b/docs/configuration/files.md
@@ -83,7 +83,9 @@ the `--env-file` flag to `uv run`.
 For example, to load environment variables from a `.env` file in the current working directory:
 
 ```console
-$ uv run --env-file .env -- echo $MY_ENV_VAR
+$ echo "MY_VAR='Hello, world!'" > .env
+$ uv run --env-file .env -- python -c 'import os; print(os.getenv("MY_VAR"))'
+Hello, world!
 ```
 
 The `--env-file` flag can be provided multiple times, with subsequent files overriding values

From 95e7d8702fbc11c401ffd07da13fbdcc031d6a2e Mon Sep 17 00:00:00 2001
From: Charlie Marsh
Date: Tue, 12 Nov 2024 14:43:54 -0500
Subject: [PATCH 23/23] Use `[[index]]` API in configuration example (#9065)

## Summary

This came up in #9049.
--- docs/configuration/files.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/configuration/files.md b/docs/configuration/files.md index 8c987a7000dc..05dcbdb35f09 100644 --- a/docs/configuration/files.md +++ b/docs/configuration/files.md @@ -19,8 +19,9 @@ If a `pyproject.toml` file is found, uv will read configuration from the `[tool. example, to set a persistent index URL, add the following to a `pyproject.toml`: ```toml title="pyproject.toml" -[tool.uv] -index-url = "https://test.pypi.org/simple" +[[tool.uv.index]] +url = "https://test.pypi.org/simple" +default = true ``` (If there is no such table, the `pyproject.toml` file will be ignored, and uv will continue @@ -30,7 +31,9 @@ uv will also search for `uv.toml` files, which follow an identical structure, bu `[tool.uv]` prefix. For example: ```toml title="uv.toml" -index-url = "https://test.pypi.org/simple" +[[index]] +url = "https://test.pypi.org/simple" +default = true ``` !!! note