diff --git a/.github/workflows/create_tests_package_lists.yml b/.github/workflows/create_tests_package_lists.yml index 27a9b52014..db472412ca 100644 --- a/.github/workflows/create_tests_package_lists.yml +++ b/.github/workflows/create_tests_package_lists.yml @@ -1,7 +1,9 @@ name: Create tests package lists for offline tests on: workflow_dispatch: - +concurrency: + group: check-${{ github.ref }} + cancel-in-progress: true jobs: create_package_lists: name: Create package lists @@ -9,7 +11,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.12", "3.11", "3.10", "3.9", "3.8"] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/exhaustive_package_test.yml b/.github/workflows/exhaustive_package_test.yml index 09885bb089..cf40bc833e 100644 --- a/.github/workflows/exhaustive_package_test.yml +++ b/.github/workflows/exhaustive_package_test.yml @@ -1,7 +1,9 @@ name: Exhaustive Package Test (slow) on: workflow_dispatch: - +concurrency: + group: check-${{ github.ref }} + cancel-in-progress: true jobs: test_all_packages: name: Exhaustive Package Test @@ -10,7 +12,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.12", "3.11", "3.10", "3.9", "3.8"] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/publish-testpypi.yml b/.github/workflows/publish-testpypi.yml index ea04d7ed6b..a9a295c303 100644 --- a/.github/workflows/publish-testpypi.yml +++ b/.github/workflows/publish-testpypi.yml @@ -11,7 +11,7 @@ on: workflow_dispatch: env: - default-python: "3.11" + default-python: "3.12" jobs: testpypi-publish: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6a7b9e10d2..5d977a768a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,12 +9,18 @@ name: tests on: - pull_request: push: + tags-ignore: ["**"] + pull_request: + schedule: + - cron: "0 8 * * *" +concurrency: + group: check-${{ github.ref }} + cancel-in-progress: true # If changing default-python be sure to change job "tests" matrix: include: also env: - default-python: "3.11" + default-python: "3.12" jobs: lint: @@ -55,12 +61,12 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.12", "3.11", "3.10", "3.9", "3.8"] include: - os: windows-latest - python-version: "3.11" + python-version: "3.12" - os: macos-latest - python-version: "3.11" + python-version: "3.12" steps: - uses: actions/checkout@v4 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 649d6a5f3c..15f7be99b2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,14 +12,16 @@ repos: - id: check-added-large-files - id: trailing-whitespace - id: check-yaml -- repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.3 +- repo: https://github.com/tox-dev/pyproject-fmt + rev: "1.5.2" hooks: - - id: ruff -- repo: https://github.com/psf/black - rev: 23.10.1 + - id: pyproject-fmt +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.6 hooks: - - id: black + - id: ruff-format + - id: ruff + args: [ "--fix", "--unsafe-fixes", "--exit-non-zero-on-fix"] # mypy args: # must include --ignore-missing-imports for mypy. 
It is included by default # if no arguments are supplied, but we must supply it ourselves since we @@ -27,8 +29,8 @@ repos: # cannot use --warn-unused-ignores because it conflicts with # --ignore-missing-imports - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.1 + rev: v1.7.1 hooks: - id: mypy - args: ['--warn-unused-ignores', '--strict-equality','--no-implicit-optional'] + args: ['--warn-unused-ignores', '--strict-equality','--no-implicit-optional', '--check-untyped-defs'] exclude: 'testdata/test_package_specifier/local_extras/setup.py' diff --git a/.readthedocs.yaml b/.readthedocs.yml similarity index 58% rename from .readthedocs.yaml rename to .readthedocs.yml index 3930655266..b1717cf813 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yml @@ -1,11 +1,8 @@ -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - version: 2 - build: os: ubuntu-22.04 tools: - python: "3.11" + python: "3.12" commands: - pip install nox - nox --session build_docs -- "${READTHEDOCS_OUTPUT}"/html diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c7e67e1ed..e13d4d37b3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ automatically upgraded. - Print all environment variables in `pipx environment` - Return an error message when directory can't be added to PATH successfully - Expose manual pages included in an application installed with `pipx install` +- Add explicit 3.12 support ## 1.2.1 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8715dfbcbd..6010898ebe 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,45 +52,51 @@ nox -l At the time of this writing, the output looks like this ``` -- refresh_packages_cache-3.6 -> Populate .pipx_tests/package_cache -- refresh_packages_cache-3.7 -> Populate .pipx_tests/package_cache -- refresh_packages_cache-3.8 -> Populate .pipx_tests/package_cache +- refresh_packages_cache-3.12 -> Populate .pipx_tests/package_cache +- refresh_packages_cache-3.11 -> Populate .pipx_tests/package_cache +- refresh_packages_cache-3.10 -> Populate .pipx_tests/package_cache - refresh_packages_cache-3.9 -> Populate .pipx_tests/package_cache -- tests_internet-3.6 -> Tests using internet pypi only -- tests_internet-3.7 -> Tests using internet pypi only -- tests_internet-3.8 -> Tests using internet pypi only +- refresh_packages_cache-3.8 -> Populate .pipx_tests/package_cache +- tests_internet-3.12 -> Tests using internet pypi only +- tests_internet-3.11 -> Tests using internet pypi only +- tests_internet-3.10 -> Tests using internet pypi only - tests_internet-3.9 -> Tests using internet pypi only -* tests-3.6 -> Tests using local pypiserver only -* tests-3.7 -> Tests using local pypiserver only -* tests-3.8 -> Tests using local pypiserver only +- tests_internet-3.8 -> Tests using internet pypi only +* tests-3.12 -> Tests using local pypiserver only +* tests-3.11 -> Tests using local pypiserver only +* tests-3.10 -> Tests using local pypiserver only * tests-3.9 -> Tests using local pypiserver only -- test_all_packages-3.6 -- test_all_packages-3.7 -- test_all_packages-3.8 +* tests-3.8 -> Tests using local pypiserver only +- test_all_packages-3.12 +- test_all_packages-3.11 +- test_all_packages-3.10 - test_all_packages-3.9 +- test_all_packages-3.8 - cover -> Coverage analysis * lint -- develop-3.6 -- develop-3.7 -- develop-3.8 +- develop-3.12 +- develop-3.11 +- develop-3.10 - develop-3.9 +- develop-3.8 - build - publish * build_docs -- publish_docs - watch_docs +* build_man - pre_release - post_release -- create_test_package_list-3.6 -- 
create_test_package_list-3.7 -- create_test_package_list-3.8 +- create_test_package_list-3.12 +- create_test_package_list-3.11 +- create_test_package_list-3.10 - create_test_package_list-3.9 +- create_test_package_list-3.8 ``` ### Unit Tests -To run unit tests in Python3.9, you can run +To run unit tests in Python3.12, you can run ``` -nox -s tests-3.9 +nox -s tests-3.12 ``` !!! tip diff --git a/docs/examples.md b/docs/examples.md index 61bed698e8..974d047ed1 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -3,7 +3,7 @@ ``` pipx install pycowsay pipx install --python python3.10 pycowsay -pipx install --python 3.11 pycowsay +pipx install --python 3.12 pycowsay pipx install git+https://github.com/psf/black pipx install git+https://github.com/psf/black.git@branch-name pipx install git+https://github.com/psf/black.git@git-hash diff --git a/noxfile.py b/noxfile.py index 6beb7bce9f..ab2d89e742 100644 --- a/noxfile.py +++ b/noxfile.py @@ -4,15 +4,14 @@ import nox # type: ignore -PYTHON_ALL_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] -PYTHON_DEFAULT_VERSION = "3.11" +PYTHON_ALL_VERSIONS = ["3.12", "3.11", "3.10", "3.9", "3.8"] +PYTHON_DEFAULT_VERSION = "3.12" DOC_DEPENDENCIES = [".", "jinja2", "mkdocs", "mkdocs-material"] MAN_DEPENDENCIES = [".", "argparse-manpage[setuptools]"] LINT_DEPENDENCIES = [ - "black==23.10.1", - "mypy==1.6.1", + "mypy==1.7.1", "packaging>=20.0", - "ruff==0.1.3", + "ruff==0.1.6", "types-jinja2", ] # Packages whose dependencies need an intact system PATH to compile @@ -51,11 +50,7 @@ def prebuild_wheels(session, prebuild_dict): def has_changes(): status = ( - subprocess.run( - "git status --porcelain", shell=True, check=True, stdout=subprocess.PIPE - ) - .stdout.decode() - .strip() + subprocess.run("git status --porcelain", shell=True, check=True, stdout=subprocess.PIPE).stdout.decode().strip() ) return len(status) > 0 @@ -103,7 +98,7 @@ def tests_with_options(session, net_pypiserver): if net_pypiserver: pypiserver_option = ["--net-pypiserver"] else: - session.install("pypiserver[passlib]") + session.install("pypiserver[passlib]", 'setuptools; python_version>="3.12"') refresh_packages_cache(session) pypiserver_option = [] @@ -152,16 +147,14 @@ def cover(session): def lint(session): session.run("python", "-m", "pip", "install", "--upgrade", "pip") session.install(*LINT_DEPENDENCIES) - files = [str(Path("src") / "pipx"), "tests", "scripts"] + [ - str(p) for p in Path(".").glob("*.py") - ] + files = [str(Path("src") / "pipx"), "tests", "scripts"] + [str(p) for p in Path(".").glob("*.py")] session.run("ruff", *files) - session.run("black", "--check", *files) session.run( "mypy", "--strict-equality", "--no-implicit-optional", "--warn-unused-ignores", + "--check-untyped-defs", *files, ) @@ -170,7 +163,7 @@ def lint(session): def develop(session): session.run("python", "-m", "pip", "install", "--upgrade", "pip") session.install(*DOC_DEPENDENCIES, *LINT_DEPENDENCIES) - session.install("-e", ".") + session.install("-e", ".", "pytest", "pypiserver[passlib]", 'setuptools; python_version>="3.12"') @nox.session(python=PYTHON_DEFAULT_VERSION) @@ -196,9 +189,7 @@ def build_docs(session): site_dir = session.posargs or ["site/"] session.run("python", "-m", "pip", "install", "--upgrade", "pip") session.install(*DOC_DEPENDENCIES) - session.env[ - "PIPX__DOC_DEFAULT_PYTHON" - ] = "typically the python used to execute pipx" + session.env["PIPX__DOC_DEFAULT_PYTHON"] = "typically the python used to execute pipx" session.run("python", "scripts/generate_docs.py") session.run("mkdocs", "build", 
"--strict", "--site-dir", *site_dir) @@ -214,9 +205,7 @@ def watch_docs(session): def build_man(session): session.run("python", "-m", "pip", "install", "--upgrade", "pip") session.install(*MAN_DEPENDENCIES) - session.env[ - "PIPX__DOC_DEFAULT_PYTHON" - ] = "typically the python used to execute pipx" + session.env["PIPX__DOC_DEFAULT_PYTHON"] = "typically the python used to execute pipx" session.run("python", "scripts/generate_man.py") @@ -247,17 +236,14 @@ def post_release(session): session.run("git", "--no-pager", "diff", external=True) print("") session.log( - "If `git diff` above looks ok, execute the following command:\n\n" - " git commit -a -m 'Post-release.'\n" + "If `git diff` above looks ok, execute the following command:\n\n" " git commit -a -m 'Post-release.'\n" ) @nox.session(python=PYTHON_ALL_VERSIONS) def create_test_package_list(session): session.run("python", "-m", "pip", "install", "--upgrade", "pip") - output_dir = ( - session.posargs[0] if session.posargs else str(PIPX_TESTS_PACKAGE_LIST_DIR) - ) + output_dir = session.posargs[0] if session.posargs else str(PIPX_TESTS_PACKAGE_LIST_DIR) session.run( "python", "scripts/list_test_packages.py", diff --git a/pyproject.toml b/pyproject.toml index 950f0f54d6..509c8597d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,15 +1,23 @@ [build-system] -requires = ["hatchling>=0.15.0"] build-backend = "hatchling.build" +requires = [ + "hatchling>=0.15", +] [project] name = "pipx" description = "Install and Run Python Applications in Isolated Environments" readme = "README.md" +keywords = [ + "cli", + "install", + "pip", + "Virtual Environment", + "workflow", +] license = "MIT" -requires-python = ">=3.8" -keywords = ["pip", "install", "cli", "workflow", "Virtual Environment"] authors = [{ name = "Chad Smith", email = "chadsmith.software@gmail.com" }] +requires-python = ">=3.8" classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", @@ -19,40 +27,34 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +dynamic = [ + "version", ] dependencies = [ "argcomplete>=1.9.4", - "colorama>=0.4.4; sys_platform == 'win32'", - "packaging>=20.0", - "platformdirs>=2.1.0", - "tomli; python_version < '3.11'", - "userpath>=1.6.0,!=1.9.0", + 'colorama>=0.4.4; sys_platform == "win32"', + "packaging>=20", + "platformdirs>=2.1", + 'tomli; python_version < "3.11"', + "userpath!=1.9.0,>=1.6", ] -dynamic = ["version"] - -[project.urls] -Documentation = "https://pypa.github.io/pipx/" -"Source Code" = "https://github.com/pypa/pipx" -"Bug Tracker" = "https://github.com/pypa/pipx/issues" - -[project.scripts] -pipx = "pipx.main:cli" +urls."Bug Tracker" = "https://github.com/pypa/pipx/issues" +urls.Documentation = "https://pipx.pypa.io" +urls.Homepage = "https://pipx.pypa.io" +urls."Release Notes" = "https://pipx.pypa.io/latest/changelog/" +urls."Source Code" = "https://github.com/pypa/pipx" +scripts.pipx = "pipx.main:cli" -[tool.hatch.version] -source = "code" -path = "src/pipx/version.py" - -[tool.hatch.build.targets.sdist] -include = ["/src", "/logo.png", "/pipx_demo.gif", "/*.md"] - -[tool.black] -skip-magic-trailing-comma = true +[tool.hatch] +version.source = "code" +version.path = "src/pipx/version.py" +build.targets.sdist.include = ["/src", "/logo.png", "/pipx_demo.gif", "/*.md"] [tool.ruff] line-length = 121 - -[tool.ruff.lint] -select = [ +lint.select = [ "A", "B", "C4", @@ 
-66,27 +68,22 @@ select = [ "RSE", "W", ] -ignore = [ +lint.ignore = [ "B904", ] - -[tool.ruff.lint.isort] -known-first-party = ["helpers", "package_info", "pipx"] - -[tool.ruff.lint.mccabe] -max-complexity = 15 +isort = {known-first-party = ["helpers", "package_info", "pipx"]} +lint.mccabe.max-complexity = 15 [tool.pytest.ini_options] markers = ["all_packages: test install with maximum number of packages"] [tool.mypy] show_error_codes = true - -[[tool.mypy.overrides]] -module = [ +overrides = [ + { module = [ "packaging.*", "platformdirs", "pycowsay.*", "jinja2", + ], ignore_missing_imports = true }, ] -ignore_missing_imports = true diff --git a/scripts/generate_docs.py b/scripts/generate_docs.py index e65f7c43f1..ef0b9c05b5 100644 --- a/scripts/generate_docs.py +++ b/scripts/generate_docs.py @@ -17,9 +17,7 @@ def get_help(pipxcmd: Optional[str]) -> str: cmd = ["pipx", "--help"] helptext = ( - subprocess.run(cmd, stdout=subprocess.PIPE, check=True) - .stdout.decode() - .replace(os.path.expanduser("~"), "~") + subprocess.run(cmd, stdout=subprocess.PIPE, check=True).stdout.decode().replace(os.path.expanduser("~"), "~") ) return f""" ``` diff --git a/scripts/generate_man.py b/scripts/generate_man.py index cd63e041b5..602a84c9db 100644 --- a/scripts/generate_man.py +++ b/scripts/generate_man.py @@ -3,6 +3,7 @@ import os.path import sys import textwrap +from typing import cast from build_manpages.manpage import Manpage # type: ignore @@ -12,7 +13,7 @@ def main(): sys.argv[0] = "pipx" parser = get_command_parser() - parser.man_short_description = parser.description.splitlines()[1] + parser.man_short_description = cast(str, parser.description).splitlines()[1] # type: ignore[attr-defined] manpage = Manpage(parser) body = str(manpage) diff --git a/scripts/list_test_packages.py b/scripts/list_test_packages.py index 139552d845..cac66e83c6 100644 --- a/scripts/list_test_packages.py +++ b/scripts/list_test_packages.py @@ -1,12 +1,12 @@ #!/usr/bin/env python3 import argparse -import os import re import subprocess import sys import tempfile +from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path -from typing import Any, Dict, List +from typing import Any, Dict, List, Set from test_packages_support import get_platform_list_path @@ -33,20 +33,12 @@ def process_command_line(argv: List[str]) -> argparse.Namespace: # required arguments parser.add_argument( "primary_package_list", - help="Main packages to examine, getting list of " - "matching distribution files and dependencies.", - ) - parser.add_argument( - "package_list_dir", help="Directory to output package distribution lists." 
+ help="Main packages to examine, getting list of " "matching distribution files and dependencies.", ) + parser.add_argument("package_list_dir", help="Directory to output package distribution lists.") # switches/options: - parser.add_argument( - "-v", - "--verbose", - action="store_true", - help="Maximum verbosity, especially for pip operations.", - ) + parser.add_argument("-v", "--verbose", action="store_true", help="Maximum verbosity, especially for pip operations.") args = parser.parse_args(argv) @@ -65,16 +57,9 @@ def parse_package_list(package_list_file: Path) -> List[Dict[str, Any]]: if len(line_list) == 1: output_list.append({"spec": line_list[0]}) elif len(line_list) == 2: - output_list.append( - { - "spec": line_list[0], - "no-deps": line_list[1].lower() == "true", - } - ) + output_list.append({"spec": line_list[0], "no-deps": line_list[1].lower() == "true"}) else: - print( - f"ERROR: Unable to parse primary package list line:\n {line.strip()}" - ) + print(f"ERROR: Unable to parse primary package list line:\n {line.strip()}") return [] except OSError: print("ERROR: File problem reading primary package list.") @@ -82,58 +67,26 @@ def parse_package_list(package_list_file: Path) -> List[Dict[str, Any]]: return output_list -def create_test_packages_list( - package_list_dir_path: Path, primary_package_list_path: Path, verbose: bool -) -> int: +def create_test_packages_list(package_list_dir_path: Path, primary_package_list_path: Path, verbose: bool) -> int: exit_code = 0 package_list_dir_path.mkdir(exist_ok=True) platform_package_list_path = get_platform_list_path(package_list_dir_path) primary_test_packages = parse_package_list(primary_package_list_path) if not primary_test_packages: - print( - f"ERROR: Problem reading {primary_package_list_path}. Exiting.", - file=sys.stderr, - ) + print(f"ERROR: Problem reading {primary_package_list_path}. 
Exiting.", file=sys.stderr) return 1 - with tempfile.TemporaryDirectory() as download_dir: - for test_package in primary_test_packages: - test_package_option_string = ( - " (no-deps)" if test_package.get("no-deps", False) else "" - ) - verbose_this_iteration = False - cmd_list = ( - ["pip", "download"] - + (["--no-deps"] if test_package.get("no-deps", False) else []) - + [test_package["spec"], "-d", str(download_dir)] - ) - if verbose: - print(f"CMD: {' '.join(cmd_list)}") - pip_download_process = subprocess.run( - cmd_list, capture_output=True, text=True, check=False - ) - if pip_download_process.returncode == 0: - print(f"Examined {test_package['spec']}{test_package_option_string}") - else: - print( - f"ERROR with {test_package['spec']}{test_package_option_string}", - file=sys.stderr, - ) - verbose_this_iteration = True - exit_code = 1 - if verbose or verbose_this_iteration: - print(pip_download_process.stdout) - print(pip_download_process.stderr) - downloaded_list = os.listdir(download_dir) + with ThreadPoolExecutor(max_workers=12) as pool: + futures = {pool.submit(download, pkg, verbose) for pkg in primary_test_packages} + downloaded_list = set() + for future in as_completed(futures): + downloaded_list.update(future.result()) all_packages = [] - for downloaded_filename in downloaded_list: - wheel_re = re.search( - r"([^-]+)\-([^-]+)\-([^-]+)\-([^-]+)\-([^-]+)(-[^-]+)?\.whl$", - downloaded_filename, - ) - src_re = re.search(r"(.+)\-([^-]+)\.(?:tar.gz|zip)$", downloaded_filename) + for downloaded_path in downloaded_list: + wheel_re = re.search(r"([^-]+)-([^-]+)-([^-]+)\-([^-]+)-([^-]+)(-[^-]+)?\.whl$", downloaded_path) + src_re = re.search(r"(.+)-([^-]+)\.(?:tar.gz|zip)$", downloaded_path) if wheel_re: package_name = wheel_re.group(1) package_version = wheel_re.group(2) @@ -141,7 +94,7 @@ def create_test_packages_list( package_name = src_re.group(1) package_version = src_re.group(2) else: - print(f"ERROR: cannot parse: {downloaded_filename}", file=sys.stderr) + print(f"ERROR: cannot parse: {downloaded_path}", file=sys.stderr) continue all_packages.append(f"{package_name}=={package_version}") @@ -153,12 +106,30 @@ def create_test_packages_list( return exit_code +def download(test_package: Dict[str, str], verbose: bool) -> Set[str]: + no_deps = test_package.get("no-deps", False) + test_package_option_string = " (no-deps)" if no_deps else "" + verbose_this_iteration = False + with tempfile.TemporaryDirectory() as download_dir: + cmd_list = ["pip", "download"] + (["--no-deps"] if no_deps else []) + [test_package["spec"], "-d", download_dir] + if verbose: + print(f"CMD: {' '.join(cmd_list)}") + pip_download_process = subprocess.run(cmd_list, capture_output=True, text=True, check=False) + if pip_download_process.returncode == 0: + print(f"Examined {test_package['spec']}{test_package_option_string}") + else: + print(f"ERROR with {test_package['spec']}{test_package_option_string}", file=sys.stderr) + verbose_this_iteration = True + if verbose or verbose_this_iteration: + print(pip_download_process.stdout) + print(pip_download_process.stderr) + return {i.name for i in Path(download_dir).iterdir()} + + def main(argv: List[str]) -> int: args = process_command_line(argv) - return create_test_packages_list( - Path(args.package_list_dir), Path(args.primary_package_list), args.verbose - ) + return create_test_packages_list(Path(args.package_list_dir), Path(args.primary_package_list), args.verbose) if __name__ == "__main__": diff --git a/scripts/migrate_pipsi_to_pipx.py 
b/scripts/migrate_pipsi_to_pipx.py index 74df56e250..dbee21a819 100644 --- a/scripts/migrate_pipsi_to_pipx.py +++ b/scripts/migrate_pipsi_to_pipx.py @@ -41,14 +41,9 @@ def main(): ret = subprocess.run(["pipsi", "uninstall", "--yes", package], check=False) if ret.returncode: error = True - print( - f"Failed to uninstall {package!r} with pipsi. " - "Not attempting to install with pipx." - ) + print(f"Failed to uninstall {package!r} with pipsi. " "Not attempting to install with pipx.") else: - print( - f"uninstalled {package!r} with pipsi. Now attempting to install with pipx." - ) + print(f"uninstalled {package!r} with pipsi. Now attempting to install with pipx.") ret = subprocess.run(["pipx", "install", package], check=False) if ret.returncode: error = True @@ -58,14 +53,11 @@ def main(): print(f"Done migrating {len(packages)} packages!") print( - "You may still need to run `pipsi uninstall pipsi` or `pip uninstall pipsi`. " - "Refer to pipsi's documentation." + "You may still need to run `pipsi uninstall pipsi` or `pip uninstall pipsi`. " "Refer to pipsi's documentation." ) if error: - print( - "Note: Finished with errors. Review output to manually complete migration." - ) + print("Note: Finished with errors. Review output to manually complete migration.") if __name__ == "__main__": diff --git a/scripts/pipx_prerelease.py b/scripts/pipx_prerelease.py index 9302d097b6..bff251a7a1 100644 --- a/scripts/pipx_prerelease.py +++ b/scripts/pipx_prerelease.py @@ -28,9 +28,7 @@ def fix_changelog(new_version: str) -> bool: changelog_file = Path("docs/changelog.md") new_changelog_file = Path("docs/changelog.new") - copy_file_replace_line( - changelog_file, new_changelog_file, line_re=r"^\s*dev\s*$", new_line=new_version - ) + copy_file_replace_line(changelog_file, new_changelog_file, line_re=r"^\s*dev\s*$", new_line=new_version) new_changelog_file.rename(changelog_file) return True diff --git a/scripts/pipx_release.py b/scripts/pipx_release.py index 90151a8df4..65352d5525 100644 --- a/scripts/pipx_release.py +++ b/scripts/pipx_release.py @@ -8,9 +8,7 @@ def python_mypy_ok(filepath: Path) -> bool: return True if mypy_proc.returncode == 0 else False -def copy_file_replace_line( - orig_file: Path, new_file: Path, line_re: str, new_line: str -) -> None: +def copy_file_replace_line(orig_file: Path, new_file: Path, line_re: str, new_line: str) -> None: old_version_fh = orig_file.open("r") new_version_fh = new_file.open("w") for line in old_version_fh: diff --git a/scripts/test_packages_support.py b/scripts/test_packages_support.py index 151b52f84a..0341672870 100644 --- a/scripts/test_packages_support.py +++ b/scripts/test_packages_support.py @@ -14,9 +14,7 @@ def get_platform_list_path(package_list_dir_path: Path) -> Path: - platform_package_list_path = ( - package_list_dir_path / f"{FULL_PLATFORM}-python{PYTHON_VERSION_STR}.txt" - ) + platform_package_list_path = package_list_dir_path / f"{FULL_PLATFORM}-python{PYTHON_VERSION_STR}.txt" return platform_package_list_path diff --git a/scripts/update_package_cache.py b/scripts/update_package_cache.py index ee0ad07440..c43aedd1a3 100644 --- a/scripts/update_package_cache.py +++ b/scripts/update_package_cache.py @@ -3,6 +3,7 @@ import re import subprocess import sys +from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path from typing import List @@ -45,16 +46,13 @@ def process_command_line(argv: List[str]) -> argparse.Namespace: "-c", "--check-only", action="store_true", - help="Only check to see if needed packages are in 
PACKAGES_DIR, do not " - "download or delete files.", + help="Only check to see if needed packages are in PACKAGES_DIR, do not " "download or delete files.", ) return parser.parse_args(argv) -def update_test_packages_cache( - package_list_dir_path: Path, pipx_package_cache_path: Path, check_only: bool -) -> int: +def update_test_packages_cache(package_list_dir_path: Path, pipx_package_cache_path: Path, check_only: bool) -> int: exit_code = 0 platform_package_list_path = get_platform_list_path(package_list_dir_path) @@ -65,8 +63,7 @@ def update_test_packages_cache( if not platform_package_list_path.exists(): print( - f"WARNING. File {str(platform_package_list_path)}\n" - " does not exist. Creating now...", + f"WARNING. File {str(platform_package_list_path)}\n" " does not exist. Creating now...", file=sys.stderr, ) create_list_returncode = create_test_packages_list( @@ -83,8 +80,7 @@ def update_test_packages_cache( ) else: print( - f"ERROR. Unable to create {str(platform_package_list_path)}\n" - " Cannot continue.\n", + f"ERROR. Unable to create {str(platform_package_list_path)}\n" " Cannot continue.\n", file=sys.stderr, ) return 1 @@ -93,8 +89,7 @@ def update_test_packages_cache( platform_package_list_fh = platform_package_list_path.open("r") except OSError: print( - f"ERROR. File {str(platform_package_list_path)}\n" - " is not readable. Cannot continue.\n", + f"ERROR. File {str(platform_package_list_path)}\n" " is not readable. Cannot continue.\n", file=sys.stderr, ) return 1 @@ -119,12 +114,7 @@ def update_test_packages_cache( package_name = package_spec_re.group(1) package_ver = package_spec_re.group(2) - package_dist_patt = ( - re.escape(package_name) - + r"-" - + re.escape(package_ver) - + r"(.tar.gz|.zip|-)" - ) + package_dist_patt = re.escape(package_name) + r"-" + re.escape(package_ver) + r"(.tar.gz|.zip|-)" matches = [] for output_dir_file in packages_dir_files: if re.search(package_dist_patt, output_dir_file.name): @@ -134,9 +124,7 @@ def update_test_packages_cache( packages_dir_hits.append(matches[0]) continue elif len(matches) > 1: - print( - f"ERROR: more than one match for {package_spec}.", file=sys.stderr - ) + print(f"ERROR: more than one match for {package_spec}.", file=sys.stderr) print(f" {matches}", file=sys.stderr) exit_code = 1 continue @@ -150,27 +138,10 @@ def update_test_packages_cache( if check_only: return 0 if len(packages_dir_missing) == 0 else 1 else: - for package_spec in packages_dir_missing: - pip_download_process = subprocess.run( - [ - "pip", - "download", - "--no-deps", - package_spec, - "-d", - str(packages_dir_path), - ], - capture_output=True, - text=True, - check=False, - ) - if pip_download_process.returncode == 0: - print(f"Successfully downloaded {package_spec}") - else: - print(f"ERROR downloading {package_spec}", file=sys.stderr) - print(pip_download_process.stdout, file=sys.stderr) - print(pip_download_process.stderr, file=sys.stderr) - exit_code = 1 + with ThreadPoolExecutor(max_workers=12) as pool: + futures = {pool.submit(download, pkg, packages_dir_path) for pkg in packages_dir_missing} + for future in as_completed(futures): + exit_code = future.result() or exit_code for unused_file in packages_dir_files: print(f"Deleting {unused_file}...") @@ -179,11 +150,33 @@ def update_test_packages_cache( return exit_code +def download(package_spec: str, packages_dir_path: Path) -> int: + pip_download_process = subprocess.run( + [ + "pip", + "download", + "--no-deps", + package_spec, + "-d", + str(packages_dir_path), + ], + capture_output=True, + 
text=True, + check=False, + ) + if pip_download_process.returncode == 0: + print(f"Successfully downloaded {package_spec}") + return 0 + + print(f"ERROR downloading {package_spec}", file=sys.stderr) + print(pip_download_process.stdout, file=sys.stderr) + print(pip_download_process.stderr, file=sys.stderr) + return 1 + + def main(argv: List[str]) -> int: args = process_command_line(argv) - return update_test_packages_cache( - Path(args.package_list_dir), Path(args.pipx_package_cache_dir), args.check_only - ) + return update_test_packages_cache(Path(args.package_list_dir), Path(args.pipx_package_cache_dir), args.check_only) if __name__ == "__main__": diff --git a/src/pipx/__init__.py b/src/pipx/__init__.py index c4155c40ab..43b21b8a5c 100644 --- a/src/pipx/__init__.py +++ b/src/pipx/__init__.py @@ -1,8 +1,4 @@ import sys if sys.version_info < (3, 8, 0): - sys.exit( - "Python 3.8 or later is required. " - "See https://github.com/pypa/pipx " - "for installation instructions." - ) + sys.exit("Python 3.8 or later is required. " "See https://github.com/pypa/pipx " "for installation instructions.") diff --git a/src/pipx/animate.py b/src/pipx/animate.py index 1561cbbb77..327911dbb6 100644 --- a/src/pipx/animate.py +++ b/src/pipx/animate.py @@ -30,9 +30,7 @@ def _env_supports_animation() -> bool: @contextmanager -def animate( - message: str, do_animation: bool, *, delay: float = 0 -) -> Generator[None, None, None]: +def animate(message: str, do_animation: bool, *, delay: float = 0) -> Generator[None, None, None]: if not do_animation or not _env_supports_animation(): # No animation, just a single print of message sys.stderr.write(f"{message}...\n") diff --git a/src/pipx/commands/common.py b/src/pipx/commands/common.py index c0bb25aefa..62fd0de8df 100644 --- a/src/pipx/commands/common.py +++ b/src/pipx/commands/common.py @@ -155,10 +155,7 @@ def _symlink_package_resource( ) return if is_symlink and not exists: - logger.info( - f"Removing existing symlink {str(symlink_path)} since it " - "pointed non-existent location" - ) + logger.info(f"Removing existing symlink {str(symlink_path)} since it " "pointed non-existent location") symlink_path.unlink() if executable: @@ -179,9 +176,7 @@ def _symlink_package_resource( ) -def venv_health_check( - venv: Venv, package_name: Optional[str] = None -) -> Tuple[VenvProblems, str]: +def venv_health_check(venv: Venv, package_name: Optional[str] = None) -> Tuple[VenvProblems, str]: venv_dir = venv.root python_path = venv.python_path.resolve() @@ -191,26 +186,22 @@ def venv_health_check( if not python_path.is_file(): return ( VenvProblems(invalid_interpreter=True), - f" package {red(bold(venv_dir.name))} has invalid " - f"interpreter {str(python_path)}\r{hazard}", + f" package {red(bold(venv_dir.name))} has invalid " f"interpreter {str(python_path)}\r{hazard}", ) if not venv.package_metadata: return ( VenvProblems(missing_metadata=True), - f" package {red(bold(venv_dir.name))} has missing " - f"internal pipx metadata.\r{hazard}", + f" package {red(bold(venv_dir.name))} has missing " f"internal pipx metadata.\r{hazard}", ) if venv_dir.name != canonicalize_name(venv_dir.name): return ( VenvProblems(bad_venv_name=True), - f" package {red(bold(venv_dir.name))} needs its " - f"internal data updated.\r{hazard}", + f" package {red(bold(venv_dir.name))} needs its " f"internal data updated.\r{hazard}", ) if venv.package_metadata[package_name].package_version == "": return ( VenvProblems(not_installed=True), - f" package {red(bold(package_name))} {red('is not installed')} " - 
f"in the venv {venv_dir.name}\r{hazard}", + f" package {red(bold(package_name))} {red('is not installed')} " f"in the venv {venv_dir.name}\r{hazard}", ) return (VenvProblems(), "") @@ -245,8 +236,7 @@ def get_venv_summary( ) exposed_binary_names = sorted(p.name for p in exposed_app_paths) unavailable_binary_names = sorted( - {add_suffix(name, package_metadata.suffix) for name in package_metadata.apps} - - set(exposed_binary_names) + {add_suffix(name, package_metadata.suffix) for name in package_metadata.apps} - set(exposed_binary_names) ) exposed_man_paths = set() for man_section in MAN_SECTIONS: @@ -255,19 +245,11 @@ def get_venv_summary( constants.LOCAL_MAN_DIR / man_section, man_pages, ) - exposed_man_pages = sorted( - str(Path(p.parent.name) / p.name) for p in exposed_man_paths - ) - unavailable_man_pages = sorted( - set(package_metadata.man_pages) - set(exposed_man_pages) - ) + exposed_man_pages = sorted(str(Path(p.parent.name) / p.name) for p in exposed_man_paths) + unavailable_man_pages = sorted(set(package_metadata.man_pages) - set(exposed_man_pages)) # The following is to satisfy mypy that python_version is str and not # Optional[str] - python_version = ( - venv.pipx_metadata.python_version - if venv.pipx_metadata.python_version is not None - else "" - ) + python_version = venv.pipx_metadata.python_version if venv.pipx_metadata.python_version is not None else "" return ( _get_list_output( python_version, @@ -362,17 +344,13 @@ def _get_list_output( for name in exposed_binary_names: output.append(f" - {name}") for name in unavailable_binary_names: - output.append( - f" - {red(name)} (symlink missing or pointing to unexpected location)" - ) + output.append(f" - {red(name)} (symlink missing or pointing to unexpected location)") if new_install and (exposed_man_pages or unavailable_man_pages): output.append(" These manual pages are now globally available") for name in exposed_man_pages: output.append(f" - {name}") for name in unavailable_man_pages: - output.append( - f" - {red(name)} (symlink missing or pointing to unexpected location)" - ) + output.append(f" - {red(name)} (symlink missing or pointing to unexpected location)") if injected_packages: output.append(" Injected Packages:") for name in injected_packages: @@ -380,9 +358,7 @@ def _get_list_output( return "\n".join(output) -def package_name_from_spec( - package_spec: str, python: str, *, pip_args: List[str], verbose: bool -) -> str: +def package_name_from_spec(package_spec: str, python: str, *, pip_args: List[str], verbose: bool) -> str: start_time = time.time() # shortcut if valid PyPI name @@ -401,9 +377,7 @@ def package_name_from_spec( with tempfile.TemporaryDirectory() as temp_venv_dir: venv = Venv(Path(temp_venv_dir), python=python, verbose=verbose) venv.create_venv(venv_args=[], pip_args=[]) - package_name = venv.install_package_no_deps( - package_or_url=package_spec, pip_args=pip_args - ) + package_name = venv.install_package_no_deps(package_or_url=package_spec, pip_args=pip_args) logger.info(f"Package name determined in {time.time()-start_time:.1f}s") return package_name @@ -425,8 +399,7 @@ def run_post_install_actions( if ( not venv.main_package_name == package_name - and venv.package_metadata[venv.main_package_name].suffix - == package_metadata.suffix + and venv.package_metadata[venv.main_package_name].suffix == package_metadata.suffix ): package_name = display_name @@ -446,9 +419,7 @@ def run_post_install_actions( dep, dependent_apps, ) in package_metadata.app_paths_of_dependencies.items(): - print( - f"Note: 
Dependent package '{dep}' contains {len(dependent_apps)} apps" - ) + print(f"Note: Dependent package '{dep}' contains {len(dependent_apps)} apps") for app in dependent_apps: print(f" - {app.name}") if venv.safe_to_remove(): @@ -470,9 +441,7 @@ def run_post_install_actions( force=force, suffix=package_metadata.suffix, ) - expose_resources_globally( - "man", local_man_dir, package_metadata.man_paths, force=force - ) + expose_resources_globally("man", local_man_dir, package_metadata.man_paths, force=force) if include_dependencies: for _, app_paths in package_metadata.app_paths_of_dependencies.items(): @@ -486,9 +455,7 @@ def run_post_install_actions( for _, man_paths in package_metadata.man_paths_of_dependencies.items(): expose_resources_globally("man", local_man_dir, man_paths, force=force) - package_summary, _ = get_venv_summary( - venv_dir, package_name=package_name, new_install=True - ) + package_summary, _ = get_venv_summary(venv_dir, package_name=package_name, new_install=True) print(package_summary) warn_if_not_on_path(local_bin_dir) print(f"done! {stars}", file=sys.stderr) diff --git a/src/pipx/commands/ensure_path.py b/src/pipx/commands/ensure_path.py index 6e3de6ffe0..c24b56b08e 100644 --- a/src/pipx/commands/ensure_path.py +++ b/src/pipx/commands/ensure_path.py @@ -90,9 +90,7 @@ def ensure_path(location: Path, *, force: bool) -> Tuple[bool, bool]: ) ) else: - print( - pipx_wrap(f"{location_str} is already in PATH.", subsequent_indent=" " * 4) - ) + print(pipx_wrap(f"{location_str} is already in PATH.", subsequent_indent=" " * 4)) return (path_added, need_shell_restart) @@ -108,9 +106,7 @@ def ensure_pipx_paths(force: bool) -> ExitCode: path_added = False need_shell_restart = False for bin_path in bin_paths: - (path_added_current, need_shell_restart_current) = ensure_path( - bin_path, force=force - ) + (path_added_current, need_shell_restart_current) = ensure_path(bin_path, force=force) path_added |= path_added_current need_shell_restart |= need_shell_restart_current diff --git a/src/pipx/commands/install.py b/src/pipx/commands/install.py index 7146d81f2a..2f51b9d663 100644 --- a/src/pipx/commands/install.py +++ b/src/pipx/commands/install.py @@ -34,9 +34,7 @@ def install( python = python or DEFAULT_PYTHON if package_name is None: - package_name = package_name_from_spec( - package_spec, python, pip_args=pip_args, verbose=verbose - ) + package_name = package_name_from_spec(package_spec, python, pip_args=pip_args, verbose=verbose) if venv_dir is None: venv_container = VenvContainer(constants.PIPX_LOCAL_VENVS) venv_dir = venv_container.get_venv_dir(f"{package_name}{suffix}") @@ -78,9 +76,7 @@ def install( override_shared = package_name == "pip" venv.create_venv(venv_args, pip_args, override_shared) for dep in preinstall_packages or []: - dep_name = package_name_from_spec( - dep, python, pip_args=pip_args, verbose=verbose - ) + dep_name = package_name_from_spec(dep, python, pip_args=pip_args, verbose=verbose) venv.upgrade_package_no_metadata(dep_name, []) venv.install_package( package_name=package_name, diff --git a/src/pipx/commands/list_packages.py b/src/pipx/commands/list_packages.py index 7473dcff93..ba584d3341 100644 --- a/src/pipx/commands/list_packages.py +++ b/src/pipx/commands/list_packages.py @@ -43,18 +43,14 @@ def list_short(venv_dirs: Collection[Path]) -> VenvProblems: return all_venv_problems -def list_text( - venv_dirs: Collection[Path], include_injected: bool, venv_root_dir: str -) -> VenvProblems: +def list_text(venv_dirs: Collection[Path], include_injected: bool, 
venv_root_dir: str) -> VenvProblems: print(f"venvs are in {bold(venv_root_dir)}") print(f"apps are exposed on your $PATH at {bold(str(constants.LOCAL_BIN_DIR))}") print(f"manual pages are exposed at {bold(str(constants.LOCAL_MAN_DIR))}") all_venv_problems = VenvProblems() for venv_dir in venv_dirs: - package_summary, venv_problems = get_venv_summary( - venv_dir, include_injected=include_injected - ) + package_summary, venv_problems = get_venv_summary(venv_dir, include_injected=include_injected) if venv_problems.any_(): logger.warning(package_summary) else: @@ -72,9 +68,7 @@ def list_json(venv_dirs: Collection[Path]) -> VenvProblems: } all_venv_problems = VenvProblems() for venv_dir in venv_dirs: - (venv_metadata, venv_problems, warning_str) = get_venv_metadata_summary( - venv_dir - ) + (venv_metadata, venv_problems, warning_str) = get_venv_metadata_summary(venv_dir) all_venv_problems.or_(venv_problems) if venv_problems.any_(): warning_messages.append(warning_str) @@ -83,9 +77,7 @@ def list_json(venv_dirs: Collection[Path]) -> VenvProblems: spec_metadata["venvs"][venv_dir.name] = {} spec_metadata["venvs"][venv_dir.name]["metadata"] = venv_metadata.to_dict() - print( - json.dumps(spec_metadata, indent=4, sort_keys=True, cls=JsonEncoderHandlesPath) - ) + print(json.dumps(spec_metadata, indent=4, sort_keys=True, cls=JsonEncoderHandlesPath)) for warning_message in warning_messages: logger.warning(warning_message) @@ -122,8 +114,7 @@ def list_packages( ) if all_venv_problems.invalid_interpreter: logger.warning( - "\nOne or more packages have a missing python interpreter.\n" - " To fix, execute: pipx reinstall-all" + "\nOne or more packages have a missing python interpreter.\n" " To fix, execute: pipx reinstall-all" ) if all_venv_problems.missing_metadata: logger.warning( diff --git a/src/pipx/commands/reinstall.py b/src/pipx/commands/reinstall.py index 4a65e7420f..7d5ec49f70 100644 --- a/src/pipx/commands/reinstall.py +++ b/src/pipx/commands/reinstall.py @@ -78,9 +78,7 @@ def reinstall( if injected_package.package_or_url is None: # This should never happen, but package_or_url is type # Optional[str] so mypy thinks it could be None - raise PipxError( - f"Internal Error injecting package {injected_package} into {venv.name}" - ) + raise PipxError(f"Internal Error injecting package {injected_package} into {venv.name}") inject_dep( venv_dir, injected_name, @@ -127,8 +125,6 @@ def reinstall_all( if package_exit != 0: failed.append(venv_dir.name) if len(failed) > 0: - raise PipxError( - f"The following package(s) failed to reinstall: {', '.join(failed)}" - ) + raise PipxError(f"The following package(s) failed to reinstall: {', '.join(failed)}") # Any failure to install will raise PipxError, otherwise success return EXIT_CODE_OK diff --git a/src/pipx/commands/run.py b/src/pipx/commands/run.py index ce84efc8b6..1f2813e3b4 100644 --- a/src/pipx/commands/run.py +++ b/src/pipx/commands/run.py @@ -132,9 +132,7 @@ def run_package( pypackage_bin_path = get_pypackage_bin_path(app) if pypackage_bin_path.exists(): - logger.info( - f"Using app in local __pypackages__ directory at '{pypackage_bin_path}'" - ) + logger.info(f"Using app in local __pypackages__ directory at '{pypackage_bin_path}'") run_pypackage_bin(pypackage_bin_path, app_args) if pypackages: raise PipxError( @@ -229,9 +227,7 @@ def _download_and_run( if venv.pipx_metadata.main_package.package is not None: package_name = venv.pipx_metadata.main_package.package else: - package_name = package_name_from_spec( - package_or_url, python, pip_args=pip_args, 
verbose=verbose - ) + package_name = package_name_from_spec(package_or_url, python, pip_args=pip_args, verbose=verbose) override_shared = package_name == "pip" @@ -258,10 +254,7 @@ def _download_and_run( else: app_filename = app else: - all_apps = ( - f"{a} - usage: 'pipx run --spec {package_or_url} {a} [arguments?]'" - for a in apps - ) + all_apps = (f"{a} - usage: 'pipx run --spec {package_or_url} {a} [arguments?]'" for a in apps) raise PipxError( APP_NOT_FOUND_ERROR_MESSAGE.format( app=app, @@ -278,9 +271,7 @@ def _download_and_run( venv.run_app(app, app_filename, app_args) -def _get_temporary_venv_path( - requirements: List[str], python: str, pip_args: List[str], venv_args: List[str] -) -> Path: +def _get_temporary_venv_path(requirements: List[str], python: str, pip_args: List[str], venv_args: List[str]) -> Path: """Computes deterministic path using hashing function on arguments relevant to virtual environment's end state. Arguments used should result in idempotent virtual environment. (i.e. args passed to app aren't relevant, but args @@ -329,9 +320,7 @@ def _http_get_request(url: str) -> str: # This regex comes from PEP 723 -PEP723 = re.compile( - r"(?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$" -) +PEP723 = re.compile(r"(?m)^# /// (?P[a-zA-Z0-9-]+)$\s(?P(^#(| .*)$\s)+)^# ///$") def _get_requirements_from_script(content: str) -> Optional[List[str]]: @@ -353,8 +342,7 @@ def _get_requirements_from_script(content: str) -> Optional[List[str]]: raise ValueError(f"Multiple {name} blocks found") content = "".join( - line[2:] if line.startswith("# ") else line[1:] - for line in matches[0].group("content").splitlines(keepends=True) + line[2:] if line.startswith("# ") else line[1:] for line in matches[0].group("content").splitlines(keepends=True) ) pyproject = tomllib.loads(content) diff --git a/src/pipx/commands/run_pip.py b/src/pipx/commands/run_pip.py index d4bda07e6c..bdb4383d99 100644 --- a/src/pipx/commands/run_pip.py +++ b/src/pipx/commands/run_pip.py @@ -6,14 +6,10 @@ from pipx.venv import Venv -def run_pip( - package: str, venv_dir: Path, pip_args: List[str], verbose: bool -) -> ExitCode: +def run_pip(package: str, venv_dir: Path, pip_args: List[str], verbose: bool) -> ExitCode: """Returns pipx exit code.""" venv = Venv(venv_dir, verbose=verbose) if not venv.python_path.exists(): - raise PipxError( - f"venv for {package!r} was not found. Was {package!r} installed with pipx?" - ) + raise PipxError(f"venv for {package!r} was not found. 
Was {package!r} installed with pipx?") venv.verbose = True return venv.run_pip_get_exit_code(pip_args) diff --git a/src/pipx/commands/uninject.py b/src/pipx/commands/uninject.py index b3d680672e..9711275a2f 100644 --- a/src/pipx/commands/uninject.py +++ b/src/pipx/commands/uninject.py @@ -23,9 +23,7 @@ logger = logging.getLogger(__name__) -def get_include_resource_paths( - package_name: str, venv: Venv, local_bin_dir: Path, local_man_dir: Path -) -> Set[Path]: +def get_include_resource_paths(package_name: str, venv: Venv, local_bin_dir: Path, local_man_dir: Path) -> Set[Path]: bin_dir_app_paths = _get_package_bin_dir_app_paths( venv, venv.package_metadata[package_name], venv.bin_path, local_bin_dir ) @@ -78,9 +76,7 @@ def uninject_dep( need_app_uninstall = venv.package_metadata[package_name].include_apps - new_resource_paths = get_include_resource_paths( - package_name, venv, local_bin_dir, local_man_dir - ) + new_resource_paths = get_include_resource_paths(package_name, venv, local_bin_dir, local_man_dir) if not leave_deps: orig_not_required_packages = venv.list_installed_packages(not_required=True) @@ -114,9 +110,7 @@ def uninject_dep( else: logger.info(f"removed file {path}") - print( - f"Uninjected package {bold(package_name)}{deps_string} from venv {bold(venv.root.name)} {stars}" - ) + print(f"Uninjected package {bold(package_name)}{deps_string} from venv {bold(venv.root.name)} {stars}") return True diff --git a/src/pipx/commands/uninstall.py b/src/pipx/commands/uninstall.py index ef03517186..84ce1d96ef 100644 --- a/src/pipx/commands/uninstall.py +++ b/src/pipx/commands/uninstall.py @@ -66,14 +66,10 @@ def _get_package_bin_dir_app_paths( apps += package_info.apps if package_info.include_dependencies: apps += package_info.apps_of_dependencies - return get_exposed_paths_for_package( - venv_bin_path, local_bin_dir, [add_suffix(app, suffix) for app in apps] - ) + return get_exposed_paths_for_package(venv_bin_path, local_bin_dir, [add_suffix(app, suffix) for app in apps]) -def _get_package_man_paths( - venv: Venv, package_info: PackageInfo, venv_man_path: Path, local_man_dir: Path -) -> Set[Path]: +def _get_package_man_paths(venv: Venv, package_info: PackageInfo, venv_man_path: Path, local_man_dir: Path) -> Set[Path]: man_pages = [] if package_info.include_apps: man_pages += package_info.man_pages @@ -96,9 +92,7 @@ def _get_venv_resource_paths( if venv.pipx_metadata.main_package.package is not None: # Valid metadata for venv for package_info in venv.package_metadata.values(): - resource_paths |= get_package_resource_paths( - venv, package_info, venv_resource_path, local_resource_dir - ) + resource_paths |= get_package_resource_paths(venv, package_info, venv_resource_path, local_resource_dir) elif venv.python_path.is_file(): # No metadata from pipx_metadata.json, but valid python interpreter. # In pre-metadata-pipx venv.root.name is name of main package @@ -107,12 +101,8 @@ def _get_venv_resource_paths( # not include_dependencies. Other PackageInfo fields are irrelevant # here. 
venv_metadata = venv.get_venv_metadata_for_package(venv.root.name, set()) - main_package_info = _venv_metadata_to_package_info( - venv_metadata, venv.root.name - ) - resource_paths = get_package_resource_paths( - venv, main_package_info, venv_resource_path, local_resource_dir - ) + main_package_info = _venv_metadata_to_package_info(venv_metadata, venv.root.name) + resource_paths = get_package_resource_paths(venv, main_package_info, venv_resource_path, local_resource_dir) else: # No metadata and no valid python interpreter. # We'll take our best guess on what to uninstall here based on symlink @@ -124,16 +114,12 @@ def _get_venv_resource_paths( if not local_resource_dir.is_dir() or not can_symlink(local_resource_dir): return set() - resource_paths = get_exposed_paths_for_package( - venv_resource_path, local_resource_dir - ) + resource_paths = get_exposed_paths_for_package(venv_resource_path, local_resource_dir) return resource_paths -def uninstall( - venv_dir: Path, local_bin_dir: Path, local_man_dir: Path, verbose: bool -) -> ExitCode: +def uninstall(venv_dir: Path, local_bin_dir: Path, local_man_dir: Path, verbose: bool) -> ExitCode: """Uninstall entire venv_dir, including main package and all injected packages. @@ -143,21 +129,15 @@ def uninstall( print(f"Nothing to uninstall for {venv_dir.name} {sleep}") app = which(venv_dir.name) if app: - print( - f"{hazard} Note: '{app}' still exists on your system and is on your PATH" - ) + print(f"{hazard} Note: '{app}' still exists on your system and is on your PATH") return EXIT_CODE_UNINSTALL_VENV_NONEXISTENT venv = Venv(venv_dir, verbose=verbose) - bin_dir_app_paths = _get_venv_resource_paths( - "app", venv, venv.bin_path, local_bin_dir - ) + bin_dir_app_paths = _get_venv_resource_paths("app", venv, venv.bin_path, local_bin_dir) man_dir_paths = set() for man_section in MAN_SECTIONS: - man_dir_paths |= _get_venv_resource_paths( - "man", venv, venv.man_path / man_section, local_man_dir / man_section - ) + man_dir_paths |= _get_venv_resource_paths("man", venv, venv.man_path / man_section, local_man_dir / man_section) for path in bin_dir_app_paths | man_dir_paths: try: diff --git a/src/pipx/commands/upgrade.py b/src/pipx/commands/upgrade.py index 19ab9423a5..1530a30cdf 100644 --- a/src/pipx/commands/upgrade.py +++ b/src/pipx/commands/upgrade.py @@ -26,9 +26,7 @@ def _upgrade_package( package_metadata = venv.package_metadata[package_name] if package_metadata.package_or_url is None: - raise PipxError( - f"Internal Error: package {package_name} has corrupt pipx metadata." 
- ) + raise PipxError(f"Internal Error: package {package_name} has corrupt pipx metadata.") package_or_url = parse_specifier_for_upgrade(package_metadata.package_or_url) old_version = package_metadata.package_version @@ -56,9 +54,7 @@ def _upgrade_package( force=force, suffix=package_metadata.suffix, ) - expose_resources_globally( - "man", constants.LOCAL_MAN_DIR, package_metadata.man_paths, force=force - ) + expose_resources_globally("man", constants.LOCAL_MAN_DIR, package_metadata.man_paths, force=force) if package_metadata.include_dependencies: for _, app_paths in package_metadata.app_paths_of_dependencies.items(): @@ -70,9 +66,7 @@ def _upgrade_package( suffix=package_metadata.suffix, ) for _, man_paths in package_metadata.man_paths_of_dependencies.items(): - expose_resources_globally( - "man", constants.LOCAL_MAN_DIR, man_paths, force=force - ) + expose_resources_globally("man", constants.LOCAL_MAN_DIR, man_paths, force=force) if old_version == new_version: if upgrading_all: @@ -194,10 +188,7 @@ def upgrade_all( venvs_upgraded = 0 for venv_dir in venv_container.iter_venv_dirs(): venv = Venv(venv_dir, verbose=verbose) - if ( - venv_dir.name in skip - or "--editable" in venv.pipx_metadata.main_package.pip_args - ): + if venv_dir.name in skip or "--editable" in venv.pipx_metadata.main_package.pip_args: continue try: venvs_upgraded += _upgrade_venv( @@ -215,13 +206,10 @@ def upgrade_all( logger.error(f"{e}\n") if venvs_upgraded == 0: - print( - f"Versions did not change after running 'pipx upgrade' for each package {sleep}" - ) + print(f"Versions did not change after running 'pipx upgrade' for each package {sleep}") if venv_error: raise PipxError( - "\nSome packages encountered errors during upgrade.\n" - " See specific error messages above.", + "\nSome packages encountered errors during upgrade.\n" " See specific error messages above.", wrap_message=False, ) diff --git a/src/pipx/constants.py b/src/pipx/constants.py index 4fee116bb8..dbe3a707cc 100644 --- a/src/pipx/constants.py +++ b/src/pipx/constants.py @@ -28,9 +28,7 @@ PIPX_TRASH_DIR = PIPX_HOME / "trash" PIPX_VENV_CACHEDIR = user_cache_path("pipx") -PIPX_SHARED_LIBS = Path( - os.environ.get("PIPX_SHARED_LIBS", DEFAULT_PIPX_SHARED_LIBS) -).resolve() +PIPX_SHARED_LIBS = Path(os.environ.get("PIPX_SHARED_LIBS", DEFAULT_PIPX_SHARED_LIBS)).resolve() PIPX_SHARED_PTH = "pipx_shared.pth" LOCAL_BIN_DIR = Path(os.environ.get("PIPX_BIN_DIR", DEFAULT_PIPX_BIN_DIR)).resolve() LOCAL_MAN_DIR = Path(os.environ.get("PIPX_MAN_DIR", DEFAULT_PIPX_MAN_DIR)).resolve() diff --git a/src/pipx/interpreter.py b/src/pipx/interpreter.py index 8eb0f727e3..0b29185d13 100644 --- a/src/pipx/interpreter.py +++ b/src/pipx/interpreter.py @@ -54,9 +54,7 @@ def _find_default_windows_python() -> str: # Special treatment to detect Windows Store stub. # https://twitter.com/zooba/status/1212454929379581952 - proc = subprocess.run( - [python, "-V"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=False - ) + proc = subprocess.run([python, "-V"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=False) if proc.returncode != 0: # Cover the 9009 return code pre-emptively. 
raise PipxError("No suitable Python found") diff --git a/src/pipx/main.py b/src/pipx/main.py index 094e1c1f3e..8755fef94e 100644 --- a/src/pipx/main.py +++ b/src/pipx/main.py @@ -140,11 +140,7 @@ def __init__(self, venv_container: VenvContainer) -> None: self.packages = [str(p.name) for p in sorted(venv_container.iter_venv_dirs())] def use(self, prefix: str, **kwargs: Any) -> List[str]: - return [ - f"{prefix}{x[len(prefix):]}" - for x in self.packages - if x.startswith(canonicalize_name(prefix)) - ] + return [f"{prefix}{x[len(prefix):]}" for x in self.packages if x.startswith(canonicalize_name(prefix))] def get_pip_args(parsed_args: Dict[str, str]) -> List[str]: @@ -267,17 +263,11 @@ def run_pipx_command(args: argparse.Namespace) -> ExitCode: # noqa: C901 force=args.force, ) elif args.command == "list": - return commands.list_packages( - venv_container, args.include_injected, args.json, args.short - ) + return commands.list_packages(venv_container, args.include_injected, args.json, args.short) elif args.command == "uninstall": - return commands.uninstall( - venv_dir, constants.LOCAL_BIN_DIR, constants.LOCAL_MAN_DIR, verbose - ) + return commands.uninstall(venv_dir, constants.LOCAL_BIN_DIR, constants.LOCAL_MAN_DIR, verbose) elif args.command == "uninstall-all": - return commands.uninstall_all( - venv_container, constants.LOCAL_BIN_DIR, constants.LOCAL_MAN_DIR, verbose - ) + return commands.uninstall_all(venv_container, constants.LOCAL_BIN_DIR, constants.LOCAL_MAN_DIR, verbose) elif args.command == "reinstall": return commands.reinstall( venv_dir=venv_dir, @@ -334,9 +324,7 @@ def add_pip_venv_args(parser: argparse.ArgumentParser) -> None: def add_include_dependencies(parser: argparse.ArgumentParser) -> None: - parser.add_argument( - "--include-deps", help="Include apps of dependent packages", action="store_true" - ) + parser.add_argument("--include-deps", help="Include apps of dependent packages", action="store_true") def _add_install(subparsers: argparse._SubParsersAction) -> None: @@ -375,10 +363,7 @@ def _add_install(subparsers: argparse._SubParsersAction) -> None: p.add_argument( "--preinstall", action="append", - help=( - "Optional packages to be installed into the Virtual Environment before " - "installing the main package." - ), + help=("Optional packages to be installed into the Virtual Environment before " "installing the main package."), ) add_pip_venv_args(p) @@ -578,9 +563,7 @@ def _add_list(subparsers: argparse._SubParsersAction) -> None: help="Show packages injected into the main app's environment", ) g = p.add_mutually_exclusive_group() - g.add_argument( - "--json", action="store_true", help="Output rich data in json format." 
- ) + g.add_argument("--json", action="store_true", help="Output rich data in json format.") g.add_argument("--short", action="store_true", help="List packages only.") p.add_argument("--verbose", action="store_true") @@ -621,9 +604,7 @@ def _add_run(subparsers: argparse._SubParsersAction) -> None: help="app/package name and any arguments to be passed to it", default=[], ) - p.add_argument( - "--path", action="store_true", help="Interpret app name as a local path" - ) + p.add_argument("--path", action="store_true", help="Interpret app name as a local path") p.add_argument( "--pypackages", action="store_true", @@ -671,10 +652,7 @@ def _add_runpip(subparsers, venv_completer: VenvCompleter) -> None: def _add_ensurepath(subparsers: argparse._SubParsersAction) -> None: p = subparsers.add_parser( "ensurepath", - help=( - "Ensure directories necessary for pipx operation are in your " - "PATH environment variable." - ), + help=("Ensure directories necessary for pipx operation are in your " "PATH environment variable."), description=( "Ensure directory where pipx stores apps is in your " "PATH environment variable. Also if pipx was installed via " @@ -711,9 +689,7 @@ def _add_environment(subparsers: argparse._SubParsersAction) -> None: """ ), ) - p.add_argument( - "--value", "-v", metavar="VARIABLE", help="Print the value of the variable." - ) + p.add_argument("--value", "-v", metavar="VARIABLE", help="Print the value of the variable.") def get_command_parser() -> argparse.ArgumentParser: @@ -728,9 +704,7 @@ def get_command_parser() -> argparse.ArgumentParser: ) parser.man_short_description = PIPX_DESCRIPTION.splitlines()[1] # type: ignore - subparsers = parser.add_subparsers( - dest="command", description="Get help for commands with pipx COMMAND --help" - ) + subparsers = parser.add_subparsers(dest="command", description="Get help for commands with pipx COMMAND --help") _add_install(subparsers) _add_uninject(subparsers, completer_venvs.use) @@ -893,9 +867,7 @@ def check_args(parsed_pipx_args: argparse.Namespace) -> None: # since we would like app to be required but not in a separate argparse # add_argument, we implement our own missing required arg error if not parsed_pipx_args.app_with_args: - parsed_pipx_args.subparser.error( - "the following arguments are required: app" - ) + parsed_pipx_args.subparser.error("the following arguments are required: app") def cli() -> ExitCode: diff --git a/src/pipx/package_specifier.py b/src/pipx/package_specifier.py index c4f47bcd88..26ced4f471 100644 --- a/src/pipx/package_specifier.py +++ b/src/pipx/package_specifier.py @@ -103,9 +103,7 @@ def _parse_specifier(package_spec: str) -> ParsedPackage: ) -def package_or_url_from_pep508( - requirement: Requirement, remove_version_specifiers: bool = False -) -> str: +def package_or_url_from_pep508(requirement: Requirement, remove_version_specifiers: bool = False) -> str: requirement.marker = None requirement.name = canonicalize_name(requirement.name) if remove_version_specifiers: @@ -113,9 +111,7 @@ def package_or_url_from_pep508( return str(requirement) -def _parsed_package_to_package_or_url( - parsed_package: ParsedPackage, remove_version_specifiers: bool -) -> str: +def _parsed_package_to_package_or_url(parsed_package: ParsedPackage, remove_version_specifiers: bool) -> str: if parsed_package.valid_pep508 is not None: if parsed_package.valid_pep508.marker is not None: logger.warning( @@ -142,9 +138,7 @@ def _parsed_package_to_package_or_url( return package_or_url -def parse_specifier_for_install( - package_spec: str, 
pip_args: List[str] -) -> Tuple[str, List[str]]: +def parse_specifier_for_install(package_spec: str, pip_args: List[str]) -> Tuple[str, List[str]]: """Return package_or_url and pip_args suitable for pip install Specifically: @@ -153,9 +147,7 @@ def parse_specifier_for_install( * Convert local paths to absolute paths """ parsed_package = _parse_specifier(package_spec) - package_or_url = _parsed_package_to_package_or_url( - parsed_package, remove_version_specifiers=False - ) + package_or_url = _parsed_package_to_package_or_url(parsed_package, remove_version_specifiers=False) if "--editable" in pip_args and not parsed_package.valid_local_path: logger.warning( pipx_wrap( @@ -180,9 +172,7 @@ def parse_specifier_for_metadata(package_spec: str) -> str: * Convert local paths to absolute paths """ parsed_package = _parse_specifier(package_spec) - package_or_url = _parsed_package_to_package_or_url( - parsed_package, remove_version_specifiers=False - ) + package_or_url = _parsed_package_to_package_or_url(parsed_package, remove_version_specifiers=False) return package_or_url @@ -195,9 +185,7 @@ def parse_specifier_for_upgrade(package_spec: str) -> str: * Convert local paths to absolute paths """ parsed_package = _parse_specifier(package_spec) - package_or_url = _parsed_package_to_package_or_url( - parsed_package, remove_version_specifiers=True - ) + package_or_url = _parsed_package_to_package_or_url(parsed_package, remove_version_specifiers=True) return package_or_url diff --git a/src/pipx/pipx_metadata_file.py b/src/pipx/pipx_metadata_file.py index b5e3ea7cc0..196b43a602 100644 --- a/src/pipx/pipx_metadata_file.py +++ b/src/pipx/pipx_metadata_file.py @@ -83,9 +83,7 @@ def to_dict(self) -> Dict[str, Any]: "main_package": self.main_package._asdict(), "python_version": self.python_version, "venv_args": self.venv_args, - "injected_packages": { - name: data._asdict() for (name, data) in self.injected_packages.items() - }, + "injected_packages": {name: data._asdict() for (name, data) in self.injected_packages.items()}, "pipx_metadata_version": self.__METADATA_VERSION__, } @@ -96,9 +94,7 @@ def _convert_legacy_metadata(self, metadata_dict: Dict[str, Any]) -> Dict[str, A main_package_data = metadata_dict["main_package"] if main_package_data["package"] != self.venv_dir.name: # handle older suffixed packages gracefully - main_package_data["suffix"] = self.venv_dir.name.replace( - main_package_data["package"], "" - ) + main_package_data["suffix"] = self.venv_dir.name.replace(main_package_data["package"], "") return metadata_dict else: raise PipxError( @@ -131,9 +127,7 @@ def _validate_before_write(self) -> None: def write(self) -> None: self._validate_before_write() try: - with open( - self.venv_dir / PIPX_INFO_FILENAME, "w", encoding="utf-8" - ) as pipx_metadata_fh: + with open(self.venv_dir / PIPX_INFO_FILENAME, "w", encoding="utf-8") as pipx_metadata_fh: json.dump( self.to_dict(), pipx_metadata_fh, @@ -157,9 +151,7 @@ def write(self) -> None: def read(self, verbose: bool = False) -> None: try: with open(self.venv_dir / PIPX_INFO_FILENAME, "rb") as pipx_metadata_fh: - self.from_dict( - json.load(pipx_metadata_fh, object_hook=_json_decoder_object_hook) - ) + self.from_dict(json.load(pipx_metadata_fh, object_hook=_json_decoder_object_hook)) except OSError: # Reset self if problem reading if verbose: logger.warning( diff --git a/src/pipx/shared_libs.py b/src/pipx/shared_libs.py index 0ec1f5e3a6..366e84aea6 100644 --- a/src/pipx/shared_libs.py +++ b/src/pipx/shared_libs.py @@ -42,9 +42,7 @@ def 
site_packages(self) -> Path: def create(self, verbose: bool = False) -> None: if not self.is_valid: with animate("creating shared libraries", not verbose): - create_process = run_subprocess( - [DEFAULT_PYTHON, "-m", "venv", "--clear", self.root] - ) + create_process = run_subprocess([DEFAULT_PYTHON, "-m", "venv", "--clear", self.root]) subprocess_post_check(create_process) # ignore installed packages to ensure no unexpected patches from the OS vendor @@ -83,9 +81,7 @@ def needs_upgrade(self) -> bool: self.has_been_logged_this_run = True return time_since_last_update_sec > SHARED_LIBS_MAX_AGE_SEC - def upgrade( - self, *, pip_args: Optional[List[str]] = None, verbose: bool = False - ) -> None: + def upgrade(self, *, pip_args: Optional[List[str]] = None, verbose: bool = False) -> None: if not self.is_valid: self.create(verbose=verbose) return diff --git a/src/pipx/util.py b/src/pipx/util.py index c32686a781..b2c340818f 100644 --- a/src/pipx/util.py +++ b/src/pipx/util.py @@ -64,15 +64,11 @@ def rmdir(path: Path, safe_rm: bool = True) -> None: # move it to be deleted later if it still exists if path.is_dir(): if safe_rm: - logger.warning( - f"Failed to delete {path}. Will move it to a temp folder to delete later." - ) + logger.warning(f"Failed to delete {path}. Will move it to a temp folder to delete later.") path.rename(_get_trash_file(path)) else: - logger.warning( - f"Failed to delete {path}. You may need to delete it manually." - ) + logger.warning(f"Failed to delete {path}. You may need to delete it manually.") def mkdir(path: Path) -> None: @@ -194,18 +190,14 @@ def run_subprocess( return completed_process -def subprocess_post_check( - completed_process: "subprocess.CompletedProcess[str]", raise_error: bool = True -) -> None: +def subprocess_post_check(completed_process: "subprocess.CompletedProcess[str]", raise_error: bool = True) -> None: if completed_process.returncode: if completed_process.stdout is not None: print(completed_process.stdout, file=sys.stdout, end="") if completed_process.stderr is not None: print(completed_process.stderr, file=sys.stderr, end="") if raise_error: - raise PipxError( - f"{' '.join([str(x) for x in completed_process.args])!r} failed" - ) + raise PipxError(f"{' '.join([str(x) for x in completed_process.args])!r} failed") else: logger.info(f"{' '.join(completed_process.args)!r} failed") @@ -298,16 +290,12 @@ def analyze_pip_output(pip_stdout: str, pip_stderr: str) -> None: failed_to_build_str = "\n ".join(failed_build_stdout) plural_str = "s" if len(failed_build_stdout) > 1 else "" print("", file=sys.stderr) - logger.error( - f"pip failed to build package{plural_str}:\n {failed_to_build_str}" - ) + logger.error(f"pip failed to build package{plural_str}:\n {failed_to_build_str}") elif failed_build_stderr: failed_to_build_str = "\n ".join(failed_build_stderr) plural_str = "s" if len(failed_build_stderr) > 1 else "" print("", file=sys.stderr) - logger.error( - f"pip seemed to fail to build package{plural_str}:\n {failed_to_build_str}" - ) + logger.error(f"pip seemed to fail to build package{plural_str}:\n {failed_to_build_str}") elif last_collecting_dep is not None: print("", file=sys.stderr) logger.error(f"pip seemed to fail to build package:\n {last_collecting_dep}") @@ -319,13 +307,9 @@ def analyze_pip_output(pip_stdout: str, pip_stderr: str) -> None: print_categories = [x.category for x in relevant_searches] relevants_saved_filtered = relevants_saved.copy() - while (len(print_categories) > 1) and ( - len(relevants_saved_filtered) > max_relevant_errors - ): 
+ while (len(print_categories) > 1) and (len(relevants_saved_filtered) > max_relevant_errors): print_categories.pop(-1) - relevants_saved_filtered = [ - x for x in relevants_saved if x[1] in print_categories - ] + relevants_saved_filtered = [x for x in relevants_saved if x[1] in print_categories] for relevant_saved in relevants_saved_filtered: print(f" {relevant_saved[0]}", file=sys.stderr) @@ -339,9 +323,7 @@ def subprocess_post_check_handle_pip_error( # Save STDOUT and STDERR to file in pipx/logs/ if pipx.constants.pipx_log_file is None: raise PipxError("Pipx internal error: No log_file present.") - pip_error_file = pipx.constants.pipx_log_file.parent / ( - pipx.constants.pipx_log_file.stem + "_pip_errors.log" - ) + pip_error_file = pipx.constants.pipx_log_file.parent / (pipx.constants.pipx_log_file.stem + "_pip_errors.log") with pip_error_file.open("w", encoding="utf-8") as pip_error_fh: print("PIP STDOUT", file=pip_error_fh) print("----------", file=pip_error_fh) @@ -352,10 +334,7 @@ def subprocess_post_check_handle_pip_error( if completed_process.stderr is not None: print(completed_process.stderr, file=pip_error_fh, end="") - logger.error( - "Fatal error from pip prevented installation. Full pip output in file:\n" - f" {pip_error_file}" - ) + logger.error("Fatal error from pip prevented installation. Full pip output in file:\n" f" {pip_error_file}") analyze_pip_output(completed_process.stdout, completed_process.stderr) @@ -377,12 +356,7 @@ def exec_app( if extra_python_paths is not None: env["PYTHONPATH"] = os.path.pathsep.join( - extra_python_paths - + ( - os.getenv("PYTHONPATH", "").split(os.path.pathsep) - if os.getenv("PYTHONPATH") - else [] - ) + extra_python_paths + (os.getenv("PYTHONPATH", "").split(os.path.pathsep) if os.getenv("PYTHONPATH") else []) ) # make sure we show cursor again before handing over control @@ -413,9 +387,7 @@ def full_package_description(package_name: str, package_spec: str) -> str: return f"{package_name} from spec {package_spec!r}" -def pipx_wrap( - text: str, subsequent_indent: str = "", keep_newlines: bool = False -) -> str: +def pipx_wrap(text: str, subsequent_indent: str = "", keep_newlines: bool = False) -> str: """Dedent, strip, wrap to shell width. 
Don't break on hyphens, only spaces""" minimum_width = 40 width = max(shutil.get_terminal_size((80, 40)).columns, minimum_width) - 2 diff --git a/src/pipx/venv.py b/src/pipx/venv.py index a888b73b4a..15355c408c 100644 --- a/src/pipx/venv.py +++ b/src/pipx/venv.py @@ -84,9 +84,7 @@ def verify_shared_libs(self) -> None: class Venv: """Abstraction for a virtual environment with various useful methods for pipx""" - def __init__( - self, path: Path, *, verbose: bool = False, python: str = DEFAULT_PYTHON - ) -> None: + def __init__(self, path: Path, *, verbose: bool = False, python: str = DEFAULT_PYTHON) -> None: self.root = path self.python = python self.bin_path, self.python_path, self.man_path = get_venv_paths(self.root) @@ -121,10 +119,7 @@ def __init__( @property def name(self) -> str: if self.pipx_metadata.main_package.package is not None: - venv_name = ( - f"{self.pipx_metadata.main_package.package}" - f"{self.pipx_metadata.main_package.suffix}" - ) + venv_name = f"{self.pipx_metadata.main_package.package}" f"{self.pipx_metadata.main_package.suffix}" else: venv_name = self.root.name return venv_name @@ -142,9 +137,7 @@ def uses_shared_libs(self) -> bool: def package_metadata(self) -> Dict[str, PackageInfo]: return_dict = self.pipx_metadata.injected_packages.copy() if self.pipx_metadata.main_package.package is not None: - return_dict[ - self.pipx_metadata.main_package.package - ] = self.pipx_metadata.main_package + return_dict[self.pipx_metadata.main_package.package] = self.pipx_metadata.main_package return return_dict @property @@ -156,9 +149,7 @@ def main_package_name(self) -> str: else: return self.pipx_metadata.main_package.package - def create_venv( - self, venv_args: List[str], pip_args: List[str], override_shared: bool = False - ) -> None: + def create_venv(self, venv_args: List[str], pip_args: List[str], override_shared: bool = False) -> None: """ override_shared -- Override installing shared libraries to the pipx shared directory (default False) """ @@ -238,9 +229,7 @@ def install_package( package_or_url = fix_package_name(package_or_url, package_name) # check syntax and clean up spec and pip_args - (package_or_url, pip_args) = parse_specifier_for_install( - package_or_url, pip_args - ) + (package_or_url, pip_args) = parse_specifier_for_install(package_or_url, pip_args) with animate( f"installing {full_package_description(package_name, package_or_url)}", @@ -262,9 +251,7 @@ def install_package( pip_process = run_subprocess(cmd, log_stdout=False, log_stderr=False) subprocess_post_check_handle_pip_error(pip_process) if pip_process.returncode: - raise PipxError( - f"Error installing {full_package_description(package_name, package_or_url)}." 
- ) + raise PipxError(f"Error installing {full_package_description(package_name, package_or_url)}.") self._update_package_metadata( package_name=package_name, @@ -286,9 +273,7 @@ def install_package( wrap_message=False, ) - def install_unmanaged_packages( - self, requirements: List[str], pip_args: List[str] - ) -> None: + def install_unmanaged_packages(self, requirements: List[str], pip_args: List[str]) -> None: """Install packages in the venv, but do not record them in the metadata.""" # Note: We want to install everything at once, as that lets @@ -313,9 +298,7 @@ def install_unmanaged_packages( raise PipxError(f"Error installing {', '.join(requirements)}.") def install_package_no_deps(self, package_or_url: str, pip_args: List[str]) -> str: - with animate( - f"determining package name from {package_or_url!r}", self.do_animation - ): + with animate(f"determining package name from {package_or_url!r}", self.do_animation): old_package_set = self.list_installed_packages() cmd = [ "--no-input", @@ -350,16 +333,10 @@ def install_package_no_deps(self, package_or_url: str, pip_args: List[str]) -> s return package_name - def get_venv_metadata_for_package( - self, package_name: str, package_extras: Set[str] - ) -> VenvMetadata: + def get_venv_metadata_for_package(self, package_name: str, package_extras: Set[str]) -> VenvMetadata: data_start = time.time() - venv_metadata = inspect_venv( - package_name, package_extras, self.bin_path, self.python_path, self.man_path - ) - logger.info( - f"get_venv_metadata_for_package: {1e3*(time.time()-data_start):.0f}ms" - ) + venv_metadata = inspect_venv(package_name, package_extras, self.bin_path, self.python_path, self.man_path) + logger.info(f"get_venv_metadata_for_package: {1e3*(time.time()-data_start):.0f}ms") return venv_metadata def _update_package_metadata( @@ -372,9 +349,7 @@ def _update_package_metadata( is_main_package: bool, suffix: str = "", ) -> None: - venv_package_metadata = self.get_venv_metadata_for_package( - package_name, get_extras(package_or_url) - ) + venv_package_metadata = self.get_venv_metadata_for_package(package_name, get_extras(package_or_url)) package_info = PackageInfo( package=package_name, package_or_url=parse_specifier_for_metadata(package_or_url), @@ -404,8 +379,7 @@ def get_python_version(self) -> str: def list_installed_packages(self, not_required=False) -> Set[str]: cmd_run = run_subprocess( - [str(self.python_path), "-m", "pip", "list", "--format=json"] - + (["--not-required"] if not_required else []) + [str(self.python_path), "-m", "pip", "list", "--format=json"] + (["--not-required"] if not_required else []) ) pip_list = json.loads(cmd_run.stdout.strip()) return {x["name"] for x in pip_list} @@ -413,9 +387,7 @@ def list_installed_packages(self, not_required=False) -> Set[str]: def _find_entry_point(self, app: str) -> Optional[EntryPoint]: if not self.python_path.exists(): return None - dists = Distribution.discover( - name=self.main_package_name, path=[str(get_site_packages(self.python_path))] - ) + dists = Distribution.discover(name=self.main_package_name, path=[str(get_site_packages(self.python_path))]) for dist in dists: for ep in dist.entry_points: if ep.group == "pipx.run" and ep.name == app: @@ -435,11 +407,7 @@ def run_app(self, app: str, filename: str, app_args: List[str]) -> NoReturn: match = _entry_point_value_pattern.match(entry_point.value) assert match is not None, "invalid entry point" module, attr = match.group("module", "attr") - code = ( - f"import sys, {module}\n" - f"sys.argv[0] = {entry_point.name!r}\n" - 
f"sys.exit({module}.{attr}())\n" - ) + code = f"import sys, {module}\n" f"sys.argv[0] = {entry_point.name!r}\n" f"sys.exit({module}.{attr}())\n" exec_app([str(self.python_path), "-c", code] + app_args) def has_app(self, app: str, filename: str) -> bool: @@ -447,16 +415,12 @@ def has_app(self, app: str, filename: str) -> bool: return True return (self.bin_path / filename).is_file() - def upgrade_package_no_metadata( - self, package_name: str, pip_args: List[str] - ) -> None: + def upgrade_package_no_metadata(self, package_name: str, pip_args: List[str]) -> None: with animate( f"upgrading {full_package_description(package_name, package_name)}", self.do_animation, ): - pip_process = self._run_pip( - ["--no-input", "install"] + pip_args + ["--upgrade", package_name] - ) + pip_process = self._run_pip(["--no-input", "install"] + pip_args + ["--upgrade", package_name]) subprocess_post_check(pip_process) def upgrade_package( @@ -473,9 +437,7 @@ def upgrade_package( f"upgrading {full_package_description(package_name, package_or_url)}", self.do_animation, ): - pip_process = self._run_pip( - ["--no-input", "install"] + pip_args + ["--upgrade", package_or_url] - ) + pip_process = self._run_pip(["--no-input", "install"] + pip_args + ["--upgrade", package_or_url]) subprocess_post_check(pip_process) self._update_package_metadata( @@ -498,9 +460,7 @@ def run_pip_get_exit_code(self, cmd: List[str]) -> ExitCode: cmd = [str(self.python_path), "-m", "pip"] + cmd if not self.verbose: cmd.append("-q") - returncode = run_subprocess( - cmd, capture_stdout=False, capture_stderr=False - ).returncode + returncode = run_subprocess(cmd, capture_stdout=False, capture_stderr=False).returncode if returncode: cmd_str = " ".join(str(c) for c in cmd) logger.error(f"{cmd_str!r} failed") diff --git a/src/pipx/venv_inspect.py b/src/pipx/venv_inspect.py index afc27268e6..9dab0ffe8a 100644 --- a/src/pipx/venv_inspect.py +++ b/src/pipx/venv_inspect.py @@ -38,9 +38,7 @@ class VenvMetadata(NamedTuple): python_version: str -def get_dist( - package: str, distributions: Collection[metadata.Distribution] -) -> Optional[metadata.Distribution]: +def get_dist(package: str, distributions: Collection[metadata.Distribution]) -> Optional[metadata.Distribution]: """Find matching distribution in the canonicalized sense.""" for dist in distributions: if canonicalize_name(dist.metadata["name"]) == canonicalize_name(package): @@ -48,9 +46,7 @@ def get_dist( return None -def get_package_dependencies( - dist: metadata.Distribution, extras: Set[str], env: Dict[str, str] -) -> List[Requirement]: +def get_package_dependencies(dist: metadata.Distribution, extras: Set[str], env: Dict[str, str]) -> List[Requirement]: eval_env = env.copy() # Add an empty extra to enable evaluation of non-extra markers if not extras: @@ -84,9 +80,7 @@ def get_apps_from_entry_points(dist: metadata.Distribution, bin_path: Path): return app_names -def get_resources_from_dist_files( - dist: metadata.Distribution, bin_path: Path, man_path: Path -): +def get_resources_from_dist_files(dist: metadata.Distribution, bin_path: Path, man_path: Path): app_names = set() man_names = set() # search installed files @@ -101,19 +95,14 @@ def get_resources_from_dist_files( try: if dist_file_path.parent.samefile(bin_path): app_names.add(path.name) - if ( - dist_file_path.parent.name in MAN_SECTIONS - and dist_file_path.parent.parent.samefile(man_path) - ): + if dist_file_path.parent.name in MAN_SECTIONS and dist_file_path.parent.parent.samefile(man_path): 
man_names.add(str(Path(dist_file_path.parent.name) / path.name)) except FileNotFoundError: pass return app_names, man_names -def get_resources_from_inst_files( - dist: metadata.Distribution, bin_path: Path, man_path: Path -): +def get_resources_from_inst_files(dist: metadata.Distribution, bin_path: Path, man_path: Path): app_names = set() man_names = set() # not sure what is found here @@ -124,21 +113,14 @@ def get_resources_from_inst_files( try: if inst_file_path.parent.samefile(bin_path): app_names.add(inst_file_path.name) - if ( - inst_file_path.parent.name in MAN_SECTIONS - and inst_file_path.parent.parent.samefile(man_path) - ): - man_names.add( - str(Path(inst_file_path.parent.name) / inst_file_path.name) - ) + if inst_file_path.parent.name in MAN_SECTIONS and inst_file_path.parent.parent.samefile(man_path): + man_names.add(str(Path(inst_file_path.parent.name) / inst_file_path.name)) except FileNotFoundError: pass return app_names, man_names -def get_resources( - dist: metadata.Distribution, bin_path: Path, man_path: Path -) -> Tuple[List[str], List[str]]: +def get_resources(dist: metadata.Distribution, bin_path: Path, man_path: Path) -> Tuple[List[str], List[str]]: app_names = set() man_names = set() app_names_ep = get_apps_from_entry_points(dist, bin_path) @@ -161,9 +143,7 @@ def _dfs_package_resources( # Initialize: we have already visited root dep_visited = {canonicalize_name(package_req.name): True} - dependencies = get_package_dependencies( - dist, package_req.extras, venv_inspect_info.env - ) + dependencies = get_package_dependencies(dist, package_req.extras, venv_inspect_info.env) for dep_req in dependencies: dep_name = canonicalize_name(dep_req.name) if dep_name in dep_visited: @@ -172,20 +152,12 @@ def _dfs_package_resources( dep_dist = get_dist(dep_req.name, venv_inspect_info.distributions) if dep_dist is None: - raise PipxError( - f"Pipx Internal Error: cannot find package {dep_req.name!r} metadata." - ) - app_names, man_names = get_resources( - dep_dist, venv_inspect_info.bin_path, venv_inspect_info.man_path - ) + raise PipxError(f"Pipx Internal Error: cannot find package {dep_req.name!r} metadata.") + app_names, man_names = get_resources(dep_dist, venv_inspect_info.bin_path, venv_inspect_info.man_path) if app_names: - app_paths_of_dependencies[dep_name] = [ - venv_inspect_info.bin_path / name for name in app_names - ] + app_paths_of_dependencies[dep_name] = [venv_inspect_info.bin_path / name for name in app_names] if man_names: - man_paths_of_dependencies[dep_name] = [ - venv_inspect_info.man_path / name for name in man_names - ] + man_paths_of_dependencies[dep_name] = [venv_inspect_info.man_path / name for name in man_names] # recursively search for more dep_visited[dep_name] = True app_paths_of_dependencies, man_paths_of_dependencies = _dfs_package_resources( @@ -290,9 +262,7 @@ def inspect_venv( root_req = Requirement(root_package_name) root_req.extras = root_package_extras - (venv_sys_path, venv_env, venv_python_version) = fetch_info_in_venv( - venv_python_path - ) + (venv_sys_path, venv_env, venv_python_version) = fetch_info_in_venv(venv_python_path) # Collect the generator created from metadata.distributions() # (see `itertools.chain.from_iterable`) into a tuple because we @@ -311,9 +281,7 @@ def inspect_venv( root_dist = get_dist(root_req.name, venv_inspect_info.distributions) if root_dist is None: - raise PipxError( - f"Pipx Internal Error: cannot find package {root_req.name!r} metadata." 
- ) + raise PipxError(f"Pipx Internal Error: cannot find package {root_req.name!r} metadata.") app_paths_of_dependencies, man_paths_of_dependencies = _dfs_package_resources( root_dist, root_req, @@ -329,17 +297,12 @@ def inspect_venv( app_paths = _windows_extra_app_paths(app_paths) for dep in app_paths_of_dependencies: - apps_of_dependencies += [ - dep_path.name for dep_path in app_paths_of_dependencies[dep] - ] + apps_of_dependencies += [dep_path.name for dep_path in app_paths_of_dependencies[dep]] if WINDOWS: - app_paths_of_dependencies[dep] = _windows_extra_app_paths( - app_paths_of_dependencies[dep] - ) + app_paths_of_dependencies[dep] = _windows_extra_app_paths(app_paths_of_dependencies[dep]) for dep in man_paths_of_dependencies: man_pages_of_dependencies += [ - str(Path(dep_path.parent.name) / dep_path.name) - for dep_path in man_paths_of_dependencies[dep] + str(Path(dep_path.parent.name) / dep_path.name) for dep_path in man_paths_of_dependencies[dep] ] venv_metadata = VenvMetadata( diff --git a/tests/conftest.py b/tests/conftest.py index b7e08012c8..af7a29c833 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,14 @@ import os import shutil +import socket import subprocess import sys +from contextlib import closing +from http import HTTPStatus from pathlib import Path +from typing import Iterator +from urllib.error import HTTPError, URLError +from urllib.request import urlopen import pytest # type: ignore @@ -13,6 +19,11 @@ PIPX_TESTS_PACKAGE_LIST_DIR = Path("testdata/tests_packages") +@pytest.fixture(scope="session") +def root() -> Path: + return Path(__file__).parents[1] + + def pytest_addoption(parser): parser.addoption( "--all-packages", @@ -41,9 +52,7 @@ def pytest_configure(config): config.option.markexpr = new_markexpr -def pipx_temp_env_helper( - pipx_shared_dir, tmp_path, monkeypatch, request, utils_temp_dir -): +def pipx_temp_env_helper(pipx_shared_dir, tmp_path, monkeypatch, request, utils_temp_dir, pypi): home_dir = Path(tmp_path) / "subdir" / "pipxhome" bin_dir = Path(tmp_path) / "otherdir" / "pipxbindir" man_dir = Path(tmp_path) / "otherdir" / "pipxmandir" @@ -67,7 +76,7 @@ def pipx_temp_env_helper( # macOS needs /usr/bin in PATH to compile certain packages, but # applications in /usr/bin cause test_install.py tests to raise warnings # which make tests fail (e.g. on Github ansible apps exist in /usr/bin) - monkeypatch.setenv("PATH_ORIG", str(bin_dir) + os.pathsep + os.getenv("PATH")) + monkeypatch.setenv("PATH_ORIG", str(bin_dir) + os.pathsep + os.environ["PATH"]) monkeypatch.setenv("PATH_TEST", str(bin_dir)) monkeypatch.setenv("PATH", str(bin_dir) + os.pathsep + str(utils_temp_dir)) # On Windows, monkeypatch pipx.commands.common._can_symlink_cache to @@ -75,33 +84,25 @@ def pipx_temp_env_helper( # cannot use symlinks, even if we're running as administrator and # symlinks are actually possible. if WIN: - monkeypatch.setitem( - commands.common._can_symlink_cache, constants.LOCAL_BIN_DIR, False - ) - monkeypatch.setitem( - commands.common._can_symlink_cache, constants.LOCAL_MAN_DIR, False - ) + monkeypatch.setitem(commands.common._can_symlink_cache, constants.LOCAL_BIN_DIR, False) + monkeypatch.setitem(commands.common._can_symlink_cache, constants.LOCAL_MAN_DIR, False) if not request.config.option.net_pypiserver: # IMPORTANT: use 127.0.0.1 not localhost # Using localhost on Windows creates enormous slowdowns # (for some reason--perhaps IPV6/IPV4 tries, timeouts?) 
- monkeypatch.setenv( - "PIP_INDEX_URL", "http://username:password@127.0.0.1:8080/simple" - ) + monkeypatch.setenv("PIP_INDEX_URL", pypi) @pytest.fixture(scope="session", autouse=True) -def pipx_local_pypiserver(request): +def pipx_local_pypiserver(request, root: Path, tmp_path_factory) -> Iterator[str]: """Starts local pypiserver once per session unless --net-pypiserver was passed to pytest""" if request.config.option.net_pypiserver: # need both yield and return because other codepath has both - yield + yield "" return - pipx_cache_dir = ( - request.config.invocation_params.dir / PIPX_TESTS_DIR / "package_cache" - ) + pipx_cache_dir = root / PIPX_TESTS_DIR / "package_cache" check_test_packages_cmd = [ sys.executable, "scripts/update_package_cache.py", @@ -115,7 +116,7 @@ def pipx_local_pypiserver(request): str(PIPX_TESTS_PACKAGE_LIST_DIR), str(pipx_cache_dir), ] - check_test_packages_process = subprocess.run(check_test_packages_cmd, check=False) + check_test_packages_process = subprocess.run(check_test_packages_cmd, check=False, cwd=root) if check_test_packages_process.returncode != 0: raise Exception( f"Directory {str(pipx_cache_dir)} does not contain all " @@ -124,36 +125,31 @@ def pipx_local_pypiserver(request): f"{' '.join(update_test_packages_cmd)}" ) - with open( - request.config.invocation_params.dir / PIPX_TESTS_DIR / "pypiserver.log", "w" - ) as pypiserver_err_fh: - pypiserver_htpasswd = str( - request.config.invocation_params.dir / PIPX_TESTS_DIR / "htpasswd" - ) - - from passlib.apache import HtpasswdFile # type: ignore - - ht = HtpasswdFile(pypiserver_htpasswd, new=True) - ht.set_password("username", "password") - ht.save() - - pypiserver_process = subprocess.Popen( - [ - "pypi-server", - "run", - "--verbose", - "--authenticate=update,download,list", - f"--passwords={pypiserver_htpasswd}", - "--disable-fallback", - str(pipx_cache_dir / f"{sys.version_info[0]}.{sys.version_info[1]}"), - ], - universal_newlines=True, - stderr=pypiserver_err_fh, - ) - - yield - - pypiserver_process.terminate() + def find_free_port(): + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + s.bind(("", 0)) + return s.getsockname()[1] + + server_log = tmp_path_factory.mktemp("log") / "pypiserver.log" + if server_log.exists(): + server_log.unlink() + port = find_free_port() + os.environ["NO_PROXY"] = "127.0.0.1" + cache = str(pipx_cache_dir / f"{sys.version_info[0]}.{sys.version_info[1]}") + server = str(Path(sys.executable).parent / "pypi-server") + cmd = [server, "run", "--verbose", "--disable-fallback", "--host", "127.0.0.1", "--port", str(port), cache] + cmd += ["--log-file", str(server_log)] + pypiserver_process = subprocess.Popen(cmd, cwd=root) + url = f"http://127.0.0.1:{port}/simple/" + while True: + try: + with urlopen(url) as response: + if response.code == HTTPStatus.OK: + break + except (URLError, HTTPError): + continue + yield url + pypiserver_process.terminate() @pytest.fixture(scope="session") @@ -167,7 +163,9 @@ def utils_temp_dir(tmp_path_factory): tmp_path = tmp_path_factory.mktemp("session_utilstempdir") utils = ["git"] for util in utils: - util_path = Path(shutil.which(util)) + at_path = shutil.which(util) + assert at_path is not None + util_path = Path(at_path) try: (tmp_path / util_path.name).symlink_to(util_path) except FileExistsError: @@ -176,9 +174,7 @@ def utils_temp_dir(tmp_path_factory): @pytest.fixture -def pipx_temp_env( - tmp_path, monkeypatch, pipx_session_shared_dir, request, utils_temp_dir -): +def pipx_temp_env(tmp_path, monkeypatch, 
pipx_session_shared_dir, request, utils_temp_dir, pipx_local_pypiserver): """Sets up temporary paths for pipx to install into. Shared libs are setup once per session, all other pipx dirs, constants are @@ -187,13 +183,11 @@ def pipx_temp_env( Also adds environment variables as necessary to make pip installations seamless. """ - pipx_temp_env_helper( - pipx_session_shared_dir, tmp_path, monkeypatch, request, utils_temp_dir - ) + pipx_temp_env_helper(pipx_session_shared_dir, tmp_path, monkeypatch, request, utils_temp_dir, pipx_local_pypiserver) @pytest.fixture -def pipx_ultra_temp_env(tmp_path, monkeypatch, request, utils_temp_dir): +def pipx_ultra_temp_env(tmp_path, monkeypatch, request, utils_temp_dir, pipx_local_pypiserver): """Sets up temporary paths for pipx to install into. Fully temporary environment, every test function starts as if pipx has @@ -203,4 +197,4 @@ def pipx_ultra_temp_env(tmp_path, monkeypatch, request, utils_temp_dir): seamless. """ shared_dir = Path(tmp_path) / "shareddir" - pipx_temp_env_helper(shared_dir, tmp_path, monkeypatch, request, utils_temp_dir) + pipx_temp_env_helper(shared_dir, tmp_path, monkeypatch, request, utils_temp_dir, pipx_local_pypiserver) diff --git a/tests/helpers.py b/tests/helpers.py index a5151ffd89..be0fcbd439 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -76,17 +76,13 @@ def unwrap_log_text(log_text: str): return re.sub(r"\n\s+", " ", log_text) -def _mock_legacy_package_info( - modern_package_info: Dict[str, Any], metadata_version: str -) -> Dict[str, Any]: +def _mock_legacy_package_info(modern_package_info: Dict[str, Any], metadata_version: str) -> Dict[str, Any]: if metadata_version == "0.2": mock_package_info_template = MOCK_PACKAGE_INFO_0_2 elif metadata_version == "0.1": mock_package_info_template = MOCK_PACKAGE_INFO_0_1 else: - raise Exception( - f"Internal Test Error: Unknown metadata_version={metadata_version}" - ) + raise Exception(f"Internal Test Error: Unknown metadata_version={metadata_version}") mock_package_info = {} for key in mock_package_info_template: @@ -114,9 +110,7 @@ def mock_legacy_venv(venv_name: str, metadata_version: Optional[str] = None) -> os.remove(venv_dir / "pipx_metadata.json") return else: - raise Exception( - f"Internal Test Error: Unknown metadata_version={metadata_version}" - ) + raise Exception(f"Internal Test Error: Unknown metadata_version={metadata_version}") modern_metadata = pipx_metadata_file.PipxMetadata(venv_dir).to_dict() @@ -124,9 +118,7 @@ def mock_legacy_venv(venv_name: str, metadata_version: Optional[str] = None) -> mock_pipx_metadata = {} for key in mock_pipx_metadata_template: if key == "main_package": - mock_pipx_metadata[key] = _mock_legacy_package_info( - modern_metadata[key], metadata_version=metadata_version - ) + mock_pipx_metadata[key] = _mock_legacy_package_info(modern_metadata[key], metadata_version=metadata_version) if key == "injected_packages": mock_pipx_metadata[key] = {} for injected in modern_metadata[key]: @@ -135,9 +127,7 @@ def mock_legacy_venv(venv_name: str, metadata_version: Optional[str] = None) -> ) else: mock_pipx_metadata[key] = modern_metadata[key] - mock_pipx_metadata["pipx_metadata_version"] = mock_pipx_metadata_template[ - "pipx_metadata_version" - ] + mock_pipx_metadata["pipx_metadata_version"] = mock_pipx_metadata_template["pipx_metadata_version"] # replicate pipx_metadata_file.PipxMetadata.write() with open(venv_dir / "pipx_metadata.json", "w") as pipx_metadata_fh: @@ -167,10 +157,7 @@ def create_package_info_ref(venv_name, package_name, 
pipx_venvs_dir, **field_ove include_apps=field_overrides.get("include_apps", True), include_dependencies=field_overrides.get("include_dependencies", False), apps=PKG[package_name]["apps"], - app_paths=[ - pipx_venvs_dir / venv_name / venv_bin_dir / app - for app in PKG[package_name]["apps"] - ], + app_paths=[pipx_venvs_dir / venv_name / venv_bin_dir / app for app in PKG[package_name]["apps"]], apps_of_dependencies=PKG[package_name]["apps_of_dependencies"], app_paths_of_dependencies=field_overrides.get("app_paths_of_dependencies", {}), man_pages=PKG[package_name].get("man_pages", []), @@ -178,9 +165,7 @@ def create_package_info_ref(venv_name, package_name, pipx_venvs_dir, **field_ove pipx_venvs_dir / venv_name / "share" / "man" / man_page for man_page in PKG[package_name].get("man_pages", []) ], - man_pages_of_dependencies=PKG[package_name].get( - "man_pages_of_dependencies", [] - ), + man_pages_of_dependencies=PKG[package_name].get("man_pages_of_dependencies", []), man_paths_of_dependencies=field_overrides.get("man_paths_of_dependencies", {}), package_version=PKG[package_name]["spec"].split("==")[-1], ) diff --git a/tests/package_info.py b/tests/package_info.py index 98357be372..9bae56b3f0 100644 --- a/tests/package_info.py +++ b/tests/package_info.py @@ -69,9 +69,7 @@ def _exe_if_win(apps): "b2": { "spec": "b2==3.12.0", "apps": _exe_if_win(["b2"]), - "apps_of_dependencies": _exe_if_win( - ["docutils", "rst2ansi", "tabulate", "tqdm", "normalizer"] - ) + "apps_of_dependencies": _exe_if_win(["docutils", "rst2ansi", "tabulate", "tqdm", "normalizer"]) + [ "activate-global-python-argcomplete", "python-argcomplete-check-easy-install-script", @@ -195,22 +193,17 @@ def _exe_if_win(apps): "cloudtoken": { "spec": "cloudtoken==0.1.707", "apps": ["awstoken", "cloudtoken", "cloudtoken.app", "cloudtoken_proxy.sh"], - "apps_of_dependencies": _exe_if_win(["flask", "keyring", "normalizer"]) - + ["jp.py"], + "apps_of_dependencies": _exe_if_win(["flask", "keyring", "normalizer"]) + ["jp.py"], }, "coala": { "spec": "coala==0.11.0", - "apps": _exe_if_win( - ["coala", "coala-ci", "coala-delete-orig", "coala-format", "coala-json"] - ), + "apps": _exe_if_win(["coala", "coala-ci", "coala-delete-orig", "coala-format", "coala-json"]), "apps_of_dependencies": _exe_if_win(["normalizer", "pygmentize"]) + ["unidiff"], }, "cookiecutter": { "spec": "cookiecutter==2.4.0", "apps": _exe_if_win(["cookiecutter"]), - "apps_of_dependencies": _exe_if_win( - ["chardetect", "normalizer", "markdown-it", "pygmentize", "slugify"] - ), + "apps_of_dependencies": _exe_if_win(["chardetect", "normalizer", "markdown-it", "pygmentize", "slugify"]), }, "cython": { "spec": "cython==0.29.21", @@ -220,8 +213,7 @@ def _exe_if_win(apps): "datasette": { "spec": "datasette==0.50.2", "apps": _exe_if_win(["datasette"]), - "apps_of_dependencies": _exe_if_win(["httpx", "hupper", "uvicorn"]) - + ["pint-convert"], + "apps_of_dependencies": _exe_if_win(["httpx", "hupper", "uvicorn"]) + ["pint-convert"], }, "diffoscope": { "spec": "diffoscope==154", @@ -285,16 +277,12 @@ def _exe_if_win(apps): "howdoi": { "spec": "howdoi==2.0.20", "apps": _exe_if_win(["howdoi"]), - "apps_of_dependencies": _exe_if_win( - ["markdown-it", "keep", "normalizer", "pygmentize"] - ), + "apps_of_dependencies": _exe_if_win(["markdown-it", "keep", "normalizer", "pygmentize"]), }, "httpie": { "spec": "httpie==3.2.2", "apps": _exe_if_win(["http", "httpie", "https"]), - "apps_of_dependencies": _exe_if_win( - ["markdown-it", "normalizer", "pygmentize"] - ), + "apps_of_dependencies": 
_exe_if_win(["markdown-it", "normalizer", "pygmentize"]), "man_pages": [ str(Path("man1") / "http.1"), str(Path("man1") / "httpie.1"), @@ -305,8 +293,7 @@ def _exe_if_win(apps): "hyde": { "spec": "hyde==0.8.9", "apps": _exe_if_win(["hyde"]), - "apps_of_dependencies": _exe_if_win(["markdown_py", "pygmentize"]) - + ["smartypants"], + "apps_of_dependencies": _exe_if_win(["markdown_py", "pygmentize"]) + ["smartypants"], }, "ipython": { "spec": "ipython==7.16.1", @@ -393,9 +380,7 @@ def _exe_if_win(apps): "kaggle": { "spec": "kaggle==1.5.16", "apps": _exe_if_win(["kaggle"]), - "apps_of_dependencies": list( - set(_exe_if_win(["slugify", "normalizer", "tqdm"])) - ), + "apps_of_dependencies": list(set(_exe_if_win(["slugify", "normalizer", "tqdm"]))), }, "kibitzr": { "spec": "kibitzr==7.0.5", @@ -416,9 +401,7 @@ def _exe_if_win(apps): "lektor": { "spec": "Lektor==3.3.10", "apps": _exe_if_win(["lektor"]), - "apps_of_dependencies": _exe_if_win( - ["filetype", "flask", "pybabel", "normalizer", "slugify", "watchmedo"] - ) + "apps_of_dependencies": _exe_if_win(["filetype", "flask", "pybabel", "normalizer", "slugify", "watchmedo"]) + ["EXIF.py"], }, "localstack": { @@ -563,9 +546,7 @@ def _exe_if_win(apps): "pelican-themes", ] ), - "apps_of_dependencies": _exe_if_win( - ["docutils", "markdown-it", "pygmentize", "unidecode"] - ) + "apps_of_dependencies": _exe_if_win(["docutils", "markdown-it", "pygmentize", "unidecode"]) + [ "rst2html.py", # docutils NO_EXE "rst2html4.py", # docutils NO_EXE @@ -715,12 +696,8 @@ def _exe_if_win(apps): }, "sphinx": { "spec": "Sphinx==7.2.6", - "apps": _exe_if_win( - ["sphinx-apidoc", "sphinx-autogen", "sphinx-build", "sphinx-quickstart"] - ), - "apps_of_dependencies": _exe_if_win( - ["docutils", "pybabel", "normalizer", "pygmentize"] - ) + "apps": _exe_if_win(["sphinx-apidoc", "sphinx-autogen", "sphinx-build", "sphinx-quickstart"]), + "apps_of_dependencies": _exe_if_win(["docutils", "pybabel", "normalizer", "pygmentize"]) + [ "rst2html.py", # docutils NO_EXE "rst2html4.py", # docutils NO_EXE @@ -751,8 +728,7 @@ def _exe_if_win(apps): "taguette": { "spec": "taguette==0.9.2", "apps": _exe_if_win(["taguette"]), - "apps_of_dependencies": _exe_if_win(["alembic", "mako-render"]) - + ["vba_extract.py"], + "apps_of_dependencies": _exe_if_win(["alembic", "mako-render"]) + ["vba_extract.py"], }, "term2048": { "spec": "term2048==0.2.7", diff --git a/tests/test_animate.py b/tests/test_animate.py index b57816e28e..08e9b69140 100644 --- a/tests/test_animate.py +++ b/tests/test_animate.py @@ -40,9 +40,7 @@ def check_animate_output( print("check_animate_output() Test Debug Output:") if len(captured.err) < chars_to_test: - print( - "Not enough captured characters--Likely need to increase extra_animate_time" - ) + print("Not enough captured characters--Likely need to increase extra_animate_time") print(f"captured characters: {len(captured.err)}") print(f"chars_to_test: {chars_to_test}") for i in range(0, chars_to_test, 40): @@ -82,12 +80,8 @@ def test_line_lengths_emoji(capsys, monkeypatch, env_columns, expected_frame_mes monkeypatch.setenv("COLUMNS", str(env_columns)) frames_to_test = 4 - frame_strings = [ - f"\r{CLEAR_LINE}{x} {expected_frame_message}" for x in EMOJI_ANIMATION_FRAMES - ] - check_animate_output( - capsys, TEST_STRING_40_CHAR, frame_strings, EMOJI_FRAME_PERIOD, frames_to_test - ) + frame_strings = [f"\r{CLEAR_LINE}{x} {expected_frame_message}" for x in EMOJI_ANIMATION_FRAMES] + check_animate_output(capsys, TEST_STRING_40_CHAR, frame_strings, EMOJI_FRAME_PERIOD, 
frames_to_test) @pytest.mark.parametrize( @@ -98,9 +92,7 @@ def test_line_lengths_emoji(capsys, monkeypatch, env_columns, expected_frame_mes (45, f"{TEST_STRING_40_CHAR}"), ], ) -def test_line_lengths_no_emoji( - capsys, monkeypatch, env_columns, expected_frame_message -): +def test_line_lengths_no_emoji(capsys, monkeypatch, env_columns, expected_frame_message): # EMOJI_SUPPORT and stderr_is_tty is set only at import animate.py # since we are already after that, we must override both here monkeypatch.setattr(pipx.animate, "stderr_is_tty", True) @@ -109,9 +101,7 @@ def test_line_lengths_no_emoji( monkeypatch.setenv("COLUMNS", str(env_columns)) frames_to_test = 2 - frame_strings = [ - f"\r{CLEAR_LINE}{expected_frame_message}{x}" for x in NONEMOJI_ANIMATION_FRAMES - ] + frame_strings = [f"\r{CLEAR_LINE}{expected_frame_message}{x}" for x in NONEMOJI_ANIMATION_FRAMES] check_animate_output( capsys, @@ -122,9 +112,7 @@ def test_line_lengths_no_emoji( ) -@pytest.mark.parametrize( - "env_columns,stderr_is_tty", [(0, True), (8, True), (16, True), (17, False)] -) +@pytest.mark.parametrize("env_columns,stderr_is_tty", [(0, True), (8, True), (16, True), (17, False)]) def test_env_no_animate(capsys, monkeypatch, env_columns, stderr_is_tty): monkeypatch.setattr(pipx.animate, "stderr_is_tty", stderr_is_tty) monkeypatch.setenv("COLUMNS", str(env_columns)) diff --git a/tests/test_inject.py b/tests/test_inject.py index 63817976d2..5bae92bebd 100644 --- a/tests/test_inject.py +++ b/tests/test_inject.py @@ -41,15 +41,9 @@ def test_inject_include_apps(pipx_temp_env, capsys, with_suffix): install_args = [f"--suffix={suffix}"] assert not run_pipx_cli(["install", "pycowsay", *install_args]) - assert not run_pipx_cli( - ["inject", f"pycowsay{suffix}", PKG["black"]["spec"], "--include-deps"] - ) + assert not run_pipx_cli(["inject", f"pycowsay{suffix}", PKG["black"]["spec"], "--include-deps"]) if suffix: - assert run_pipx_cli( - ["inject", "pycowsay", PKG["black"]["spec"], "--include-deps"] - ) + assert run_pipx_cli(["inject", "pycowsay", PKG["black"]["spec"], "--include-deps"]) - assert not run_pipx_cli( - ["inject", f"pycowsay{suffix}", PKG["black"]["spec"], "--include-deps"] - ) + assert not run_pipx_cli(["inject", f"pycowsay{suffix}", PKG["black"]["spec"], "--include-deps"]) diff --git a/tests/test_install.py b/tests/test_install.py index ebe4ad9bdc..1ece9bdc4f 100644 --- a/tests/test_install.py +++ b/tests/test_install.py @@ -16,9 +16,7 @@ def test_help_text(monkeypatch, capsys): mock_exit = mock.Mock(side_effect=ValueError("raised in test to exit early")) - with mock.patch.object(sys, "exit", mock_exit), pytest.raises( - ValueError, match="raised in test to exit early" - ): + with mock.patch.object(sys, "exit", mock_exit), pytest.raises(ValueError, match="raised in test to exit early"): run_pipx_cli(["install", "--help"]) captured = capsys.readouterr() assert "apps you can run from anywhere" in captured.out @@ -45,9 +43,7 @@ def install_package(capsys, pipx_temp_env, caplog, package, package_name=""): "package_name, package_spec", [("pycowsay", "pycowsay"), ("black", PKG["black"]["spec"])], ) -def test_install_easy_packages( - capsys, pipx_temp_env, caplog, package_name, package_spec -): +def test_install_easy_packages(capsys, pipx_temp_env, caplog, package_name, package_spec): install_package(capsys, pipx_temp_env, caplog, package_spec, package_name) @@ -60,9 +56,7 @@ def test_install_easy_packages( ("shell-functools", PKG["shell-functools"]["spec"]), ], ) -def test_install_tricky_packages( - capsys, 
pipx_temp_env, caplog, package_name, package_spec -): +def test_install_tricky_packages(capsys, pipx_temp_env, caplog, package_name, package_spec): if os.getenv("FAST"): pytest.skip("skipping slow tests") if sys.platform.startswith("win") and package_name == "ansible": @@ -79,9 +73,7 @@ def test_install_tricky_packages( ("nox", "https://github.com/wntrblm/nox/archive/2022.1.7.zip"), ], ) -def test_install_package_specs( - capsys, pipx_temp_env, caplog, package_name, package_spec -): +def test_install_package_specs(capsys, pipx_temp_env, caplog, package_name, package_spec): install_package(capsys, pipx_temp_env, caplog, package_spec, package_name) @@ -111,10 +103,7 @@ def test_install_same_package_twice_no_force(pipx_temp_env, capsys): assert not run_pipx_cli(["install", "pycowsay"]) assert not run_pipx_cli(["install", "pycowsay"]) captured = capsys.readouterr() - assert ( - "'pycowsay' already seems to be installed. Not modifying existing installation" - in captured.out - ) + assert "'pycowsay' already seems to be installed. Not modifying existing installation" in captured.out def test_include_deps(pipx_temp_env, capsys): @@ -129,9 +118,7 @@ def test_include_deps(pipx_temp_env, capsys): ("tox-ini-fmt", PKG["tox-ini-fmt"]["spec"]), ], ) -def test_name_tricky_characters( - caplog, capsys, pipx_temp_env, package_name, package_spec -): +def test_name_tricky_characters(caplog, capsys, pipx_temp_env, package_name, package_spec): install_package(capsys, pipx_temp_env, caplog, package_spec, package_name) @@ -141,10 +128,8 @@ def test_extra(pipx_temp_env, capsys): assert f"- {app_name('tox')}\n" in captured.out -def test_install_local_extra(pipx_temp_env, capsys): - assert not run_pipx_cli( - ["install", f"{TEST_DATA_PATH}/local_extras[cow]", "--include-deps"] - ) +def test_install_local_extra(pipx_temp_env, capsys, monkeypatch, root): + assert not run_pipx_cli(["install", str(root / f"{TEST_DATA_PATH}/local_extras[cow]"), "--include-deps"]) captured = capsys.readouterr() assert f"- {app_name('pycowsay')}\n" in captured.out assert f"- {Path('man6/pycowsay.6')}\n" in captured.out @@ -152,18 +137,14 @@ def test_install_local_extra(pipx_temp_env, capsys): def test_path_warning(pipx_temp_env, capsys, monkeypatch, caplog): assert not run_pipx_cli(["install", "pycowsay"]) - assert "is not on your PATH environment variable" not in unwrap_log_text( - caplog.text - ) + assert "is not on your PATH environment variable" not in unwrap_log_text(caplog.text) monkeypatch.setenv("PATH", "") assert not run_pipx_cli(["install", "pycowsay", "--force"]) assert "is not on your PATH environment variable" in unwrap_log_text(caplog.text) -def test_existing_symlink_points_to_existing_wrong_location_warning( - pipx_temp_env, caplog, capsys -): +def test_existing_symlink_points_to_existing_wrong_location_warning(pipx_temp_env, caplog, capsys): if sys.platform.startswith("win"): pytest.skip("pipx does not use symlinks on Windows") @@ -178,9 +159,7 @@ def test_existing_symlink_points_to_existing_wrong_location_warning( assert "is not on your PATH environment variable" not in captured.err -def test_existing_man_page_symlink_points_to_existing_wrong_location_warning( - pipx_temp_env, caplog, capsys -): +def test_existing_man_page_symlink_points_to_existing_wrong_location_warning(pipx_temp_env, caplog, capsys): if sys.platform.startswith("win"): pytest.skip("pipx does not use symlinks on Windows") @@ -280,17 +259,15 @@ def test_install_pip_failure(pipx_temp_env, capsys): assert "Fatal error from pip" in captured.err - 
-    pip_log_file_match = re.search(
-        r"Full pip output in file:\s+(\S.+)$", captured.err, re.MULTILINE
-    )
+    pip_log_file_match = re.search(r"Full pip output in file:\s+(\S.+)$", captured.err, re.MULTILINE)
     assert pip_log_file_match
     assert Path(pip_log_file_match[1]).exists()
     assert re.search(r"pip (failed|seemed to fail) to build package", captured.err)
 
 
-def test_install_local_archive(pipx_temp_env, monkeypatch, capsys):
-    monkeypatch.chdir(Path(TEST_DATA_PATH) / "local_extras")
+def test_install_local_archive(pipx_temp_env, monkeypatch, capsys, root):
+    monkeypatch.chdir(root / TEST_DATA_PATH / "local_extras")
     subprocess.run([sys.executable, "-m", "pip", "wheel", "."], check=True)
     assert not run_pipx_cli(["install", "repeatme-0.1-py3-none-any.whl"])
@@ -299,9 +276,7 @@ def test_install_local_archive(pipx_temp_env, monkeypatch, capsys):
 
 
 def test_force_install_changes(pipx_temp_env, capsys):
-    assert not run_pipx_cli(
-        ["install", "https://github.com/wntrblm/nox/archive/2022.1.7.zip"]
-    )
+    assert not run_pipx_cli(["install", "https://github.com/wntrblm/nox/archive/2022.1.7.zip"])
     captured = capsys.readouterr()
     assert "2022.1.7" in captured.out
diff --git a/tests/test_install_all_packages.py b/tests/test_install_all_packages.py
index 83d9f7a20a..7079f4cafe 100644
--- a/tests/test_install_all_packages.py
+++ b/tests/test_install_all_packages.py
@@ -139,9 +139,7 @@ class ModuleGlobalsData:
     def __init__(self):
         self.errors_path = Path(".")
         self.install_data: List[PackageData] = []
-        self.py_version_display = "Python {0.major}.{0.minor}.{0.micro}".format(
-            sys.version_info
-        )
+        self.py_version_display = "Python {0.major}.{0.minor}.{0.micro}".format(sys.version_info)
         self.py_version_short = "{0.major}.{0.minor}".format(sys.version_info)
         self.report_path = Path(".")
         self.sys_platform = sys.platform
@@ -278,21 +276,15 @@ def verify_installed_resources(
         return True
 
     reported_resources_re = re.search(
-        r"These "
-        + resource_name_long
-        + r" are now globally available\n((?: - [^\n]+\n)*)",
+        r"These " + resource_name_long + r" are now globally available\n((?: - [^\n]+\n)*)",
         captured_outerr.out,
         re.DOTALL,
     )
     if reported_resources_re:
-        reported_resources = [
-            x.strip()[2:] for x in reported_resources_re.group(1).strip().split("\n")
-        ]
+        reported_resources = [x.strip()[2:] for x in reported_resources_re.group(1).strip().split("\n")]
         if set(reported_resources) != set(package_resources):
             resource_success = False
-            print(
-                "verify_install: REPORTED APPS DO NOT MATCH PACKAGE", file=test_error_fh
-            )
+            print("verify_install: REPORTED APPS DO NOT MATCH PACKAGE", file=test_error_fh)
             print(
                 f"pipx reported %s: {reported_resources}" % resource_name,
                 file=test_error_fh,
@@ -329,37 +321,26 @@ def verify_post_install(
             print("verify_install: WARNING IN CAPLOG:", file=test_error_fh)
             print(record.message, file=test_error_fh)
         if "Fatal error from pip prevented installation" in record.message:
-            pip_error_file_re = re.search(
-                r"pip output in file:\s+(\S.+)$", record.message
-            )
+            pip_error_file_re = re.search(r"pip output in file:\s+(\S.+)$", record.message)
             if pip_error_file_re:
                 pip_error_file = Path(pip_error_file_re.group(1))
 
     if install_success and PKG[package_name].get("apps", None) is not None:
-        app_success = verify_installed_resources(
-            "app", captured_outerr, package_name, test_error_fh, deps=deps
-        )
+        app_success = verify_installed_resources("app", captured_outerr, package_name, test_error_fh, deps=deps)
     else:
         app_success = True
 
     if install_success and (
         PKG[package_name].get("man_pages", None) is not None
         or PKG[package_name].get("man_pages_of_dependencies", None) is not None
     ):
-        man_success = verify_installed_resources(
-            "man", captured_outerr, package_name, test_error_fh, deps=deps
-        )
+        man_success = verify_installed_resources("man", captured_outerr, package_name, test_error_fh, deps=deps)
     else:
         man_success = True
 
-    pip_pass = not (
-        (pipx_exit_code != 0)
-        and f"Error installing {package_name}" in captured_outerr.err
-    )
+    pip_pass = not ((pipx_exit_code != 0) and f"Error installing {package_name}" in captured_outerr.err)
     pipx_pass: Optional[bool]
     if pip_pass:
-        pipx_pass = (
-            install_success and not caplog_problem and app_success and man_success
-        )
+        pipx_pass = install_success and not caplog_problem and app_success and man_success
     else:
         pipx_pass = None
@@ -378,8 +359,7 @@ def print_error_report(
     print("\n\n", file=errors_fh)
     print("=" * 79, file=errors_fh)
     print(
-        f"{package_spec:24}{test_type:16}{module_globals.sys_platform:16}"
-        f"{module_globals.py_version_display}",
+        f"{package_spec:24}{test_type:16}{module_globals.sys_platform:16}" f"{module_globals.py_version_display}",
         file=errors_fh,
     )
     print("\nSTDOUT:", file=errors_fh)
@@ -412,14 +392,11 @@ def install_and_verify(
     test_error_fh = io.StringIO()
 
-    monkeypatch.setenv(
-        "PATH", os.getenv("PATH_TEST" if using_clear_path else "PATH_ORIG")
-    )
+    monkeypatch.setenv("PATH", os.getenv("PATH_TEST" if using_clear_path else "PATH_ORIG"))
     start_time = time.time()
     pipx_exit_code = run_pipx_cli(
-        ["install", package_data.package_spec, "--verbose"]
-        + (["--include-deps"] if deps else [])
+        ["install", package_data.package_spec, "--verbose"] + (["--include-deps"] if deps else [])
     )
     elapsed_time = time.time() - start_time
     captured = capsys.readouterr()
diff --git a/tests/test_interpreter.py b/tests/test_interpreter.py
index 1c2f3c92de..bbd013abe5 100644
--- a/tests/test_interpreter.py
+++ b/tests/test_interpreter.py
@@ -1,6 +1,7 @@
 import shutil
 import subprocess
 import sys
+from unittest.mock import Mock
 
 import pytest  # type: ignore
 
@@ -24,6 +25,7 @@ def which(name):
     monkeypatch.setattr(pipx.interpreter, "has_venv", lambda: venv)
     monkeypatch.setattr(shutil, "which", which)
     python_path = find_py_launcher_python(f"{major}.{minor}")
+    assert python_path is not None
    assert f"{major}.{minor}" in python_path or f"{major}{minor}" in python_path
     assert python_path.endswith("python.exe")
@@ -75,10 +77,7 @@ def __init__(self, rc, out):
         self.out = out
 
     def __call__(self, *args, **kw):
-        class Ret:
-            pass
-
-        ret = Ret()
+        ret = Mock()
         ret.returncode = self.rc
         ret.stdout = self.out
         return ret
diff --git a/tests/test_list.py b/tests/test_list.py
index 87271288f6..9b6ef401fa 100644
--- a/tests/test_list.py
+++ b/tests/test_list.py
@@ -90,9 +90,7 @@ def test_list_json(pipx_temp_env, capsys):
     assert sorted(json_parsed["venvs"].keys()) == ["pycowsay", "pylint"]
 
     # pycowsay venv
-    pycowsay_package_ref = create_package_info_ref(
-        "pycowsay", "pycowsay", pipx_venvs_dir
-    )
+    pycowsay_package_ref = create_package_info_ref("pycowsay", "pycowsay", pipx_venvs_dir)
     assert_package_metadata(
         PackageInfo(**json_parsed["venvs"]["pycowsay"]["metadata"]["main_package"]),
         pycowsay_package_ref,
@@ -104,32 +102,18 @@ def test_list_json(pipx_temp_env, capsys):
         "pylint",
         "pylint",
         pipx_venvs_dir,
-        **{
-            "app_paths_of_dependencies": {
-                "isort": [pipx_venvs_dir / "pylint" / venv_bin_dir / app_name("isort")]
-            }
-        },
+        **{"app_paths_of_dependencies": {"isort": [pipx_venvs_dir / "pylint" / venv_bin_dir / app_name("isort")]}},
     )
     assert_package_metadata(
         PackageInfo(**json_parsed["venvs"]["pylint"]["metadata"]["main_package"]),
         pylint_package_ref,
     )
 
-    assert sorted(
-        json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"].keys()
-    ) == ["isort"]
-    isort_package_ref = create_package_info_ref(
-        "pylint", "isort", pipx_venvs_dir, include_apps=False
-    )
+    assert sorted(json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"].keys()) == ["isort"]
+    isort_package_ref = create_package_info_ref("pylint", "isort", pipx_venvs_dir, include_apps=False)
     print(isort_package_ref)
-    print(
-        PackageInfo(
-            **json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"]["isort"]
-        )
-    )
+    print(PackageInfo(**json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"]["isort"]))
     assert_package_metadata(
-        PackageInfo(
-            **json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"]["isort"]
-        ),
+        PackageInfo(**json_parsed["venvs"]["pylint"]["metadata"]["injected_packages"]["isort"]),
         isort_package_ref,
     )
diff --git a/tests/test_main.py b/tests/test_main.py
index fa15e6fdc3..3a8bd53759 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -9,9 +9,7 @@ def test_help_text(monkeypatch, capsys):
     mock_exit = mock.Mock(side_effect=ValueError("raised in test to exit early"))
-    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(
-        ValueError, match="raised in test to exit early"
-    ):
+    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(ValueError, match="raised in test to exit early"):
         assert not run_pipx_cli(["--help"])
     captured = capsys.readouterr()
     assert "usage: pipx" in captured.out
@@ -19,9 +17,7 @@ def test_version(monkeypatch, capsys):
     mock_exit = mock.Mock(side_effect=ValueError("raised in test to exit early"))
-    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(
-        ValueError, match="raised in test to exit early"
-    ):
+    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(ValueError, match="raised in test to exit early"):
         assert not run_pipx_cli(["--version"])
     captured = capsys.readouterr()
     mock_exit.assert_called_with(0)
diff --git a/tests/test_package_specifier.py b/tests/test_package_specifier.py
index 6351d207c9..2dcdeabc10 100644
--- a/tests/test_package_specifier.py
+++ b/tests/test_package_specifier.py
@@ -52,6 +52,9 @@ def test_fix_package_name(package_spec_in, package_name, package_spec_out):
     assert fix_package_name(package_spec_in, package_name) == package_spec_out
 
 
+_ROOT = Path(__file__).parents[1]
+
+
 @pytest.mark.parametrize(
     "package_spec_in,package_or_url_correct,valid_spec",
     [
@@ -64,7 +67,7 @@ def test_fix_package_name(package_spec_in, package_name, package_spec_out):
         ('pipx==0.15.0;python_version>="3.6"', "pipx==0.15.0", True),
         ("pipx[extra1]", "pipx[extra1]", True),
         ("pipx[extra1, extra2]", "pipx[extra1,extra2]", True),
-        ("src/pipx", str(Path("src/pipx").resolve()), True),
+        ("src/pipx", str((_ROOT / "src" / "pipx").resolve()), True),
         (
             "git+https://github.com/cs01/nox.git@5ea70723e9e6",
             "git+https://github.com/cs01/nox.git@5ea70723e9e6",
@@ -108,9 +111,8 @@ def test_fix_package_name(package_spec_in, package_name, package_spec_out):
         ),
     ],
 )
-def test_parse_specifier_for_metadata(
-    package_spec_in, package_or_url_correct, valid_spec
-):
+def test_parse_specifier_for_metadata(package_spec_in, package_or_url_correct, valid_spec, monkeypatch, root):
+    monkeypatch.chdir(root)
     if valid_spec:
         package_or_url = parse_specifier_for_metadata(package_spec_in)
         assert package_or_url == package_or_url_correct
@@ -133,7 +135,7 @@ def test_parse_specifier_for_metadata(
         ('pipx==0.15.0;python_version>="3.6"', "pipx", True),
         ("pipx[extra1]", "pipx[extra1]", True),
         ("pipx[extra1, extra2]", "pipx[extra1,extra2]", True),
-        ("src/pipx", str(Path("src/pipx").resolve()), True),
+        ("src/pipx", str((_ROOT / "src" / "pipx").resolve()), True),
         (
             "git+https://github.com/cs01/nox.git@5ea70723e9e6",
             "git+https://github.com/cs01/nox.git@5ea70723e9e6",
@@ -177,9 +179,8 @@ def test_parse_specifier_for_metadata(
         ),
     ],
 )
-def test_parse_specifier_for_upgrade(
-    package_spec_in, package_or_url_correct, valid_spec
-):
+def test_parse_specifier_for_upgrade(package_spec_in, package_or_url_correct, valid_spec, monkeypatch, root):
+    monkeypatch.chdir(root)
     if valid_spec:
         package_or_url = parse_specifier_for_upgrade(package_spec_in)
         assert package_or_url == package_or_url_correct
@@ -267,9 +268,10 @@ def test_parse_specifier_for_install(
     package_spec_expected,
     pip_args_expected,
     warning_str,
+    monkeypatch,
+    root,
 ):
-    [package_or_url_out, pip_args_out] = parse_specifier_for_install(
-        package_spec_in, pip_args_in
-    )
+    monkeypatch.chdir(root)
+    [package_or_url_out, pip_args_out] = parse_specifier_for_install(package_spec_in, pip_args_in)
     if warning_str is not None:
         assert warning_str in caplog.text
diff --git a/tests/test_pipx_metadata_file.py b/tests/test_pipx_metadata_file.py
index 5de8c520e9..a6e9c7f7c4 100644
--- a/tests/test_pipx_metadata_file.py
+++ b/tests/test_pipx_metadata_file.py
@@ -92,9 +92,7 @@ def test_package_install(monkeypatch, tmp_path, pipx_temp_env):
     assert (pipx_venvs_dir / "pycowsay" / "pipx_metadata.json").is_file()
 
     pipx_metadata = PipxMetadata(pipx_venvs_dir / "pycowsay")
-    pycowsay_package_ref = create_package_info_ref(
-        "pycowsay", "pycowsay", pipx_venvs_dir
-    )
+    pycowsay_package_ref = create_package_info_ref("pycowsay", "pycowsay", pipx_venvs_dir)
     assert_package_metadata(pipx_metadata.main_package, pycowsay_package_ref)
     assert pipx_metadata.injected_packages == {}
@@ -109,7 +107,5 @@ def test_package_inject(monkeypatch, tmp_path, pipx_temp_env):
 
     pipx_metadata = PipxMetadata(pipx_venvs_dir / "pycowsay")
     assert pipx_metadata.injected_packages.keys() == {"black"}
-    black_package_ref = create_package_info_ref(
-        "pycowsay", "black", pipx_venvs_dir, include_apps=False
-    )
+    black_package_ref = create_package_info_ref("pycowsay", "black", pipx_venvs_dir, include_apps=False)
     assert_package_metadata(pipx_metadata.injected_packages["black"], black_package_ref)
diff --git a/tests/test_reinstall.py b/tests/test_reinstall.py
index 13b8ee7df8..bcfc2cc5ab 100644
--- a/tests/test_reinstall.py
+++ b/tests/test_reinstall.py
@@ -27,9 +27,7 @@ def test_reinstall_suffix(pipx_temp_env, capsys):
     suffix = "_x"
     assert not run_pipx_cli(["install", "pycowsay", f"--suffix={suffix}"])
-    assert not run_pipx_cli(
-        ["reinstall", "--python", sys.executable, f"pycowsay{suffix}"]
-    )
+    assert not run_pipx_cli(["reinstall", "--python", sys.executable, f"pycowsay{suffix}"])
 
 
 @pytest.mark.parametrize("metadata_version", ["0.1"])
@@ -38,9 +36,7 @@ def test_reinstall_suffix_legacy_venv(pipx_temp_env, capsys, metadata_version):
     suffix = "_x"
     assert not run_pipx_cli(["install", "pycowsay", f"--suffix={suffix}"])
     mock_legacy_venv(f"pycowsay{suffix}", metadata_version=metadata_version)
-    assert not run_pipx_cli(
-        ["reinstall", "--python", sys.executable, f"pycowsay{suffix}"]
-    )
+    assert not run_pipx_cli(["reinstall", "--python", sys.executable, f"pycowsay{suffix}"])
 
 
 def test_reinstall_specifier(pipx_temp_env, capsys):
diff --git a/tests/test_run.py b/tests/test_run.py
index d881beb327..934eccef7e 100644
--- a/tests/test_run.py
+++ b/tests/test_run.py
@@ -16,9 +16,7 @@ def test_help_text(pipx_temp_env, monkeypatch, capsys):
     mock_exit = mock.Mock(side_effect=ValueError("raised in test to exit early"))
-    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(
-        ValueError, match="raised in test to exit early"
-    ):
+    with mock.patch.object(sys, "exit", mock_exit), pytest.raises(ValueError, match="raised in test to exit early"):
         run_pipx_cli(["run", "--help"])
     captured = capsys.readouterr()
     assert "Download the latest version of a package" in captured.out
@@ -44,9 +42,7 @@ def run_pipx_cli_exit(pipx_cmd_list, assert_exit=None):
         assert sys_exit.value.code == assert_exit
 
 
-@pytest.mark.parametrize(
-    "package_name", ["pycowsay", "pycowsay==0.0.0.2", "pycowsay>=0.0.0.2"]
-)
+@pytest.mark.parametrize("package_name", ["pycowsay", "pycowsay==0.0.0.2", "pycowsay>=0.0.0.2"])
 @mock.patch("os.execvpe", new=execvpe_mock)
 def test_simple_run(pipx_temp_env, monkeypatch, capsys, package_name):
     run_pipx_cli_exit(["run", package_name, "--help"])
@@ -118,9 +114,7 @@ def test_run_script_from_internet(pipx_temp_env, capsys):
         (["--", "--", "pycowsay", "--"], ["--", "pycowsay", "--"]),
     ],
 )
-def test_appargs_doubledash(
-    pipx_temp_env, capsys, monkeypatch, input_run_args, expected_app_with_args
-):
+def test_appargs_doubledash(pipx_temp_env, capsys, monkeypatch, input_run_args, expected_app_with_args):
     parser = pipx.main.get_command_parser()
     monkeypatch.setattr(sys, "argv", ["pipx", "run"] + input_run_args)
     parsed_pipx_args = parser.parse_args()
@@ -167,18 +161,14 @@ def test_run_ensure_null_pythonpath():
     ],
 )
 @mock.patch("os.execvpe", new=execvpe_mock)
-def test_package_determination(
-    caplog, pipx_temp_env, package, package_or_url, app_appargs, skip_win
-):
+def test_package_determination(caplog, pipx_temp_env, package, package_or_url, app_appargs, skip_win):
     if sys.platform.startswith("win") and skip_win:
         # Skip packages with 'scripts' in setup.py that don't work on Windows
         pytest.skip()
 
     caplog.set_level(logging.INFO)
 
-    run_pipx_cli_exit(
-        ["run", "--verbose", "--spec", package_or_url, "--"] + app_appargs
-    )
+    run_pipx_cli_exit(["run", "--verbose", "--spec", package_or_url, "--"] + app_appargs)
 
     assert "Cannot determine package name" not in caplog.text
     assert f"Determined package name: {package}" in caplog.text
@@ -321,9 +311,7 @@ def test_run_script_by_relative_name(caplog, pipx_temp_env, monkeypatch, tmp_pat
     assert out.read_text() == test_str
 
 
-@pytest.mark.skipif(
-    not sys.platform.startswith("win"), reason="uses windows version format"
-)
+@pytest.mark.skipif(not sys.platform.startswith("win"), reason="uses windows version format")
 @mock.patch("os.execvpe", new=execvpe_mock)
 def test_run_with_windows_python_version(caplog, pipx_temp_env, tmp_path):
     script = tmp_path / "test.py"
@@ -337,8 +325,8 @@ def test_run_with_windows_python_version(caplog, pipx_temp_env, tmp_path):
             """
         ).strip()
     )
-    run_pipx_cli_exit(["run", script.as_uri(), "--python", "3.11"])
-    assert "3.11" in out.read_text()
+    run_pipx_cli_exit(["run", script.as_uri(), "--python", "3.12"])
+    assert "3.12" in out.read_text()
 
 
 @mock.patch("os.execvpe", new=execvpe_mock)
diff --git a/tests/test_shared_libs.py b/tests/test_shared_libs.py
index e9b4cc37bd..68b710b0fb 100644
--- a/tests/test_shared_libs.py
+++ b/tests/test_shared_libs.py
@@ -13,9 +13,7 @@
         (-shared_libs.SHARED_LIBS_MAX_AGE_SEC + 5 * 60, False),
     ],
 )
-def test_auto_update_shared_libs(
-    capsys, pipx_ultra_temp_env, mtime_minus_now, needs_upgrade
-):
+def test_auto_update_shared_libs(capsys, pipx_ultra_temp_env, mtime_minus_now, needs_upgrade):
     now = time.time()
     shared_libs.shared_libs.create(verbose=True)
     shared_libs.shared_libs.has_been_updated_this_run = False
diff --git a/tests/test_uninject.py b/tests/test_uninject.py
index 3e0531f24c..164b29ffb0 100644
--- a/tests/test_uninject.py
+++ b/tests/test_uninject.py
@@ -15,9 +15,7 @@ def test_uninject_simple(pipx_temp_env, capsys):
 
 
 def test_uninject_with_include_apps(pipx_temp_env, capsys, caplog):
     assert not run_pipx_cli(["install", "pycowsay"])
-    assert not run_pipx_cli(
-        ["inject", "pycowsay", PKG["black"]["spec"], "--include-deps", "--include-apps"]
-    )
+    assert not run_pipx_cli(["inject", "pycowsay", PKG["black"]["spec"], "--include-deps", "--include-apps"])
     assert not run_pipx_cli(["uninject", "pycowsay", "black", "--verbose"])
     assert "removed file" in caplog.text
@@ -25,9 +23,7 @@ def test_uninject_with_include_apps(pipx_temp_env, capsys, caplog):
 def test_uninject_leave_deps(pipx_temp_env, capsys, caplog):
     assert not run_pipx_cli(["install", "pycowsay"])
     assert not run_pipx_cli(["inject", "pycowsay", PKG["black"]["spec"]])
-    assert not run_pipx_cli(
-        ["uninject", "pycowsay", "black", "--leave-deps", "--verbose"]
-    )
+    assert not run_pipx_cli(["uninject", "pycowsay", "black", "--leave-deps", "--verbose"])
     captured = capsys.readouterr()
     assert "Uninjected package black from venv pycowsay" in captured.out
     assert "Dependencies of uninstalled package:" not in caplog.text
diff --git a/tests/test_uninstall.py b/tests/test_uninstall.py
index 585d619fc6..a48f7f97f5 100644
--- a/tests/test_uninstall.py
+++ b/tests/test_uninstall.py
@@ -60,20 +60,14 @@ def test_uninstall_man_page(pipx_temp_env):
 
 
 def test_uninstall_injected(pipx_temp_env):
-    pycowsay_app_paths = [
-        constants.LOCAL_BIN_DIR / app for app in PKG["pycowsay"]["apps"]
-    ]
-    pycowsay_man_page_paths = [
-        constants.LOCAL_MAN_DIR / man_page for man_page in PKG["pycowsay"]["man_pages"]
-    ]
+    pycowsay_app_paths = [constants.LOCAL_BIN_DIR / app for app in PKG["pycowsay"]["apps"]]
+    pycowsay_man_page_paths = [constants.LOCAL_MAN_DIR / man_page for man_page in PKG["pycowsay"]["man_pages"]]
     pylint_app_paths = [constants.LOCAL_BIN_DIR / app for app in PKG["pylint"]["apps"]]
     app_paths = pycowsay_app_paths + pylint_app_paths
     man_page_paths = pycowsay_man_page_paths
 
     assert not run_pipx_cli(["install", PKG["pycowsay"]["spec"]])
-    assert not run_pipx_cli(
-        ["inject", "--include-apps", "pycowsay", PKG["pylint"]["spec"]]
-    )
+    assert not run_pipx_cli(["inject", "--include-apps", "pycowsay", PKG["pylint"]["spec"]])
 
     for app_path in app_paths:
         assert app_path.exists()
@@ -148,9 +142,7 @@ def test_uninstall_proper_dep_behavior(pipx_temp_env, metadata_version):
 
 
 @pytest.mark.parametrize("metadata_version", [None, "0.1", "0.2"])
-def test_uninstall_proper_dep_behavior_missing_interpreter(
-    pipx_temp_env, metadata_version
-):
+def test_uninstall_proper_dep_behavior_missing_interpreter(pipx_temp_env, metadata_version):
     # isort is a dependency of pylint. Make sure that uninstalling pylint
     # does not also uninstall isort app in LOCAL_BIN_DIR
     isort_app_paths = [constants.LOCAL_BIN_DIR / app for app in PKG["isort"]["apps"]]
diff --git a/tests/test_upgrade.py b/tests/test_upgrade.py
index adacd229ae..a8b5068eb3 100644
--- a/tests/test_upgrade.py
+++ b/tests/test_upgrade.py
@@ -18,10 +18,7 @@ def test_upgrade_legacy_venv(pipx_temp_env, capsys, metadata_version):
     if metadata_version is None:
         assert run_pipx_cli(["upgrade", "pycowsay"])
         captured = capsys.readouterr()
-        assert (
-            "Not upgrading pycowsay. It has missing internal pipx metadata."
-            in captured.err
-        )
+        assert "Not upgrading pycowsay. It has missing internal pipx metadata." in captured.err
     else:
         assert not run_pipx_cli(["upgrade", "pycowsay"])
         captured = capsys.readouterr()