Repository: basnijholt/unidep Branch: main Commit: ef030c208627 Files: 101 Total size: 863.1 KB Directory structure: gitextract_6n9izm0e/ ├── .github/ │ ├── release.py │ ├── renovate.json │ ├── use-local-unidep.py │ └── workflows/ │ ├── documentation-links.yml │ ├── install-example-projects.yml │ ├── pytest.yml │ ├── release.yml │ ├── toc.yaml │ └── update-readme.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pre-commit-hooks.yaml ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── bootstrap.sh ├── docs/ │ ├── Makefile │ └── source/ │ ├── .gitignore │ └── conf.py ├── example/ │ ├── README.md │ ├── environment.yaml │ ├── hatch2_project/ │ │ ├── README.md │ │ ├── hatch2_project.py │ │ └── pyproject.toml │ ├── hatch_project/ │ │ ├── README.md │ │ ├── hatch_project.py │ │ ├── pyproject.toml │ │ └── requirements.yaml │ ├── pyproject_toml_project/ │ │ ├── README.md │ │ ├── pyproject.toml │ │ └── pyproject_toml_project.py │ ├── setup_py_project/ │ │ ├── README.md │ │ ├── pyproject.toml │ │ ├── requirements.yaml │ │ ├── setup.py │ │ └── setup_py_project.py │ └── setuptools_project/ │ ├── README.md │ ├── pyproject.toml │ ├── requirements.yaml │ └── setuptools_project.py ├── pyproject.toml ├── tests/ │ ├── __init__.py │ ├── helpers.py │ ├── shared_local_install_monorepo/ │ │ ├── project1/ │ │ │ ├── pyproject.toml │ │ │ └── requirements.yaml │ │ ├── project2/ │ │ │ ├── pyproject.toml │ │ │ └── requirements.yaml │ │ └── shared/ │ │ ├── pyproject.toml │ │ └── requirements.yaml │ ├── simple_monorepo/ │ │ ├── common-requirements.yaml │ │ ├── conda-lock.yml │ │ ├── project1/ │ │ │ ├── conda-lock.yml │ │ │ └── requirements.yaml │ │ └── project2/ │ │ ├── conda-lock.yml │ │ └── requirements.yaml │ ├── test-pip-and-conda-different-name/ │ │ ├── conda-lock.yml │ │ ├── project1/ │ │ │ └── requirements.yaml │ │ └── project2/ │ │ └── requirements.yaml │ ├── test-pip-package-with-conda-dependency/ │ │ ├── conda-lock.yml │ │ ├── project1/ │ │ │ └── requirements.yaml │ │ └── project2/ │ │ └── requirements.yaml │ ├── test_cli.py │ ├── test_cli_install_conda_lock.py │ ├── test_conda_lock.py │ ├── test_dependencies_parsing_internal.py │ ├── test_dependency_selection.py │ ├── test_local_wheels_and_zip.py │ ├── test_parse_yaml_local_dependencies.py │ ├── test_parse_yaml_nested_local_dependencies.py │ ├── test_pip_indices.py │ ├── test_pip_indices_cli.py │ ├── test_pip_indices_integration.py │ ├── test_pixi.py │ ├── test_project_dependency_handling.py │ ├── test_pypi_alternatives/ │ │ ├── main_app/ │ │ │ ├── main_app/ │ │ │ │ └── __init__.py │ │ │ └── pyproject.toml │ │ ├── shared_lib/ │ │ │ ├── pyproject.toml │ │ │ └── shared_lib/ │ │ │ └── __init__.py │ │ └── test_all_scenarios.sh │ ├── test_pypi_alternatives.py │ ├── test_pypi_alternatives_errors.py │ ├── test_pypi_alternatives_integration.py │ ├── test_setuptools_integration.py │ ├── test_unidep.py │ ├── test_utils.py │ └── test_version_conflicts.py └── unidep/ ├── __init__.py ├── _cli.py ├── _conda_env.py ├── _conda_lock.py ├── _conflicts.py ├── _dependencies_parsing.py ├── _dependency_selection.py ├── _hatch_integration.py ├── _pixi.py ├── _pytest_plugin.py ├── _setuptools_integration.py ├── _version.py ├── platform_definitions.py ├── py.typed └── utils.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/release.py ================================================ """Create a new release tag with CalVer format.""" import datetime import 
operator
import os
from pathlib import Path

import git
from packaging import version


def get_repo() -> git.Repo:
    """Get the git repo for the current project."""
    return git.Repo(Path(__file__).parent.parent)


def is_already_tagged(repo: git.Repo) -> bool:
    """Check if the current commit is already tagged."""
    return bool(repo.git.tag(points_at="HEAD"))


def should_skip_release(repo: git.Repo) -> bool:
    """Check if the commit message contains [skip release]."""
    commit_message = repo.head.commit.message.split("\n")[0]
    return "[skip release]" in commit_message


def get_new_version(repo: git.Repo) -> str:
    """Get the new version number."""
    latest_tag = max(repo.tags, key=operator.attrgetter("commit.committed_datetime"))
    last_version = version.parse(latest_tag.name)
    now = datetime.datetime.now(tz=datetime.timezone.utc)
    patch = (
        last_version.micro + 1
        if last_version.major == now.year and last_version.minor == now.month
        else 0
    )
    return f"{now.year}.{now.month}.{patch}"


def set_author(repo: git.Repo) -> None:
    """Set author information."""
    author_name = repo.head.commit.author.name
    author_email = repo.head.commit.author.email
    os.environ["GIT_AUTHOR_NAME"] = author_name
    os.environ["GIT_AUTHOR_EMAIL"] = author_email
    os.environ["GIT_COMMITTER_NAME"] = author_name
    os.environ["GIT_COMMITTER_EMAIL"] = author_email


def create_tag(repo: git.Repo, new_version: str, release_notes: str) -> None:
    """Create a new tag."""
    set_author(repo)
    repo.create_tag(new_version, message=f"Release {new_version}\n\n{release_notes}")


def push_tag(repo: git.Repo, new_version: str) -> None:
    """Push the new tag to the remote repository."""
    origin = repo.remote("origin")
    origin.push(new_version)


def get_commit_messages_since_last_release(repo: git.Repo) -> str:
    """Get the commit messages since the last release."""
    latest_tag = max(repo.tags, key=operator.attrgetter("commit.committed_datetime"))
    return repo.git.log(f"{latest_tag}..HEAD", "--pretty=format:%s")


def format_release_notes(commit_messages: str, new_version: str) -> str:
    """Format the release notes."""
    header = f"🚀 Release {new_version}\n\n"
    intro = "📝 This release includes the following changes:\n\n"
    commit_list = commit_messages.split("\n")
    formatted_commit_list = [f"- {commit}" for commit in commit_list]
    commit_section = "\n".join(formatted_commit_list)
    footer = (
        "\n\n🙏 Thank you for using this project! Please report any issues "
        "or feedback on the GitHub repository at"
        " https://github.com/basnijholt/unidep."
) return f"{header}{intro}{commit_section}{footer}" def main() -> None: """Main entry point.""" repo = get_repo() if is_already_tagged(repo): print("Current commit is already tagged!") return if should_skip_release(repo): print("Commit message is [skip release]!") return new_version = get_new_version(repo) commit_messages = get_commit_messages_since_last_release(repo) release_notes = format_release_notes(commit_messages, new_version) print(release_notes) create_tag(repo, new_version, release_notes) push_tag(repo, new_version) # Write the output version to the GITHUB_OUTPUT environment file with open(os.environ["GITHUB_OUTPUT"], "a") as output_file: # noqa: PTH123 output_file.write(f"version={new_version}\n") print(f"Created new tag: {new_version}") if __name__ == "__main__": main() ================================================ FILE: .github/renovate.json ================================================ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "rebaseWhen": "behind-base-branch", "dependencyDashboard": true, "labels": [ "dependencies", "no-stale" ], "commitMessagePrefix": "⬆️", "commitMessageTopic": "{{depName}}", "prBodyDefinitions": { "Release": "yes" }, "packageRules": [ { "matchManagers": [ "github-actions" ], "addLabels": [ "github_actions" ], "rangeStrategy": "pin" }, { "matchManagers": [ "github-actions" ], "matchUpdateTypes": [ "minor", "patch" ], "automerge": true } ] } ================================================ FILE: .github/use-local-unidep.py ================================================ """Update `pyproject.toml` in each example project to use local `unidep`.""" from pathlib import Path REPO_ROOT = Path(__file__).resolve().parent.parent EXAMPLE_DIR = REPO_ROOT / "example" PROJECT_DIRS = [p for p in EXAMPLE_DIR.iterdir() if p.name.endswith("_project")] REPO_ROOT_URI = REPO_ROOT.resolve().as_uri() print( f"REPO_ROOT: {REPO_ROOT}, EXAMPLE_DIR: {EXAMPLE_DIR}, PROJECT_DIRS: {PROJECT_DIRS}", ) for project_dir in PROJECT_DIRS: # Find the line with `requires = [` in `pyproject.toml` and replace # `unidep`/`unidep[toml]` entries with file:// references to the repo root. 
pyproject_toml = project_dir / "pyproject.toml" lines = pyproject_toml.read_text().splitlines() for i, line in enumerate(lines): if "requires = [" not in line: continue if "unidep[toml]" in line: lines[i] = line.replace( "unidep[toml]", f"unidep[toml] @ {REPO_ROOT_URI}", ) elif "unidep" in line: lines[i] = line.replace("unidep", f"unidep @ {REPO_ROOT_URI}") break pyproject_toml.write_text("\n".join(lines)) ================================================ FILE: .github/workflows/documentation-links.yml ================================================ name: readthedocs/actions on: pull_request_target: types: - opened permissions: pull-requests: write jobs: documentation-links: runs-on: ubuntu-latest steps: - uses: readthedocs/actions/preview@v1 with: project-slug: "unidep" ================================================ FILE: .github/workflows/install-example-projects.yml ================================================ name: install-example-projects on: push: branches: [main] pull_request: jobs: pip-install: strategy: fail-fast: false matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # skips 3.7 (unsupported on GH Actions) platform: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.platform }} env: PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15 steps: - uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - name: Update pyproject.toml run: | python .github/use-local-unidep.py - name: Install example packages run: | set -ex # Loop over all folders in `./example` and install them for d in ./example/*/ ; do pip install -e "$d" pkg=$(basename $d) python -c "import $pkg" pip list done shell: bash micromamba-install: strategy: fail-fast: false matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # skips 3.7 (unsupported on GH Actions) platform: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.platform }} env: PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15 steps: - uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: mamba-org/setup-micromamba@v2 with: environment-name: unidep create-args: >- python=${{ matrix.python-version }} - name: Install unidep run: | python -m pip install --upgrade pip pip install -e ".[toml]" shell: bash -el {0} - name: Update pyproject.toml run: python .github/use-local-unidep.py shell: bash -el {0} - name: Install example packages run: | set -ex # Loop over all folders in `./example` and install them for d in ./example/*/ ; do unidep install -e "$d" pkg=$(basename $d) python -c "import $pkg" micromamba list done shell: bash -el {0} - name: Install pyproject_toml_project in new environment run: | unidep install -n new-env -e ./example/pyproject_toml_project micromamba activate new-env python -c "import pyproject_toml_project" shell: bash -el {0} miniconda-install: strategy: fail-fast: false matrix: python-version: ["3.8", "3.12"] # Just testing the oldest and newest supported versions platform: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.platform }} env: PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15 steps: - uses: actions/checkout@v6 with: fetch-depth: 0 - uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true python-version: ${{ matrix.python-version }} - name: Conda info shell: bash -el 
{0} run: conda info - name: Install unidep run: | python -m pip install --upgrade pip pip install -e ".[toml]" shell: bash -el {0} - name: Update pyproject.toml run: python .github/use-local-unidep.py shell: bash -el {0} - name: Install example packages run: | set -ex # Loop over all folders in `./example` and install them for d in ./example/*/ ; do unidep install -e "$d" pkg=$(basename $d) python -c "import $pkg" conda list done shell: bash -el {0} - name: Install pyproject_toml_project in new environment run: | unidep install -n new-env -e ./example/pyproject_toml_project conda activate new-env python -c "import pyproject_toml_project" shell: bash -el {0} ================================================ FILE: .github/workflows/pytest.yml ================================================ name: pytest on: push: branches: [main] pull_request: jobs: test: strategy: fail-fast: false matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # skips 3.7 (unsupported on GH Actions) platform: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.platform }} env: PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15 steps: - uses: actions/checkout@v6 with: fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: mamba-org/setup-micromamba@v2 with: environment-name: unidep create-args: >- python=${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip if [[ "${{ matrix.python-version }}" == "3.8" ]]; then # Python 3.8 coverage does not support the "patch" config option sed -i.bak '/patch/d' pyproject.toml && rm pyproject.toml.bak fi pip install -e ".[test]" shell: bash -el {0} - name: Run pytest run: | if [[ "${{ matrix.platform }}" == "ubuntu-latest" && "${{ matrix.python-version }}" == "3.11" ]]; then pytest else pytest --cov-fail-under=0 fi shell: bash -el {0} - name: Upload coverage to Codecov if: matrix.python-version == '3.11' && matrix.platform == 'ubuntu-latest' uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} ================================================ FILE: .github/workflows/release.yml ================================================ name: Upload Python Package on: release: types: [published] jobs: deploy: runs-on: ubuntu-latest environment: name: pypi url: https://pypi.org/p/${{ github.repository }} permissions: id-token: write steps: - uses: actions/checkout@v6 - name: Set up Python uses: actions/setup-python@v6 with: python-version: "3.14.2" - name: Install dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel build - name: Build run: | python -m build - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 ================================================ FILE: .github/workflows/toc.yaml ================================================ on: push name: TOC Generator jobs: generateTOC: name: TOC Generator runs-on: ubuntu-latest steps: - uses: technote-space/toc-generator@v4 with: TOC_TITLE: "" TARGET_PATHS: "README.md,example/README.md" ================================================ FILE: .github/workflows/update-readme.yml ================================================ name: Update README.md on: push: branches: - main pull_request: jobs: update_readme: runs-on: ubuntu-latest steps: - name: Check out repository uses: actions/checkout@v6 with: persist-credentials: false fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v6 with: python-version: '3.14.2' - name: 
Install Python dependencies run: | python -m pip install --upgrade pip pip install markdown-code-runner pip install -e . - name: Run markdown-code-runner run: | markdown-code-runner README.md cd example markdown-code-runner README.md - name: Commit updated files id: commit run: | git add -u . git config --local user.email "github-actions[bot]@users.noreply.github.com" git config --local user.name "github-actions[bot]" if git diff --quiet && git diff --staged --quiet; then echo "No changes, skipping commit." echo "commit_status=skipped" >> $GITHUB_ENV else git commit -m "Update files from markdown-code-runner" echo "commit_status=committed" >> $GITHUB_ENV fi - name: Push changes if: env.commit_status == 'committed' uses: ad-m/github-push-action@v1.1.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: ${{ github.head_ref }} ================================================ FILE: .gitignore ================================================ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # IPython Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # dotenv .env # virtualenv venv/ ENV/ # Spyder project settings .spyderproject # Rope project settings .ropeproject # other .pixi .DS_Store *.code-workspace ================================================ FILE: .pre-commit-config.yaml ================================================ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - id: check-added-large-files - id: trailing-whitespace - id: end-of-file-fixer - id: mixed-line-ending - repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.9.9" hooks: - id: ruff args: ["--fix"] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy rev: "v1.15.0" hooks: - id: mypy additional_dependencies: ["types-PyYAML", "types-setuptools"] ================================================ FILE: .pre-commit-hooks.yaml ================================================ - id: unidep-environment-yaml name: unidep environment.yaml description: Generate environment.yaml from requirements.yaml using unidep. entry: unidep merge language: python files: '(requirements\.yaml|pyproject\.toml)$' pass_filenames: false ================================================ FILE: .readthedocs.yaml ================================================ version: 2 build: os: ubuntu-22.04 tools: python: "3.12" sphinx: configuration: docs/source/conf.py python: install: - method: pip path: . extra_requirements: - docs ================================================ FILE: LICENSE ================================================ BSD 3-Clause License Copyright (c) 2023, Bas Nijholt All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ================================================ FILE: README.md ================================================ # 🚀 UniDep - Unified Conda and Pip Dependency Management 🚀 ![UniDep logo](https://media.githubusercontent.com/media/basnijholt/nijho.lt/main/content/project/unidep/featured.png) [![PyPI](https://img.shields.io/pypi/v/unidep.svg)](https://pypi.python.org/pypi/unidep) [![Build Status](https://github.com/basnijholt/unidep/actions/workflows/pytest.yml/badge.svg)](https://github.com/basnijholt/unidep/actions/workflows/pytest.yml) [![CodeCov](https://codecov.io/gh/basnijholt/unidep/branch/main/graph/badge.svg)](https://codecov.io/gh/basnijholt/unidep) [![GitHub Repo stars](https://img.shields.io/github/stars/basnijholt/unidep)](https://github.com/basnijholt/unidep) [![Documentation](https://readthedocs.org/projects/unidep/badge/?version=latest)](https://unidep.readthedocs.io/) [![Python Bytes](https://img.shields.io/badge/Python_Bytes-366-D7F9FF?logo=applepodcasts&labelColor=blue)](https://www.youtube.com/live/PRaTs3PnJvI?si=UrVozo81Pj8WcyXh&t=489) > UniDep streamlines Python project dependency management by unifying Conda and Pip packages in a single system. > [Learn when to use UniDep](#q-when-to-use-unidep) in our [FAQ](#-faq). Handling dependencies in Python projects can be challenging, especially when juggling Python and non-Python packages. This often leads to confusion and inefficiency, as developers juggle between multiple dependency files. - **📝 Unified Dependency File**: Use either `requirements.yaml` or `pyproject.toml` to manage both Conda and Pip dependencies in one place. - **⚙️ Build System Integration**: Integrates with Setuptools and Hatchling for automatic dependency handling during `pip install ./your-package`. - **💻 One-Command Installation**: `unidep install` handles Conda, Pip, and local dependencies effortlessly. - **⚡️ Fast Pip Operations**: Leverages `uv` (if installed) for faster pip installations. 
- **🏢 Monorepo-Friendly**: Render (multiple) `requirements.yaml` or `pyproject.toml` files into one Conda `environment.yaml` file and maintain fully consistent global *and* per sub package `conda-lock` files. - **🌍 Platform-Specific Support**: Specify dependencies for different operating systems or architectures. - **🔧 `pip-compile` Integration**: Generate fully pinned `requirements.txt` files from `requirements.yaml` or `pyproject.toml` files using `pip-compile`. - **🔒 Integration with `conda-lock`**: Generate fully pinned `conda-lock.yml` files from (multiple) `requirements.yaml` or `pyproject.toml` file(s), leveraging `conda-lock`. - **🥧 Pixi Support**: Generate `pixi.toml` files from your dependency files, enabling Pixi-based workflows while keeping UniDep as the single source of truth. - **🤓 Nerd stats**: written in Python, 100% test coverage, fully-typed, all Ruff's rules enabled, easily extensible, and minimal dependencies `unidep` is designed to make dependency management in Python projects as simple and efficient as possible. Try it now and streamline your development process! > [!TIP] > Check out the [example `requirements.yaml` and `pyproject.toml` below](#example). ## :books: Table of Contents - [:rocket: Bootstrap from Scratch](#rocket-bootstrap-from-scratch) - [:package: Installation](#package-installation) - [:memo: `requirements.yaml` and `pyproject.toml` structure](#memo-requirementsyaml-and-pyprojecttoml-structure) - [Example](#example) - [Example `requirements.yaml`](#example-requirementsyaml) - [Example `pyproject.toml`](#example-pyprojecttoml) - [Key Points](#key-points) - [Supported Version Pinnings](#supported-version-pinnings) - [Conflict Resolution](#conflict-resolution) - [How It Works](#how-it-works) - [Platform Selectors](#platform-selectors) - [Supported Selectors](#supported-selectors) - [Usage](#usage) - [Implementation](#implementation) - [Custom Pip Index URLs](#custom-pip-index-urls) - [How It Works](#how-it-works-1) - [Example Usage](#example-usage) - [Generated Output](#generated-output) - [`[project.dependencies]` in `pyproject.toml` handling](#projectdependencies-in-pyprojecttoml-handling) - [:jigsaw: Build System Integration](#jigsaw-build-system-integration) - [Local Dependencies in Monorepos](#local-dependencies-in-monorepos) - [PyPI Alternatives for Local Dependencies](#pypi-alternatives-for-local-dependencies) - [Overriding Nested Vendor Copies with `use`](#overriding-nested-vendor-copies-with-use) - [Example: Override foo's bundled bar with your PyPI build](#example-override-foos-bundled-bar-with-your-pypi-build) - [All `use` values](#all-use-values) - [Build System Behavior](#build-system-behavior) - [Example packages](#example-packages) - [Setuptools Integration](#setuptools-integration) - [Hatchling Integration](#hatchling-integration) - [:desktop_computer: As a CLI](#desktop_computer-as-a-cli) - [`unidep merge`](#unidep-merge) - [`unidep install`](#unidep-install) - [`unidep install-all`](#unidep-install-all) - [`unidep conda-lock`](#unidep-conda-lock) - [`unidep pixi`](#unidep-pixi) - [What `unidep pixi` generates](#what-unidep-pixi-generates) - [Dependency reconciliation rules (important)](#dependency-reconciliation-rules-important) - [Channels/platforms precedence](#channelsplatforms-precedence) - [Example (single-file)](#example-single-file) - [`unidep pip-compile`](#unidep-pip-compile) - [`unidep pip`](#unidep-pip) - [`unidep conda`](#unidep-conda) - [❓ FAQ](#-faq) - [**Q: When to use UniDep?**](#q-when-to-use-unidep) - [**Q: Just show 
me a full example!**](#q-just-show-me-a-full-example) - [**Q: Uses of UniDep in the wild?**](#q-uses-of-unidep-in-the-wild) - [**Q: How do I force PyPI instead of a local path for one dependency?**](#q-how-do-i-force-pypi-instead-of-a-local-path-for-one-dependency) - [**Q: How do I ignore a local dependency entirely?**](#q-how-do-i-ignore-a-local-dependency-entirely) - [**Q: A submodule brings its own copy of package X. How do I avoid conflicts?**](#q-a-submodule-brings-its-own-copy-of-package-x-how-do-i-avoid-conflicts) - [**Q: How is this different from conda/mamba/pip?**](#q-how-is-this-different-from-condamambapip) - [**Q: I found a project using unidep, now what?**](#q-i-found-a-project-using-unidep-now-what) - [**Q: How to handle local dependencies that do not use UniDep?**](#q-how-to-handle-local-dependencies-that-do-not-use-unidep) - [**Q: Can't Conda already do this?**](#q-cant-conda-already-do-this) - [**Q: What is the difference between `conda-lock` and `unidep conda-lock`?**](#q-what-is-the-difference-between-conda-lock-and-unidep-conda-lock) - [**Q: What is the difference between `hatch-conda` / `pdm-conda` and `unidep`?**](#q-what-is-the-difference-between-hatch-conda--pdm-conda-and-unidep) - [:hammer_and_wrench: Troubleshooting](#hammer_and_wrench-troubleshooting) - [`pip install` fails with `FileNotFoundError`](#pip-install-fails-with-filenotfounderror) - [:warning: Limitations](#warning-limitations) ## :rocket: Bootstrap from Scratch To get started quickly with UniDep, run the following command. This will download and install [micromamba](https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html) (recommended for fast Conda environment management), [uv](https://docs.astral.sh/uv/getting-started/installation/) (recommended for faster pip installations), and then install UniDep: ```bash "${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/main/bootstrap.sh) ``` > [!NOTE] > Micromamba and uv are recommended to optimize your installation experience, but they are not required if you prefer to use your existing Conda and pip setup. > [!WARNING] > NEVER! run scripts from the internet without understanding what they do. Always inspect the script first!
To pin the bootstrap script to a specific commit, reference the commit hash in the URL:

```bash
"${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/939246571b65004391c425eb6df713303663054a/bootstrap.sh)
```
## :package: Installation To install `unidep`, run one of the following commands that use [`pipx`](https://pipx.pypa.io/) (recommended), `pip`, or `conda`: ```bash pipx install "unidep[all]" # Recommended (install as a standalone CLI) ``` or ```bash pip install "unidep[all]" ``` or ```bash conda install -c conda-forge unidep ``` ## :memo: `requirements.yaml` and `pyproject.toml` structure `unidep` allows either using a 1. `requirements.yaml` file with a specific format (similar but _**not**_ the same as a Conda `environment.yaml` file) or 2. `pyproject.toml` file with a `[tool.unidep]` section. Both files contain the following keys: - **name** (Optional): For documentation, not used in the output. - **channels**: List of conda channels for packages, such as `conda-forge`. - **dependencies**: Mix of Conda and Pip packages. - **local_dependencies** (Optional): List of paths to other `requirements.yaml` or `pyproject.toml` files to include. - **optional_dependencies** (Optional): Dictionary with lists of optional dependencies. - **platforms** (Optional): List of platforms that are supported (used in `conda-lock`). - **pip_indices** (Optional): List of custom pip index URLs for private or alternative package repositories. Whether you use a `requirements.yaml` or `pyproject.toml` file, the same information can be specified in either. Choose the format that works best for your project. ### Example #### Example `requirements.yaml` Example of a `requirements.yaml` file: ```yaml name: example_environment channels: - conda-forge dependencies: - numpy # same name on conda and pip - conda: python-graphviz # When names differ between Conda and Pip pip: graphviz - pip: slurm-usage >=1.1.0,<2 # pip-only - conda: mumps # conda-only # Use platform selectors - conda: cuda-toolkit =11.8 # [linux64] local_dependencies: - ../other-project-using-unidep # include other projects that use unidep - ../common-requirements.yaml # include other requirements.yaml files - ../project-not-managed-by-unidep # 🚨 Skips its dependencies! optional_dependencies: test: - pytest full: - ../other-local-dep[test] # include its optional 'test' dependencies platforms: # (Optional) specify platforms that are supported (used in conda-lock) - linux-64 - osx-arm64 pip_indices: # (Optional) additional pip index URLs for private packages - https://pypi.org/simple/ # Main PyPI index (automatically included if not specified) - https://private.company.com/simple/ # Private company index - https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/ # Authenticated index with env vars ``` > [!IMPORTANT] > `unidep` can process this during `pip install` and create a Conda installable `environment.yaml` or `conda-lock.yml` file, and more! > [!NOTE] > For a more in-depth example containing multiple installable projects, see the [`example`](example/) directory. 
#### Example `pyproject.toml`

***Alternatively***, one can fully configure the dependencies in the `pyproject.toml` file in the `[tool.unidep]` section:

```toml
[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    "numpy",  # same name on conda and pip
    { conda = "python-graphviz", pip = "graphviz" },  # When names differ between Conda and Pip
    { pip = "slurm-usage >=1.1.0,<2" },  # pip-only
    { conda = "mumps" },  # conda-only
    { conda = "cuda-toolkit =11.8:linux64" }  # Use platform selectors by appending `:linux64`
]
local_dependencies = [
    "../other-project-using-unidep",  # include other projects that use unidep
    "../common-requirements.yaml",  # include other requirements.yaml files
    "../project-not-managed-by-unidep"  # 🚨 Skips its dependencies!
]
optional_dependencies = {
    test = ["pytest"],
    full = ["../other-local-dep[test]"]  # include its optional 'test' dependencies
}
platforms = [  # (Optional) specify platforms that are supported (used in conda-lock)
    "linux-64",
    "osx-arm64"
]
pip_indices = [  # (Optional) additional pip index URLs for private packages
    "https://pypi.org/simple/",  # Main PyPI index (automatically included if not specified)
    "https://private.company.com/simple/",  # Private company index
    "https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/"  # Authenticated index with env vars
]
```

This data structure is *identical* to the `requirements.yaml` format, with the exception of the `name` field and the [platform selectors](#platform-selectors). In the `requirements.yaml` file, one can use e.g., `# [linux64]`, which in the `pyproject.toml` file is `:linux64` at the end of the package name.

See [Build System Integration](#jigsaw-build-system-integration) for more information on how to set up `unidep` with different build systems (Setuptools or Hatchling).

> [!IMPORTANT]
> In these docs, we often mention the `requirements.yaml` format for simplicity, but the same information can be specified in `pyproject.toml` as well.
> Everything that is possible in `requirements.yaml` is also possible in `pyproject.toml`!

### Key Points

- Standard names (e.g., `- numpy`) are assumed to be the same for Conda and Pip.
- Use a dictionary with `conda:` *and* `pip:` to specify different names for Conda and Pip.
- Use `pip:` to specify packages that are only available through Pip.
- Use `conda:` to specify packages that are only available through Conda.
- Use `# [selector]` (YAML only) or `package:selector` to specify platform-specific dependencies.
- Use `local_dependencies:` to include other `requirements.yaml` or `pyproject.toml` files and merge them into one. Also allows projects that are not managed by `unidep` to be included, but be aware that this skips their dependencies! Can specify PyPI alternatives for monorepo setups (see [PyPI Alternatives for Local Dependencies](#pypi-alternatives-for-local-dependencies)).
- Use `optional_dependencies:` to specify optional dependencies. Can be installed like `unidep install ".[test]"` or `pip install ".[test]"`.
- Use `platforms:` to specify the platforms that are supported. If omitted, all platforms are assumed to be supported.
- Use `pip_indices:` to specify additional pip index URLs for installing packages from private or alternative package repositories (see [Custom Pip Index URLs](#custom-pip-index-urls) below).
> *We use the YAML notation here, but the same information can be specified in `pyproject.toml` as well.* ### Supported Version Pinnings UniDep has two relevant pinning layers: - **Dict-based conflict helper (`unidep._conflicts.resolve_conflicts`)**: combines repeated pinnings with the Conda-compatible subset of operators: `=`, `>`, `<`, `>=`, `<=`, `!=`. - **CLI-facing pip renderers**: additionally preserve safe pip-only PEP 440 forms such as `==` and `~=` when those constraints can be kept explicitly without ambiguity. Examples: - Conda-compatible merge: `>1.0.0, <2.0.0` - Exact pip pin: `==0.25.2.1` - Compatible release pin: `~=1.0` - **Redundant Pinning Resolution**: Automatically resolves redundant compatible constraints when possible. - Example: `>1.0.0, >0.5.0` simplifies to `>1.0.0`. - **Contradictory Version Detection**: Errors are raised for contradictory pinnings to maintain dependency integrity. See the [Conflict Resolution](#conflict-resolution) section for more information. - Example: Specifying `>2.0.0, <1.5.0` triggers a `VersionConflictError`. - **Invalid Pinning Detection**: Detects and raises errors for unrecognized or improperly formatted version specifications. - **Conda Build Pinning**: UniDep also supports Conda's build pinning, allowing you to specify builds in your pinning patterns. - Example: Conda supports pinning builds like `qsimcirq * cuda*` or `vtk * *egl*`. - **Limitation**: While UniDep allows such build pinning, it requires that there be a single pin per package. UniDep cannot resolve conflicts where multiple build pinnings are specified for the same package. - Example: UniDep can handle `qsimcirq * cuda*`, but it cannot resolve a scenario with both `qsimcirq * cuda*` and `qsimcirq * cpu*`. - **Other Special Cases**: In addition to Conda build pins, UniDep supports all special pinning formats, such as VCS (Version Control System) URLs or local file paths. This includes formats like `package @ git+https://git/repo/here` or `package @ file:///path/to/package`. However, UniDep has a limitation: it can handle only one special pin per package. These special pins can be combined with an unpinned version specification, but not with multiple special pin formats for the same package. - Example: UniDep can manage dependencies specified as `package @ git+https://git/repo/here` and `package` in the same `requirements.yaml`. However, it cannot resolve scenarios where both `package @ git+https://git/repo/here` and `package @ file:///path/to/package` are specified for the same package. > [!WARNING] > **Pinning Validation and Combination**: UniDep actively validates and/or combines pinnings only when **multiple different pinnings** are specified for the same package. > This means if your `requirements.yaml` files include multiple pinnings for a single package, UniDep will attempt to resolve them into a single, coherent specification. > However, if the pinnings are contradictory or incompatible, UniDep will raise an error to alert you of the conflict. ### Conflict Resolution `unidep` features a conflict resolution mechanism to manage version conflicts and platform-specific dependencies in `requirements.yaml` or `pyproject.toml` files. #### How It Works - **Within-source pinning priority**: `unidep` combines repeated entries within the same source (`conda` or `pip`) and gives priority to version-pinned packages. For instance, if both `foo` and `foo <1` are listed for the same source, `foo <1` is selected due to its specific version pin. 
- **Entry-based rendering**: CLI-facing outputs now work from `parse_requirements(...).dependency_entries`, preserving each original declaration long enough for the shared selector to choose the final Conda-like or pip-only result. - **Lower-level metadata helper**: `unidep._conflicts.resolve_conflicts()` still exists for the older dict-based requirements model (`ParsedRequirements.requirements`), but it is no longer the main renderer handoff. - **Conda-like paired-entry selection**: For explicit dependency entries that provide both `conda:` and `pip:` alternatives, Conda-like outputs use deterministic source selection rules: Pip extras win, otherwise a single pinned side wins, and ties prefer Conda. - **Pip-only output selection**: Pip-only exports (`unidep pip`, setuptools integration, `get_python_dependencies`) keep the Pip dependency when it exists, even if Conda would win for a Conda-like output. - **Platform-Specific Version Pinning**: `unidep` resolves platform-specific dependency conflicts by preferring the version with the narrowest platform scope. For instance, given `foo <3 # [linux64]` and `foo >1`, it installs `foo >1,<3` exclusively on Linux-64 and `foo >1` on all other platforms. - **Intractable Conflicts**: When conflicts are irreconcilable within a source (e.g., `foo >1` vs. `foo <1`), `unidep` raises an exception. ### Platform Selectors This tool supports a range of platform selectors that allow for specific handling of dependencies based on the user's operating system and architecture. This feature is particularly useful for managing conditional dependencies in diverse environments. #### Supported Selectors The following selectors are supported: - `linux`: For all Linux-based systems. - `linux64`: Specifically for 64-bit Linux systems. - `aarch64`: For Linux systems on ARM64 architectures. - `ppc64le`: For Linux on PowerPC 64-bit Little Endian architectures. - `osx`: For all macOS systems. - `osx64`: Specifically for 64-bit macOS systems. - `arm64`: For macOS systems on ARM64 architectures (Apple Silicon). - `macos`: An alternative to `osx` for macOS systems. - `unix`: A general selector for all UNIX-like systems (includes Linux and macOS). - `win`: For all Windows systems. - `win64`: Specifically for 64-bit Windows systems. #### Usage Selectors are used in `requirements.yaml` files to conditionally include dependencies based on the platform: ```yaml dependencies: - some-package >=1 # [unix] - another-package # [win] - special-package # [osx64] - pip: cirq # [macos win] conda: cirq # [linux] ``` Or when using `pyproject.toml` instead of `requirements.yaml`: ```toml [tool.unidep] dependencies = [ "some-package >=1:unix", "another-package:win", "special-package:osx64", { pip = "cirq:macos win", conda = "cirq:linux" }, ] ``` In this example: - `some-package` is included only in UNIX-like environments (Linux and macOS). - `another-package` is specific to Windows. - `special-package` is included only for 64-bit macOS systems. - `cirq` is managed by `pip` on macOS and Windows, and by `conda` on Linux. This demonstrates how you can specify different package managers for the same package based on the platform. Note that the `package-name:unix` syntax can also be used in the `requirements.yaml` file, but the `package-name # [unix]` syntax is not supported in `pyproject.toml`. #### Implementation `unidep` parses these selectors and filters dependencies according to the platform where it's being installed. 
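To make this concrete, here is a minimal sketch of how selector-based filtering could work, using the selector-to-platform mapping documented above (the code and names are illustrative only, not `unidep`'s internal API):

```python
from __future__ import annotations

# Illustrative sketch, not unidep's implementation: map each documented
# selector to the conda platforms it matches, then keep only the
# dependencies whose selectors apply to the target platform.
SELECTOR_TO_PLATFORMS = {
    "linux": {"linux-64", "linux-aarch64", "linux-ppc64le"},
    "linux64": {"linux-64"},
    "aarch64": {"linux-aarch64"},
    "ppc64le": {"linux-ppc64le"},
    "osx": {"osx-64", "osx-arm64"},
    "macos": {"osx-64", "osx-arm64"},
    "osx64": {"osx-64"},
    "arm64": {"osx-arm64"},
    "unix": {"linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64"},
    "win": {"win-64"},
    "win64": {"win-64"},
}


def applies_to(selectors: list[str] | None, platform: str) -> bool:
    """Return True if a dependency with these selectors belongs on `platform`."""
    if not selectors:  # no selector means the dependency applies everywhere
        return True
    return any(platform in SELECTOR_TO_PLATFORMS[s] for s in selectors)


deps = [
    ("some-package >=1", ["unix"]),
    ("another-package", ["win"]),
    ("special-package", ["osx64"]),
    ("numpy", None),
]
print([name for name, sel in deps if applies_to(sel, "linux-64")])
# -> ['some-package >=1', 'numpy']
```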
It is also used for creating environment and lock files that are portable across different platforms, ensuring that each environment has the appropriate dependencies installed. ### Custom Pip Index URLs The `pip_indices` field allows you to specify additional pip index URLs for installing packages from private or alternative package repositories. It may be given as a single string or a list of strings. This is particularly useful for: - **Private Company Packages**: Access internal packages hosted on private PyPI servers - **Alternative Package Repositories**: Use mirrors or alternative package sources - **Authenticated Repositories**: Access protected repositories using environment variables for credentials #### How It Works When `pip_indices` is specified: 1. **First index is primary**: The first URL in the list is used as `--index-url` (primary index) 2. **Additional indices are extra**: Subsequent URLs are passed as `--extra-index-url` flags 3. **Environment variable expansion**: Variables like `${PIP_USER}` and `${PIP_PASSWORD}` are automatically expanded from environment variables 4. **Automatic deduplication**: Duplicate URLs are automatically removed while preserving order 5. **Integration with all tools**: Works with `unidep install`, `pip install`, and when using `uv` as the installer #### Example Usage ```yaml # requirements.yaml pip_indices: - https://pypi.org/simple/ # Primary index (optional, used by default) - https://test.pypi.org/simple/ # Test PyPI for pre-release packages - https://${GITLAB_USER}:${GITLAB_TOKEN}@gitlab.company.com/api/v4/projects/123/packages/pypi/simple # Private GitLab ``` ```toml # pyproject.toml [tool.unidep] pip_indices = [ "https://download.pytorch.org/whl/cpu", # PyTorch CPU-only builds "https://${ARTIFACTORY_USER}:${ARTIFACTORY_PASSWORD}@artifactory.company.com/pypi/simple" # Artifactory ] ``` #### Generated Output When generating `environment.yaml` files, `pip_indices` are included as `pip-repositories`: ```yaml # Generated environment.yaml name: myproject channels: - conda-forge pip-repositories: - https://pypi.org/simple/ - https://private.company.com/simple/ dependencies: - python - pip: - private-package # Will be installed from the private index ``` > [!TIP] > Store sensitive credentials in environment variables rather than hardcoding them in configuration files. UniDep automatically expands `${VAR_NAME}` patterns. ### `[project.dependencies]` in `pyproject.toml` handling The `project_dependency_handling` option in `[tool.unidep]` (in `pyproject.toml`) controls how dependencies listed in the standard `[project.dependencies]` section of `pyproject.toml` are handled when processed by `unidep`. **Modes:** - **`ignore`** (default): Dependencies in `[project.dependencies]` are ignored by `unidep`. - **`same-name`**: Dependencies in `[project.dependencies]` are treated as dependencies with the same name for both Conda and Pip. They will be added to the `dependencies` list in `[tool.unidep]` under the assumption that the package name is the same for both package managers. - **`pip-only`**: Dependencies in `[project.dependencies]` are treated as pip-only dependencies. They will be added to the `dependencies` list in `[tool.unidep]` under the `pip` key. 
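For intuition, the three modes can be sketched as a small transformation of the `[project.dependencies]` list (a hypothetical helper, not `unidep`'s actual API):

```python
# Hypothetical sketch of the three `project_dependency_handling` modes;
# unidep's real implementation differs.
def handle_project_dependencies(project_deps, mode):
    if mode == "ignore":
        return []  # [project.dependencies] contributes nothing to [tool.unidep]
    if mode == "same-name":
        # "requests" is used as both the conda and the pip package name
        return list(project_deps)
    if mode == "pip-only":
        # "requests" becomes a pip-only entry, like {pip = "requests"}
        return [{"pip": dep} for dep in project_deps]
    msg = f"unknown project_dependency_handling mode: {mode!r}"
    raise ValueError(msg)


print(handle_project_dependencies(["requests", "pandas"], "pip-only"))
# -> [{'pip': 'requests'}, {'pip': 'pandas'}]
```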
**Example `pyproject.toml`:**

```toml
[build-system]
requires = ["hatchling", "unidep"]
build-backend = "hatchling.build"

[project]
name = "my-project"
version = "0.1.0"
dependencies = [
    # These will be handled according to the `project_dependency_handling` option
    "requests",
    "pandas",
]

[tool.unidep]
project_dependency_handling = "same-name"  # Or "pip-only", "ignore"
dependencies = [
    {conda = "python-graphviz", pip = "graphviz"},
]
```

**Notes:**

- The `project_dependency_handling` option only affects how dependencies from `[project.dependencies]` are processed. Dependencies directly listed under `[tool.unidep.dependencies]` are handled as before.
- This feature is helpful for projects that are already using the standard `[project.dependencies]` field and want to integrate `unidep` without duplicating their dependency list.
- The `project_dependency_handling` feature is **only available** when using `pyproject.toml` files. It is not supported in `requirements.yaml` files.

## :jigsaw: Build System Integration

> [!TIP]
> See [`example/`](example/) for working examples of using `unidep` with different build systems.

`unidep` seamlessly integrates with popular Python build systems to simplify dependency management in your projects.

### Local Dependencies in Monorepos

Local dependencies are essential for monorepos and multi-package projects, allowing you to:

- Share code between packages during development
- Maintain separate releases for each package
- Test changes across multiple packages simultaneously

However, when building wheels for distribution, local paths create non-portable packages that only work on the original system.

### PyPI Alternatives for Local Dependencies

UniDep solves this problem by letting you specify both local paths (for development) and PyPI packages (for distribution):

```yaml
# requirements.yaml
dependencies:
  - numpy
  - pandas
local_dependencies:
  # Standard string format for local dependencies
  - ../shared-lib
  # Dictionary format with optional PyPI alternative for build-time
  - local: ../auth-lib
    pypi: company-auth-lib>=1.0
  - local: ../utils
    pypi: company-utils~=2.0
    use: pypi  # see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)
```

Or in `pyproject.toml`:

```toml
[tool.unidep]
dependencies = ["numpy", "pandas"]
local_dependencies = [
    # Standard string format for local dependencies
    "../shared-lib",
    # Dictionary format with optional PyPI alternative for build-time
    {local = "../auth-lib", pypi = "company-auth-lib>=1.0"},
    {local = "../utils", pypi = "company-utils~=2.0", use = "pypi"},
]
```

**How it works:**

- **During development** (e.g., `unidep install` or `pip install -e .`): Uses local paths when they exist
- **When building wheels**: PyPI alternatives (if specified) are used to create portable packages
- The standard string format continues to work as always for local dependencies

> [!TIP]
> PyPI alternatives ensure your wheels are portable and can be installed anywhere, not just on the build system. Use the `use` field (see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)) to control whether UniDep installs the local path, forces PyPI, or skips the entry entirely.

### Overriding Nested Vendor Copies with `use`

**The Problem:** When vendoring dependencies as git submodules, you often encounter conflicts where a submodule bundles its own copy of a dependency you also use, but at a different version.
**The Solution:** Use `use: pypi` to force your PyPI package instead of the vendored copy, with automatic propagation to all nested references. #### Example: Override foo's bundled bar with your PyPI build Your project vendors `foo` as a submodule. Foo bundles `bar@1.0`, but you need `bar@2.0`: ``` project/ third_party/ foo/ # git submodule you don't control third_party/ bar/ # foo bundles bar@1.0 ``` **Solution with `use: pypi`:** ```yaml local_dependencies: - ./third_party/foo # Keep foo editable for development # Override: force YOUR PyPI build of bar - local: ./third_party/foo/third_party/bar pypi: my-bar>=2.0 use: pypi # Install from PyPI, skip local path ``` **What happens:** 1. `foo` stays local (editable for development) 2. `my-bar>=2.0` gets installed from PyPI (not foo's bundled v1.0) 3. **Propagates**: Every nested reference to `bar` uses your PyPI package 4. Works with `unidep install`, `unidep conda-lock`, all CLI commands This is the **key difference** from just using `pypi:` as a build-time fallback - `use: pypi` **forces the PyPI package during development** while keeping other local dependencies editable. --- ### All `use` values Tell UniDep what to **use** for each entry in `local_dependencies`: | `use` value | When to use | Installs from | Propagates override? | |------------|-------------|---------------|---------------------| | `local` *(default)* | Normal local development | Local path | - | | `pypi` | **Force PyPI** even when local exists | `pypi:` spec | Yes | | `skip` | Ignore this path entirely | Nothing | Yes | **Common patterns:** ```yaml local_dependencies: # Standard local development (default) - ../shared-lib # Force PyPI to override nested vendor copy - local: ./vendor/foo/nested/bar pypi: my-bar>=2.0 use: pypi # Skip a path without installing anything - local: ./deprecated-module use: skip ``` > [!NOTE] > **Precedence:** The `use` flag on the entry itself always wins. When UniDep encounters the same path in nested `local_dependencies`, it uses your override. Setting `UNIDEP_SKIP_LOCAL_DEPS=1` forces any effective `use: local` to behave like `pypi` (if specified) or `skip`, but does **not** override explicit `use: pypi` or `use: skip`. > [!WARNING] > If `use: pypi` is set but no `pypi:` requirement is provided, UniDep exits with a clear error so you can supply the missing spec. ### Build System Behavior **Important differences between build backends:** - **Setuptools**: Builds wheels containing `file://` URLs with absolute paths. These wheels only work on the original system. - **Hatchling**: Rejects `file://` URLs by default, preventing non-portable wheels. 
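Putting the precedence rules above together, the effective `use` value for an entry can be sketched as follows (illustrative only; `unidep`'s internal logic may differ):

```python
import os


# Illustrative sketch of the documented precedence: an explicit `use` on the
# entry always wins; UNIDEP_SKIP_LOCAL_DEPS=1 only downgrades the default.
def effective_use(use: str, has_pypi_spec: bool) -> str:
    if use == "pypi" and not has_pypi_spec:
        # Documented behavior: UniDep exits with an error in this case.
        raise ValueError("`use: pypi` requires a `pypi:` spec")
    if use in ("pypi", "skip"):
        return use  # explicit values are never overridden
    if os.environ.get("UNIDEP_SKIP_LOCAL_DEPS") == "1":
        return "pypi" if has_pypi_spec else "skip"
    return "local"  # the default: install from the local path


print(effective_use("local", has_pypi_spec=True))  # -> "local" (normal development)
print(effective_use("skip", has_pypi_spec=True))   # -> "skip" (explicit value wins)
```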
To ensure portable wheels, you can use the `UNIDEP_SKIP_LOCAL_DEPS` environment variable: ```bash # Force use of PyPI alternatives even when local paths exist UNIDEP_SKIP_LOCAL_DEPS=1 python -m build # For hatch projects UNIDEP_SKIP_LOCAL_DEPS=1 hatch build # For uv build UNIDEP_SKIP_LOCAL_DEPS=1 uv build ``` > [!NOTE] > **When `UNIDEP_SKIP_LOCAL_DEPS=1` is set:** > - Any effective `use: local` behaves as `use: pypi` (if a `pypi` spec exists) or `use: skip` > - Explicit `use: pypi` and `use: skip` remain unchanged > - Dependencies from local packages are still included (from their `requirements.yaml`/`pyproject.toml`) ### Example packages Explore these installable [example](example/) packages to understand how `unidep` integrates with different build tools and configurations: | Project | Build Tool | `pyproject.toml` | `requirements.yaml` | `setup.py` | | ---------------------------------------------------------- | ------------ | ---------------- | ------------------- | ---------- | | [`setup_py_project`](example/setup_py_project) | `setuptools` | ✅ | ✅ | ✅ | | [`setuptools_project`](example/setuptools_project) | `setuptools` | ✅ | ✅ | ❌ | | [`pyproject_toml_project`](example/pyproject_toml_project) | `setuptools` | ✅ | ❌ | ❌ | | [`hatch_project`](example/hatch_project) | `hatch` | ✅ | ✅ | ❌ | | [`hatch2_project`](example/hatch2_project) | `hatch` | ✅ | ❌ | ❌ | ### Setuptools Integration For projects using `setuptools`, configure `unidep` in `pyproject.toml` and either specify dependencies in a `requirements.yaml` file or include them in `pyproject.toml` too. - **Using `pyproject.toml` only**: The `[project.dependencies]` field in `pyproject.toml` gets automatically populated from `requirements.yaml` or from the `[tool.unidep]` section in `pyproject.toml`. - **Using `setup.py`**: The `install_requires` field in `setup.py` automatically reflects dependencies specified in `requirements.yaml` or `pyproject.toml`. **Example `pyproject.toml` Configuration**: ```toml [build-system] build-backend = "setuptools.build_meta" requires = ["setuptools", "unidep"] [project] dynamic = ["dependencies"] ``` ### Hatchling Integration For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured in `pyproject.toml` to automatically process the dependencies from `requirements.yaml` or from the `[tool.unidep]` section in `pyproject.toml`. **Example Configuration for Hatch**: ```toml [build-system] requires = ["hatchling", "unidep"] build-backend = "hatchling.build" [project] dynamic = ["dependencies"] # Additional project configurations [tool.hatch.metadata.hooks.unidep] # Enable the unidep plugin [tool.hatch.metadata] allow-direct-references = true [tool.unidep] # Your dependencies configuration ``` ## :desktop_computer: As a CLI See [example](example/) for more information or check the output of `unidep -h` for the available sub commands: ```bash usage: unidep [-h] {merge,install,install-all,conda-lock,pixi,pip-compile,pip,conda,version} ... Unified Conda and Pip requirements management. positional arguments: {merge,install,install-all,conda-lock,pixi,pip-compile,pip,conda,version} Subcommands merge Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files into a single Conda installable `environment.yaml` file. install Automatically install all dependencies from one or more `requirements.yaml` or `pyproject.toml` files. This command first installs dependencies with Conda, then with Pip. 
Finally, it installs local packages (those containing the `requirements.yaml` or `pyproject.toml` files) using `pip install [-e] ./project`. install-all Install dependencies from all `requirements.yaml` or `pyproject.toml` files found in the current directory or specified directory. This command first installs dependencies using Conda, then Pip, and finally the local packages. conda-lock Generate a global `conda-lock.yml` file for a collection of `requirements.yaml` or `pyproject.toml` files. Additionally, create individual `conda- lock.yml` files for each `requirements.yaml` or `pyproject.toml` file consistent with the global lock file. pixi Generate a `pixi.toml` file from `requirements.yaml` or `pyproject.toml` files. pip-compile Generate a fully pinned `requirements.txt` file from one or more `requirements.yaml` or `pyproject.toml` files using `pip-compile` from `pip-tools`. This command consolidates all pip dependencies defined in the `requirements.yaml` or `pyproject.toml` files and compiles them into a single `requirements.txt` file, taking into account the specific versions and dependencies of each package. pip Get the pip requirements for the current platform only. conda Get the conda requirements for the current platform only. version Print version information of unidep. options: -h, --help show this help message and exit ``` ### `unidep merge` Use `unidep merge` to scan directories for `requirements.yaml` file(s) and combine them into an `environment.yaml` file. Optional dependency groups can be included with `--optional-dependencies docs test` or `--all-optional-dependencies`. See `unidep merge -h` for more information: ```bash usage: unidep merge [-h] [-o OUTPUT] [-n NAME] [--stdout] [--selector {sel,comment}] [--optional-dependencies GROUP [GROUP ...] | --all-optional-dependencies] [-d DIRECTORY] [--depth DEPTH] [-v] [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}] [--skip-dependency SKIP_DEPENDENCY] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files into a single Conda installable `environment.yaml` file. Example usage: `unidep merge --directory . --depth 1 --output environment.yaml` to search for `requirements.yaml` or `pyproject.toml` files in the current directory and its subdirectories and create `environment.yaml`. These are the defaults, so you can also just run `unidep merge`. For Pixi support, use `unidep pixi`. options: -h, --help show this help message and exit -o, --output OUTPUT Output file for the conda environment, by default `environment.yaml` -n, --name NAME Name of the conda environment, by default `myenv` --stdout Output to stdout instead of a file --selector {sel,comment} The selector to use for the environment markers, if `sel` then `- numpy # [linux]` becomes `sel(linux): numpy`, if `comment` then it remains `- numpy # [linux]`, by default `sel` --optional-dependencies GROUP [GROUP ...] Include the named optional dependency group(s) from the discovered requirements files. --all-optional-dependencies Include all optional dependency groups from the discovered requirements files. -d, --directory DIRECTORY Base directory to scan for `requirements.yaml` or `pyproject.toml` file(s), by default `.` --depth DEPTH Maximum depth to scan for `requirements.yaml` or `pyproject.toml` files, by default 1 -v, --verbose Print verbose output -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64} The platform(s) to get the requirements for. 
Multiple platforms can be specified. If omitted, behavior is command-specific: platforms may be inferred from requirements files, otherwise the current platform is used. --skip-dependency SKIP_DEPENDENCY Skip installing a specific dependency that is in one of the `requirements.yaml` or `pyproject.toml` files. This option can be used multiple times, each time specifying a different package to skip. For example, use `--skip-dependency pandas` to skip installing pandas. --ignore-pin IGNORE_PIN Ignore the version pin for a specific package, e.g., `--ignore-pin numpy`. This option can be repeated to ignore multiple packages. --overwrite-pin OVERWRITE_PIN Overwrite the version pin for a specific package, e.g., `--overwrite-pin 'numpy=1.19.2'`. This option can be repeated to overwrite the pins of multiple packages. ``` ### `unidep install` Use `unidep install` on one or more `requirements.yaml` files and install the dependencies on the current platform using conda, then install the remaining dependencies with pip, and finally install the current package with `pip install [-e] .`. See `unidep install -h` for more information: ```bash usage: unidep install [-h] [-v] [-e] [--skip-local] [--skip-pip] [--skip-conda] [--skip-dependency SKIP_DEPENDENCY] [--no-dependencies] [--conda-executable {conda,mamba,micromamba}] [-n CONDA_ENV_NAME | -p CONDA_ENV_PREFIX] [--dry-run] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] [-f CONDA_LOCK_FILE] [--no-uv] files [files ...] Automatically install all dependencies from one or more `requirements.yaml` or `pyproject.toml` files. This command first installs dependencies with Conda, then with Pip. Finally, it installs local packages (those containing the `requirements.yaml` or `pyproject.toml` files) using `pip install [-e] ./project`. Example usage: `unidep install .` for a single project. For multiple projects: `unidep install ./project1 ./project2`. The command accepts both file paths and directories containing a `requirements.yaml` or `pyproject.toml` file. Use `--editable` or `-e` to install the local packages in editable mode. See `unidep install-all` to install all `requirements.yaml` or `pyproject.toml` files in and below the current folder. positional arguments: files The `requirements.yaml` or `pyproject.toml` file(s) to parse or folder(s) that contain those file(s), by default `.` options: -h, --help show this help message and exit -v, --verbose Print verbose output -e, --editable Install the project in editable mode --skip-local Skip installing local dependencies --skip-pip Skip installing pip dependencies from `requirements.yaml` or `pyproject.toml` --skip-conda Skip installing conda dependencies from `requirements.yaml` or `pyproject.toml` --skip-dependency SKIP_DEPENDENCY Skip installing a specific dependency that is in one of the `requirements.yaml` or `pyproject.toml` files. This option can be used multiple times, each time specifying a different package to skip. For example, use `--skip-dependency pandas` to skip installing pandas. --no-dependencies, --no-deps Skip installing dependencies from `requirements.yaml` or `pyproject.toml` file(s) and only install local package(s). Useful after installing a `conda-lock.yml` file because then all dependencies have already been installed. 
  --conda-executable {conda,mamba,micromamba}
                        The conda executable to use
  -n, --conda-env-name CONDA_ENV_NAME
                        Name of the conda environment, if not provided, the
                        currently active environment name is used, unless
                        `--conda-env-prefix` is provided
  -p, --conda-env-prefix CONDA_ENV_PREFIX
                        Path to the conda environment, if not provided, the
                        currently active environment path is used, unless
                        `--conda-env-name` is provided
  --dry-run, --dry      Only print the commands that would be run
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  -f, --conda-lock-file CONDA_LOCK_FILE
                        Path to the `conda-lock.yml` file to use for creating
                        the new environment. Assumes that the lock file
                        contains all dependencies. Must be used with
                        `--conda-env-name` or `--conda-env-prefix`.
  --no-uv               Disables the use of `uv` for pip install. By default,
                        `uv` is used if it is available in the PATH.
```

### `unidep install-all`

Use `unidep install-all` on a folder with packages that contain `requirements.yaml` files and install the dependencies on the current platform using conda, then install the remaining dependencies with pip, and finally install the local packages with `pip install [-e] ./package1 ./package2`. See `unidep install-all -h` for more information:

```bash
usage: unidep install-all [-h] [-v] [-e] [--skip-local] [--skip-pip]
                          [--skip-conda] [--skip-dependency SKIP_DEPENDENCY]
                          [--no-dependencies]
                          [--conda-executable {conda,mamba,micromamba}]
                          [-n CONDA_ENV_NAME | -p CONDA_ENV_PREFIX]
                          [--dry-run] [--ignore-pin IGNORE_PIN]
                          [--overwrite-pin OVERWRITE_PIN]
                          [-f CONDA_LOCK_FILE] [--no-uv] [-d DIRECTORY]
                          [--depth DEPTH]

Install dependencies from all `requirements.yaml` or `pyproject.toml` files
found in the current directory or specified directory. This command first
installs dependencies using Conda, then Pip, and finally the local packages.
Use `--editable` or `-e` to install the local packages in editable mode.

options:
  -h, --help            show this help message and exit
  -v, --verbose         Print verbose output
  -e, --editable        Install the project in editable mode
  --skip-local          Skip installing local dependencies
  --skip-pip            Skip installing pip dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-conda          Skip installing conda dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --no-dependencies, --no-deps
                        Skip installing dependencies from `requirements.yaml`
                        or `pyproject.toml` file(s) and only install local
                        package(s). Useful after installing a
                        `conda-lock.yml` file because then all dependencies
                        have already been installed.
  --conda-executable {conda,mamba,micromamba}
                        The conda executable to use
  -n, --conda-env-name CONDA_ENV_NAME
                        Name of the conda environment, if not provided, the
                        currently active environment name is used, unless
                        `--conda-env-prefix` is provided
  -p, --conda-env-prefix CONDA_ENV_PREFIX
                        Path to the conda environment, if not provided, the
                        currently active environment path is used, unless
                        `--conda-env-name` is provided
  --dry-run, --dry      Only print the commands that would be run
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  -f, --conda-lock-file CONDA_LOCK_FILE
                        Path to the `conda-lock.yml` file to use for creating
                        the new environment. Assumes that the lock file
                        contains all dependencies. Must be used with
                        `--conda-env-name` or `--conda-env-prefix`.
  --no-uv               Disables the use of `uv` for pip install. By default,
                        `uv` is used if it is available in the PATH.
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
```

### `unidep conda-lock`

Use `unidep conda-lock` on one or multiple `requirements.yaml` or `pyproject.toml` files and output the conda-lock file. Optionally, when using a monorepo with multiple subpackages (with their own `requirements.yaml` files), generate a lock file for each subpackage. See `unidep conda-lock -h` for more information:

```bash
usage: unidep conda-lock [-h] [--only-global] [--lockfile LOCKFILE]
                         [--check-input-hash] [-d DIRECTORY] [--depth DEPTH]
                         [-f FILE] [-v]
                         [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                         [--skip-dependency SKIP_DEPENDENCY]
                         [--ignore-pin IGNORE_PIN]
                         [--overwrite-pin OVERWRITE_PIN]
                         ...

Generate a global `conda-lock.yml` file for a collection of
`requirements.yaml` or `pyproject.toml` files. Additionally, create
individual `conda-lock.yml` files for each `requirements.yaml` or
`pyproject.toml` file consistent with the global lock file. Example usage:
`unidep conda-lock --directory ./projects` to generate conda-lock files for
all `requirements.yaml` or `pyproject.toml` files in the `./projects`
directory. Use `--only-global` to generate only the global lock file. The
`--check-input-hash` option can be used to avoid regenerating lock files if
the input hasn't changed.

positional arguments:
  extra_flags           Extra flags to pass to `conda-lock lock`. These flags
                        are passed directly and should be provided in the
                        format expected by `conda-lock lock`. For example,
                        `unidep conda-lock -- --micromamba`. Note that the
                        `--` is required to separate the flags for `unidep
                        conda-lock` from the flags for `conda-lock lock`.

options:
  -h, --help            show this help message and exit
  --only-global         Only generate the global lock file
  --lockfile LOCKFILE   Specify a path for the global lockfile (default:
                        `conda-lock.yml` in current directory). Path should
                        be relative, e.g., `--lockfile
                        ./locks/example.conda-lock.yml`.
  --check-input-hash    Check existing input hashes in lockfiles before
                        regenerating lock files. This flag is directly passed
                        to `conda-lock`.
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -f, --file FILE       A single `requirements.yaml` or `pyproject.toml` file
                        to use, or folder that contains that file. This is an
                        alternative to using `--directory` which searches for
                        all `requirements.yaml` or `pyproject.toml` files in
                        the directory and its subdirectories.
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

### `unidep pixi`

Use `unidep pixi` to generate a `pixi.toml` file from your `requirements.yaml` or `pyproject.toml` files. This enables using [Pixi](https://pixi.sh/) for solving/locking/installing while keeping UniDep as your source of truth. The philosophy is **"Let UniDep translate, let Pixi resolve"**.

**Workflow:**

```bash
# 1. Generate pixi.toml from your requirements
unidep pixi

# 2. Use pixi directly
pixi install
pixi lock
pixi run
```

#### What `unidep pixi` generates

- A `[workspace]` section with `name`, `channels`, and `platforms`
- Conda deps in `[dependencies]`
- PyPI deps in `[pypi-dependencies]`
- Selector/platform-specific deps in `[target.<platform>.dependencies]` and/or `[target.<platform>.pypi-dependencies]`
- Optional dependency groups as Pixi features (`[feature.<name>.*]`)
- Local installable projects as editable path deps:

```toml
[pypi-dependencies]
my_pkg = { path = "./relative/path", editable = true }
```

In monorepo mode (multiple input files), UniDep builds feature sections per discovered project and composes environments from those features.

#### Dependency reconciliation rules (important)

When the same package appears from both conda and pip, UniDep applies deterministic rules before writing `pixi.toml`:

1. If pip has extras (`foo[bar]`), pip wins.
2. If only one side is pinned, pinned wins.
3. On ties (both pinned or both unpinned), conda wins.
4. When both sides are pinned and one declaration is narrower in platform scope, the narrower target-specific intent wins on that target. Other platforms continue through the same shared selection rules independently.

Version pins from repeated entries are merged when possible (for example `>=1.7,<2` + `<1.16` → `>=1.7,<1.16`).

#### Channels/platforms precedence

- **Channels**
  - If `--channel` is passed: use only CLI-provided channels.
  - Else: collect channels from requirement files.
  - Else fallback: `conda-forge`.
- **Platforms**
  - If `--platform` is passed: use CLI-provided platforms.
  - Else: use platforms declared in files.
  - Else: infer from selectors in dependencies.
- Else fallback: current platform. #### Example (single-file) Input (`requirements.yaml`): ```yaml channels: - conda-forge dependencies: - numpy >=1.26 - pip: rich - pip: uvloop # [linux64] optional_dependencies: dev: - pytest platforms: - linux-64 - osx-64 ``` Representative output shape (`pixi.toml`): ```toml [workspace] name = "my-project" channels = ["conda-forge"] platforms = ["linux-64", "osx-64"] [dependencies] numpy = ">=1.26" [pypi-dependencies] rich = "*" [target.linux-64.pypi-dependencies] uvloop = "*" [feature.dev.dependencies] pytest = "*" [environments] default = [] dev = ["dev"] ``` See `unidep pixi -h` for more information: ```bash usage: unidep pixi [-h] [-o OUTPUT] [-n NAME] [--stdout] [-c CHANNEL] [-d DIRECTORY] [--depth DEPTH] [-f FILE] [-v] [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}] [--skip-dependency SKIP_DEPENDENCY] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] Generate a `pixi.toml` file from `requirements.yaml` or `pyproject.toml` files. Example usage: `unidep pixi` to generate a pixi.toml file. Use `--output` to specify a different output path. Use `--name` to set the project name. After generating, use `pixi lock` and `pixi install` directly. options: -h, --help show this help message and exit -o, --output OUTPUT Output path for pixi.toml (default: pixi.toml in current directory) -n, --name NAME Name of the project (default: current directory name) --stdout Output to stdout instead of a file -c, --channel CHANNEL Conda channel to include. Can be repeated. Overrides channels declared in requirements files. If omitted, channels are read from the requirements files (defaulting to conda-forge). -d, --directory DIRECTORY Base directory to scan for `requirements.yaml` or `pyproject.toml` file(s), by default `.` --depth DEPTH Maximum depth to scan for `requirements.yaml` or `pyproject.toml` files, by default 1 -f, --file FILE A single `requirements.yaml` or `pyproject.toml` file to use, or folder that contains that file. This is an alternative to using `--directory` which searches for all `requirements.yaml` or `pyproject.toml` files in the directory and its subdirectories. -v, --verbose Print verbose output -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64} The platform(s) to get the requirements for. Multiple platforms can be specified. If omitted, behavior is command-specific: platforms may be inferred from requirements files, otherwise the current platform is used. --skip-dependency SKIP_DEPENDENCY Skip installing a specific dependency that is in one of the `requirements.yaml` or `pyproject.toml` files. This option can be used multiple times, each time specifying a different package to skip. For example, use `--skip-dependency pandas` to skip installing pandas. --ignore-pin IGNORE_PIN Ignore the version pin for a specific package, e.g., `--ignore-pin numpy`. This option can be repeated to ignore multiple packages. --overwrite-pin OVERWRITE_PIN Overwrite the version pin for a specific package, e.g., `--overwrite-pin 'numpy=1.19.2'`. This option can be repeated to overwrite the pins of multiple packages. ``` > [!TIP] > Install Pixi-related optional dependencies with: `pip install "unidep[pixi]"` ### `unidep pip-compile` Use `unidep pip-compile` on one or multiple `requirements.yaml` files and output a fully locked `requirements.txt` file using `pip-compile` from [`pip-tools`](https://pip-tools.readthedocs.io/en/latest/). 
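For instance, to compile everything under a hypothetical `./projects` folder into a single locked `requirements.txt`, or to forward extra flags to `pip-compile` after a `--` separator (the flags below are the ones documented in the help text; the folder name is illustrative):

```bash
# Consolidate all pip dependencies found in ./projects into requirements.txt
unidep pip-compile --directory ./projects --output-file requirements.txt

# Everything after `--` is passed through to pip-compile itself
unidep pip-compile -- --generate-hashes --allow-unsafe
```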
See `unidep pip-compile -h` for more information:

```bash
usage: unidep pip-compile [-h] [-o OUTPUT_FILE] [-d DIRECTORY]
                          [--depth DEPTH] [-v]
                          [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                          [--skip-dependency SKIP_DEPENDENCY]
                          [--ignore-pin IGNORE_PIN]
                          [--overwrite-pin OVERWRITE_PIN]
                          ...

Generate a fully pinned `requirements.txt` file from one or more
`requirements.yaml` or `pyproject.toml` files using `pip-compile` from
`pip-tools`. This command consolidates all pip dependencies defined in the
`requirements.yaml` or `pyproject.toml` files and compiles them into a single
`requirements.txt` file, taking into account the specific versions and
dependencies of each package. Example usage: `unidep pip-compile --directory
./projects` to generate a `requirements.txt` file for all
`requirements.yaml` or `pyproject.toml` files in the `./projects` directory.
Use `--output-file requirements.txt` to specify a different output file.

positional arguments:
  extra_flags           Extra flags to pass to `pip-compile`. These flags are
                        passed directly and should be provided in the format
                        expected by `pip-compile`. For example, `unidep
                        pip-compile -- --generate-hashes --allow-unsafe`.
                        Note that the `--` is required to separate the flags
                        for `unidep pip-compile` from the flags for
                        `pip-compile`.

options:
  -h, --help            show this help message and exit
  -o, --output-file OUTPUT_FILE
                        Output file for the pip requirements, by default
                        `requirements.txt`
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

### `unidep pip`

Use `unidep pip` on a `requirements.yaml` file and output the pip-installable dependencies on the current platform (default). See `unidep pip -h` for more information:

```bash
usage: unidep pip [-h] [-f FILE] [-v]
                  [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                  [--skip-dependency SKIP_DEPENDENCY]
                  [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN]
                  [--separator SEPARATOR]

Get the pip requirements for the current platform only. Example usage:
`unidep pip --file folder1 --file folder2/requirements.yaml --separator ' '
--platform linux-64` to extract all the pip dependencies specific to the
linux-64 platform. Note that the `--file` argument can be used multiple times
to specify multiple `requirements.yaml` or `pyproject.toml` files and that
--file can also be a folder that contains a `requirements.yaml` or
`pyproject.toml` file.
options: -h, --help show this help message and exit -f, --file FILE The `requirements.yaml` or `pyproject.toml` file to parse, or folder that contains that file, by default `.` -v, --verbose Print verbose output -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64} The platform(s) to get the requirements for. Multiple platforms can be specified. If omitted, behavior is command-specific: platforms may be inferred from requirements files, otherwise the current platform is used. --skip-dependency SKIP_DEPENDENCY Skip installing a specific dependency that is in one of the `requirements.yaml` or `pyproject.toml` files. This option can be used multiple times, each time specifying a different package to skip. For example, use `--skip-dependency pandas` to skip installing pandas. --ignore-pin IGNORE_PIN Ignore the version pin for a specific package, e.g., `--ignore-pin numpy`. This option can be repeated to ignore multiple packages. --overwrite-pin OVERWRITE_PIN Overwrite the version pin for a specific package, e.g., `--overwrite-pin 'numpy=1.19.2'`. This option can be repeated to overwrite the pins of multiple packages. --separator SEPARATOR The separator between the dependencies, by default ` ` ``` ### `unidep conda` Use `unidep conda` on a `requirements.yaml` file and output the conda installable dependencies on the current platform (default). See `unidep conda -h` for more information: ```bash usage: unidep conda [-h] [-f FILE] [-v] [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}] [--skip-dependency SKIP_DEPENDENCY] [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN] [--separator SEPARATOR] Get the conda requirements for the current platform only. Example usage: `unidep conda --file folder1 --file folder2/requirements.yaml --separator ' ' --platform linux-64` to extract all the conda dependencies specific to the linux-64 platform. Note that the `--file` argument can be used multiple times to specify multiple `requirements.yaml` or `pyproject.toml` files and that --file can also be a folder that contains a `requirements.yaml` or `pyproject.toml` file. options: -h, --help show this help message and exit -f, --file FILE The `requirements.yaml` or `pyproject.toml` file to parse, or folder that contains that file, by default `.` -v, --verbose Print verbose output -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64} The platform(s) to get the requirements for. Multiple platforms can be specified. If omitted, behavior is command-specific: platforms may be inferred from requirements files, otherwise the current platform is used. --skip-dependency SKIP_DEPENDENCY Skip installing a specific dependency that is in one of the `requirements.yaml` or `pyproject.toml` files. This option can be used multiple times, each time specifying a different package to skip. For example, use `--skip-dependency pandas` to skip installing pandas. --ignore-pin IGNORE_PIN Ignore the version pin for a specific package, e.g., `--ignore-pin numpy`. This option can be repeated to ignore multiple packages. --overwrite-pin OVERWRITE_PIN Overwrite the version pin for a specific package, e.g., `--overwrite-pin 'numpy=1.19.2'`. This option can be repeated to overwrite the pins of multiple packages. 
  --separator SEPARATOR
                        The separator between the dependencies, by default
                        ` `
```

## ❓ FAQ

Here is a list of questions users have asked us, along with potential pitfalls we hope to help users avoid:

### **Q: When to use UniDep?**

**A:** UniDep is particularly useful for setting up full development environments that require both Python *and* non-Python dependencies (e.g., CUDA, compilers, etc.) with a single command.

In fields like research, data science, robotics, AI, and ML, it is common to work from a locally cloned Git repository. Setting up a full development environment can be a pain, especially if you need to install non-Python dependencies like compilers, low-level numerical libraries, or CUDA (luckily Conda has all of them). Typically, instructions are different for each OS and their corresponding package managers (`apt`, `brew`, `yum`, `winget`, etc.).

With UniDep, you can specify all your Pip and Conda dependencies in a single file. To get set up on a new machine, you just need to install Conda (we recommend [micromamba](https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html)) and run `pip install unidep; unidep install-all -e` in your project directory to install all dependencies and local packages in editable mode in the current Conda environment.

For fully reproducible environments, you can run `unidep conda-lock` to generate a `conda-lock.yml` file. Then, run `conda env create -f conda-lock.yml -n myenv` to create a new Conda environment with all the third-party dependencies. Finally, run `unidep install-all -e --no-dependencies` to install all your local packages in editable mode.

For those who prefer not to use Conda, you can simply run `pip install -e .` on a project using UniDep. You'll need to install the non-Python dependencies yourself, but you'll have a list of them in the `requirements.yaml` file.

In summary, use UniDep if you:

- Prefer installing packages with conda but still want your package to be pip installable.
- Are tired of synchronizing your Pip requirements (`requirements.txt`) and Conda requirements (`environment.yaml`).
- Want a low-effort, comprehensive development environment setup.

### **Q: Just show me a full example!**

**A:** Check out the [`example` folder](https://github.com/basnijholt/unidep/tree/main/example).

### **Q: Uses of UniDep in the wild?**

**A:** UniDep really shines when used in a monorepo with multiple dependent projects; however, since these are typically private, we cannot share them. An example of a public single package is [`home-assistant-streamdeck-yaml`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/).

This is a Python package that allows you to interact with [Home Assistant](https://www.home-assistant.io/) from an Elgato Stream Deck connected via USB to e.g., a Raspberry Pi. It requires a couple of system dependencies (e.g., `libusb` and `hidapi`), which are typically installed with `apt` or `brew`. The [`README.md`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/main/README.md) shows different installation instructions on Linux, macOS, and Windows for non-Conda installs; however, with UniDep, we can just use `unidep install .` on all platforms. It is fully configured via [`pyproject.toml`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/main/pyproject.toml).

The two `Dockerfile`s show two different ways of using UniDep:
1. [`Dockerfile.locked`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/a1b9966398dfe748804f058f82d546e47cd7f722/Dockerfile.locked): Installing `conda-lock.yml` (generated with `unidep conda-lock`) and then `pip install .` the local package.
2. [`Dockerfile.latest`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/a1b9966398dfe748804f058f82d546e47cd7f722/Dockerfile.latest): Using `unidep install .` to install all dependencies, first with conda, then pip, then the local package.

### **Q: How do I force PyPI instead of a local path for one dependency?**

**A:** Use `use: pypi` to force the PyPI package even during development (see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)). This is especially useful for overriding nested vendor copies while keeping other local dependencies editable.

```yaml
local_dependencies:
  - local: ./path/to/dep
    pypi: my-package>=1.0
    use: pypi # Force PyPI, skip local path
```

### **Q: How do I ignore a local dependency entirely?**

**A:** Set `use: skip` on that entry. It won't be installed and UniDep won't recurse into it. See [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use) for details.

### **Q: A submodule brings its own copy of package X. How do I avoid conflicts?**

**A:** Use `use: pypi` as shown in [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use). In short:

```yaml
local_dependencies:
  - ./third_party/foo # Keep foo editable
  - local: ./third_party/foo/third_party/bar
    pypi: my-bar>=2.0
    use: pypi # Force YOUR PyPI build of bar
```

This propagates to **every** nested reference, so `foo`'s bundled `bar` gets replaced with your PyPI package.

### **Q: How is this different from conda/mamba/pip?**

**A:** UniDep uses pip and conda under the hood to install dependencies, but it is not a replacement for them. UniDep will print the commands it runs, so you can see exactly what it is doing.

### **Q: I found a project using unidep, now what?**

**A:** You can install it like *any other Python package* using `pip install`. However, to take full advantage of UniDep's functionality, clone the repository and run `unidep install-all -e` in the project directory. This installs all dependencies in editable mode in the current Conda environment.

### **Q: How to handle local dependencies that do not use UniDep?**

**A:** You can use the `local_dependencies` field in the `requirements.yaml` or `pyproject.toml` file to specify local dependencies. However, *if* a local dependency is *not* managed by UniDep, UniDep will skip installing its dependencies! To include all its dependencies, either convert the package to use UniDep (🏆), or maintain a separate `requirements.yaml` file; e.g., for a package called `foo`, create `foo-requirements.yaml`:

```yaml
dependencies: # List the dependencies of foo here
  - numpy
  - scipy
  - matplotlib
  - bar

local_dependencies:
  - ./path/to/foo # This is the path to the package
```

Then, in the `requirements.yaml` or `pyproject.toml` file of the package that uses `foo`, list `foo-requirements.yaml` as a local dependency:

```yaml
local_dependencies:
  - ./path/to/foo-requirements.yaml
```

### **Q: Can't Conda already do this?**

**A:** Not quite. Conda can indeed install both Conda and Pip dependencies via an `environment.yaml` file; however, it does not work the other way around: Pip cannot install the `pip` dependencies from an `environment.yaml` file.
This means that if you want your package to be installable with `pip install -e .` *and* support Conda, you need to maintain two separate files: `environment.yaml` and `requirements.txt` (or specify these dependencies in `pyproject.toml` or `setup.py`).

### **Q: What is the difference between `conda-lock` and `unidep conda-lock`?**

**A:** [`conda-lock`](https://github.com/conda/conda-lock) is a standalone tool that creates a `conda-lock.yml` file from an `environment.yaml` file. On the other hand, `unidep conda-lock` is a command within the UniDep tool that also generates a `conda-lock.yml` file (leveraging `conda-lock`), but it does so from one or more `requirements.yaml` or `pyproject.toml` files.

When managing multiple dependent projects (e.g., in a monorepo), a unique feature of `unidep conda-lock` is its ability to create **_consistent_** individual `conda-lock.yml` files for each `requirements.yaml` or `pyproject.toml` file, ensuring consistency with a global `conda-lock.yml` file. This feature is not available in the standalone `conda-lock` tool.

### **Q: What is the difference between `hatch-conda` / `pdm-conda` and `unidep`?**

**A:** [`hatch-conda`](https://github.com/OldGrumpyViking/hatch-conda) is a plugin for [`hatch`](https://hatch.pypa.io/latest/) that integrates Conda environments into `hatch`. A key difference is that `hatch-conda` keeps Conda and Pip dependencies separate, choosing to install packages with either Conda *or* Pip. This makes Conda a hard requirement: for example, if `numba` is specified for Conda, it cannot be installed with Pip despite being available on PyPI.

In contrast, [UniDep](https://github.com/basnijholt/unidep/) does not require Conda. Without Conda, it can still install any dependency that is available on PyPI (e.g., `numba` is both Conda and Pip installable). However, without Conda, UniDep will not install dependencies exclusive to Conda. These Conda-specific dependencies can often be installed through alternative package managers like `apt`, `brew`, `yum`, or by building them from source.

Another key difference is that `hatch-conda` manages [Hatch environments](https://hatch.pypa.io/latest/environment/), whereas `unidep` can install Pip dependencies in the current Python environment (venv, Conda, Hatch, etc.). However, to use UniDep optimally, we recommend Conda environments so that non-Python dependencies can be installed as well. Similar to `hatch-conda`, `unidep` also integrates with Hatchling, but it works in a slightly different way.

[`pdm-conda`](https://github.com/macro128/pdm-conda) is a plugin for [`pdm`](https://pdm-project.org/) designed to facilitate the use of Conda environments in conjunction with `pdm`. Like `hatch-conda`, `pdm-conda` opts to install packages either with Conda or Pip. It is closely integrated with `pdm`, primarily enabling the inclusion of Conda packages in `pdm`'s lock file (`pdm.lock`). However, `pdm-conda` lacks extensive cross-platform support. For instance, when adding a package like Numba using `pdm-conda`, it gets locked to the current platform (e.g., `osx-arm64`) without the flexibility to specify compatibility for other platforms such as `linux-64`. In contrast, UniDep allows for cross-platform compatibility, enabling the user to specify dependencies for multiple platforms. UniDep currently does not support `pdm`, but it does support Hatchling and Setuptools.
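To make this concrete: the platform flags documented in the help texts above can be repeated, so the same requirement files can be resolved and locked for several platforms in one run. A minimal sketch (the platform choices are illustrative):

```bash
# Lock the same set of requirements for linux-64 and osx-arm64 at once
unidep conda-lock --platform linux-64 --platform osx-arm64
```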
UniDep stands out from both `pdm-conda` and `hatch-conda` with its additional functionalities, particularly beneficial for monorepos and projects spanning multiple operating systems. For instance:

1. **Conda Lock Files**: Create `conda-lock.yml` files for all packages with consistent sub-lock files per package.
2. **CLI tools**: Provides tools like `unidep install-all -e`, which installs multiple local projects (e.g., in a monorepo) and all their dependencies: first with Conda, then the remaining ones with Pip, and finally the local packages in editable mode with Pip.
3. **Conda Environment Files**: Can create standard Conda `environment.yaml` files by combining the dependencies from many `requirements.yaml` or `pyproject.toml` files.
4. **Platform-Specific Dependencies**: Allows specifying dependencies for certain platforms (e.g., `linux-64`, `osx-arm64`), enhancing cross-platform compatibility.

## :hammer_and_wrench: Troubleshooting

### `pip install` fails with `FileNotFoundError`

When using a project that uses `local_dependencies: [../not/current/dir]` in the `requirements.yaml` file:

```yaml
local_dependencies:
  # File in a different directory than the pyproject.toml file
  - ../common-requirements.yaml
```

You might get an error like this when using a `pip` version older than `22.0`:

```bash
$ pip install /path/to/your/project/using/unidep
...
  File "/usr/lib/python3.8/pathlib.py", line 1222, in open
    return io.open(self, mode, buffering, encoding, errors, newline,
  File "/usr/lib/python3.8/pathlib.py", line 1078, in _opener
    return self._accessor.open(self, flags, mode)
FileNotFoundError: [Errno 2] No such file or directory: '/tmp/common-requirements.yaml'
```

The solution is to upgrade `pip` to version `22.0` or newer:

```bash
pip install --upgrade pip
```

## :warning: Limitations

- **Conda-Focused**: Best suited for Conda environments. However, note that having `conda` is not a requirement to install packages that use UniDep.
- **Setuptools and Hatchling only**: Currently only works with setuptools and Hatchling, not flit, poetry, or other build systems. Open an issue if you'd like to see support for other build systems.
- No [logic operators in platform selectors](https://github.com/basnijholt/unidep/issues/5) and [no Python selectors](https://github.com/basnijholt/unidep/issues/7).

* * *

Try `unidep` today for a streamlined approach to managing your Conda environment dependencies across multiple projects! 🎉👏


================================================
FILE: bootstrap.sh
================================================
#!/usr/bin/env bash
# Run this script with:
# "${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/main/bootstrap.sh)
#
# 🚀 UniDep - Unified Conda and Pip Dependency Management 🚀
#
# This script downloads and installs:
# - micromamba to ~/.local/bin/micromamba (for fast Conda environment management)
# - uv to ~/.local/bin/uv (for fast pip installations)
# - unidep (to manage unified Conda and Pip dependencies)
#
# UniDep streamlines Python project dependency management by combining both Conda
# and Pip dependencies into a single system. For more information, visit:
# https://github.com/basnijholt/unidep
#
# If you prefer to run the commands manually, you can execute each section one by one.
# Otherwise, piping this script directly to your default shell ensures everything is installed in one go.
echo "Downloading and installing micromamba to ~/.local/bin/micromamba and uv to ~/.local/bin/uv" # Install micromamba (https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html) "${SHELL}" <(curl -LsSf micro.mamba.pm/install.sh) < /dev/null # Install uv (https://docs.astral.sh/uv/getting-started/installation/) curl -LsSf https://astral.sh/uv/install.sh | sh # Install unidep using uv ~/.local/bin/uv tool install --quiet -U "unidep[all]" echo "Done installing micromamba, uv, and unidep" ================================================ FILE: docs/Makefile ================================================ # Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) clean: @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) rm -rf $(BUILDDIR)/* rm -f $(SOURCEDIR)/*.md ================================================ FILE: docs/source/.gitignore ================================================ *.md ================================================ FILE: docs/source/conf.py ================================================ """Spinx configuration file for the unidep documentation. The documentation is generated from the README.md file in the root of the repository. The README.md file is copied to the Sphinx source directory and processed to generate the documentation. The following transformations are applied to the README.md file: 1. Replace named emojis with unicode emojis. 2. Replace markdown alerts with admonitions. 3. Replace relative links to `example/` files with absolute links to GitHub. 4. Fix anchors with named emojis. 5. Split the README.md file into individual sections based on second-level headers. 6. Extract the table of contents links from the processed README. 7. Replace links in each section to point to the correct section. 8. Decrease the header levels by one in each section. 9. Rename the first section to `introduction.md` and update its header. 10. Write an index file for the documentation. This code is tightly coupled with the structure of the README.md file and the table of contents generated by the doctoc tool. 
""" from __future__ import annotations import os import re import shutil import sys import textwrap from pathlib import Path package_path = Path("../..").resolve() sys.path.insert(0, str(package_path)) PYTHON_PATH = os.environ.get("PYTHONPATH", "") os.environ["PYTHONPATH"] = f"{package_path}:{PYTHON_PATH}" docs_path = Path("..").resolve() sys.path.insert(1, str(docs_path)) import unidep # noqa: E402 project = "unidep" copyright = "2023, Bas Nijholt" # noqa: A001 author = "Bas Nijholt" version = unidep.__version__ release = unidep.__version__ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", "sphinx.ext.napoleon", "myst_parser", "sphinx_autodoc_typehints", ] autosectionlabel_maxdepth = 5 myst_heading_anchors = 0 templates_path = ["_templates"] source_suffix = [".rst", ".md"] master_doc = "index" language = "en" pygments_style = "sphinx" html_theme = "furo" html_static_path = ["_static"] htmlhelp_basename = "unidepdoc" default_role = "autolink" intersphinx_mapping = { "python": ("https://docs.python.org/3", None), } html_logo = "https://github.com/basnijholt/nijho.lt/raw/2cf0045f9609a176cb53422c591fde946459669d/content/project/unidep/unidep-logo.webp" def replace_named_emojis(input_file: Path, output_file: Path) -> None: """Replace named emojis in a file with unicode emojis.""" import emoji with input_file.open("r") as infile: content = infile.read() content_with_emojis = emoji.emojize(content, language="alias") with output_file.open("w") as outfile: outfile.write(content_with_emojis) def _change_alerts_to_admonitions(input_text: str) -> str: # Splitting the text into lines lines = input_text.split("\n") # Placeholder for the edited text edited_text = [] # Mapping of markdown markers to their new format mapping = { "IMPORTANT": "important", "NOTE": "note", "TIP": "tip", "WARNING": "caution", } # Variable to keep track of the current block type current_block_type = None for line in lines: # Check if the line starts with any of the markers if any(line.strip().startswith(f"> [!{marker}]") for marker in mapping): # Find the marker and set the current block type current_block_type = next( marker for marker in mapping if f"> [!{marker}]" in line ) # Start of a new block edited_text.append("```{" + mapping[current_block_type] + "}") elif current_block_type and line.strip() == ">": # Empty line within the block, skip it continue elif current_block_type and not line.strip().startswith(">"): # End of the current block edited_text.append("```") edited_text.append(line) # Add the current line as it is current_block_type = None # Reset the block type elif current_block_type: # Inside the block, so remove '>' and add the line edited_text.append(line.lstrip("> ").rstrip()) else: # Outside any block, add the line as it is edited_text.append(line) # Join the edited lines back into a single string return "\n".join(edited_text) def change_alerts_to_admonitions(input_file: Path, output_file: Path) -> None: """Change markdown alerts to admonitions. For example, changes > [!NOTE] > This is a note. to ```{note} This is a note. 
``` """ with input_file.open("r") as infile: content = infile.read() new_content = _change_alerts_to_admonitions(content) with output_file.open("w") as outfile: outfile.write(new_content) def replace_example_links(input_file: Path, output_file: Path) -> None: """Replace relative links to `example/` files with absolute links to GitHub.""" with input_file.open("r") as infile: content = infile.read() new_content = content.replace( "(example/", "(https://github.com/basnijholt/unidep/tree/main/example/", ) with output_file.open("w") as outfile: outfile.write(new_content) def fix_anchors_with_named_emojis(input_file: Path, output_file: Path) -> None: """Fix anchors with named emojis. WARNING: this currently hardcodes the emojis to remove. """ to_remove = [ "package", "memo", "jigsaw", "desktop_computer", "hammer_and_wrench", "warning", ] with input_file.open("r") as infile: content = infile.read() new_content = content for emoji_name in to_remove: new_content = new_content.replace(f"#{emoji_name}-", "#") with output_file.open("w") as outfile: outfile.write(new_content) def normalize_slug(slug: str) -> str: """Normalize a slug.""" return "#" + slug[1:].lstrip("-").rstrip("-") def split_markdown_by_headers( readme_path: Path, out_folder: Path, links: dict[str, str], level: int = 2, to_skip: tuple[str, ...] = ("Table of Contents",), ) -> list[str]: """Split a markdown file into individual files based on headers.""" with readme_path.open(encoding="utf-8") as file: content = file.read() # Regex to find second-level headers n = "#" * level headers = re.finditer(rf"\n({n} .+?)(?=\n{n} |\Z)", content, re.DOTALL) # Split content based on headers split_contents: list[str] = [] header_contents: list[str] = [] start = 0 previous_header = "" for header in headers: header_title = header.group(1).strip("# ").strip() header_contents.append(header_title.split("\n", 1)[0]) end = header.start() if not any(s in previous_header for s in to_skip): split_contents.append(content[start:end].strip()) start = end previous_header = header_title # Add the last section split_contents.append(content[start:].strip()) # Create individual files for each section toctree_entries = [] for i, (section, header_content) in enumerate( zip(split_contents, header_contents), ): name = ( normalize_slug(links[header_content]).lstrip("#") if header_content in links else f"section_{i}" ) fname = out_folder / f"{name}.md" toctree_entries.append(name) with fname.open("w", encoding="utf-8") as file: file.write(section) return toctree_entries def replace_header(file_path: Path, new_header: str) -> None: """Replace the first-level header in a markdown file.""" with file_path.open("r", encoding="utf-8") as file: content = file.read() # Find the first-level header (indicated by '# ') # We use a regular expression to match the first occurrence of '# ' # and any following characters until a newline content = re.sub( r"^# .+?\n", f"# {new_header}\n", content, count=1, flags=re.MULTILINE, ) with file_path.open("w", encoding="utf-8") as file: file.write(content) def extract_toc_links(md_file_path: Path) -> dict[str, str]: """Extracts the table of contents with title to link mapping from the given README content. Parameters ---------- md_file_path Markdown file path. Returns ------- A dictionary where keys are section titles and values are the corresponding links. 
""" with md_file_path.open("r") as infile: readme_content = infile.read() toc_start = "" toc_end = "" # Extract the TOC section toc_section = re.search(f"{toc_start}(.*?){toc_end}", readme_content, re.DOTALL) if not toc_section: msg = "Table of Contents section not found." raise RuntimeError(msg) toc_content = toc_section.group(1) # Regular expression to match the markdown link syntax link_regex = re.compile(r"- \[([^]]+)\]\(([^)]+)\)") # Extracting links return { match.group(1).strip(): match.group(2) for match in link_regex.finditer(toc_content) } def extract_headers_from_markdown(md_file_path: Path) -> list[tuple[int, str]]: """Extracts all headers from a markdown file. Parameters ---------- md_file_path Path to the markdown file. Returns ------- A list of tuples containing the level of the header and the header text. """ with md_file_path.open("r") as infile: content = infile.read() # Regex to match markdown headers (e.g., ## Header) header_regex = re.compile(r"^(#+)\s+(.+)$", re.MULTILINE) # Extract headers return [ (len(match.group(1)), match.group(2).strip()) for match in header_regex.finditer(content) ] def replace_links_in_markdown( md_file_path: Path, headers_mapping: dict[str, list[tuple[int, str]]], links: dict[str, str], ) -> None: """Replaces markdown links with updated links that point to the correct file and header anchor. Parameters ---------- md_file_path Path to the markdown file to process. headers_mapping A dictionary where keys are markdown file names and values are lists of headers. links A dictionary of original header texts mapped to their slug (anchor) in the original README. """ with md_file_path.open("r") as infile: content = infile.read() # Replace links based on headers_mapping and links dictionary for file_name, headers in headers_mapping.items(): for _header_level, header_text in headers: # Find the original slug for this header text from the links dictionary original_slug = links.get(header_text, "") if original_slug: # Remove the '#' from the slug and update the link in the content new_slug = normalize_slug(original_slug) original_slug = original_slug.lstrip("#") content = content.replace( f"(#{original_slug})", f"({file_name}{new_slug})", ) # Write updated content back to file with md_file_path.open("w") as outfile: outfile.write(content) def decrease_header_levels(md_file_path: Path) -> None: """Decreases the header levels by one in a Markdown file, without going below level 1. Parameters ---------- md_file_path Path to the Markdown file. 
""" with md_file_path.open("r", encoding="utf-8") as file: content = file.read() # Function to decrease the header level def lower_header_level(match: re.Match) -> str: header_level = len(match.group(1)) new_header_level = "#" * max(1, header_level - 1) # Ensure at least one '#' return f"{new_header_level} {match.group(2)}" # Regular expression for Markdown headers header_regex = re.compile(r"^(#+)\s+(.+)$", re.MULTILINE) # Replace headers with decreased levels new_content = header_regex.sub(lower_header_level, content) # Write the updated content back to the file with md_file_path.open("w", encoding="utf-8") as file: file.write(new_content) def write_index_file(docs_path: Path, toctree_entries: list[str]) -> None: """Write an index file for the documentation.""" index_path = docs_path / "source" / "index.md" # Skip section_0.md as it is renamed to introduction.md pages = "\n".join(f"{entry}" for entry in toctree_entries[1:]) # Constructing the content using textwrap.dedent for better readability content = textwrap.dedent( """ ```{{include}} introduction.md ``` ```{{toctree}} :hidden: true :maxdepth: 2 :glob: introduction {pages} reference/index ``` """, ).format(pages=pages) # Write the content to the file with index_path.open("w", encoding="utf-8") as index_file: index_file.write(content) def process_readme_for_sphinx_docs(readme_path: Path, docs_path: Path) -> None: """Process the README.md file for Sphinx documentation generation. Parameters ---------- readme_path Path to the original README.md file. docs_path Path to the Sphinx documentation source directory. """ # Step 1: Copy README.md to the Sphinx source directory and apply transformations output_file = docs_path / "source" / "README.md" replace_named_emojis(readme_path, output_file) change_alerts_to_admonitions(output_file, output_file) replace_example_links(output_file, output_file) fix_anchors_with_named_emojis(output_file, output_file) # Step 2: Extract the table of contents links from the processed README links = extract_toc_links(output_file) # Step 3: Split the README into individual sections for Sphinx src_folder = docs_path / "source" for md_file in src_folder.glob("sections_*.md"): md_file.unlink() toctree_entries = split_markdown_by_headers(output_file, src_folder, links) output_file.unlink() # Remove the original README file from Sphinx source write_index_file(docs_path, toctree_entries) # Step 4: Extract headers from each section for link replacement headers_in_files = {} for md_file in src_folder.glob("*.md"): headers = extract_headers_from_markdown(md_file) decrease_header_levels(md_file) headers_in_files[md_file.name] = headers # Rename the first section to 'introduction.md' and update its header shutil.move(src_folder / "section_0.md", src_folder / "introduction.md") # type: ignore[arg-type] replace_header(src_folder / "introduction.md", new_header="🌟 Introduction") # Step 5: Replace links in each markdown file to point to the correct section for md_file in (*src_folder.glob("*.md"), src_folder / "introduction.md"): replace_links_in_markdown(md_file, headers_in_files, links) readme_path = package_path / "README.md" process_readme_for_sphinx_docs(readme_path, docs_path) ================================================ FILE: example/README.md ================================================ # Examples > [!TIP] > Try out `unidep` in this folder by running: > - `unidep install ./setup_py_project ./hatch_project` to install the `setup_py_project` and `hatch_project` packages and its dependencies with `conda`, then the 
remaining dependencies with `pip`, and finally the local packages with `pip` > - `unidep install-all -e` to install all packages (`setup_py_project`, `hatch_project`, `setuptools_project`, etc.) in editable mode > - `unidep conda-lock` to generate a global `conda-lock.yml` file and consistent per package `conda-lock.yml` files > - `unidep merge` to merge all `requirements.yaml` files into a single `environment.yaml` file > - `unidep pip-compile` to generate a locked `requirements.txt` file Explore these example projects to understand how `unidep` integrates with different build tools and configurations: | Project | Build Tool | `pyproject.toml` | `requirements.yaml` | `setup.py` | Description | | -------------------------------------------------- | ------------ | ---------------- | ------------------- | ---------- | ---------------------------------------------------------------------------------- | | [`setup_py_project`](setup_py_project) | `setuptools` | ✅ | ✅ | ✅ | Traditional `setuptools` project with `requirements.yaml`. | | [`setuptools_project`](setuptools_project) | `setuptools` | ✅ | ✅ | ❌ | Modern `setuptools` usage with both `pyproject.toml` and `requirements.yaml`. | | [`pyproject_toml_project`](pyproject_toml_project) | `setuptools` | ✅ | ❌ | ❌ | Pure `pyproject.toml` setup, showcasing comprehensive dependency management. | | [`hatch_project`](hatch_project) | `hatch` | ✅ | ✅ | ❌ | Demonstrates `unidep` integration in a Hatchling project with `requirements.yaml`. | | [`hatch2_project`](hatch2_project) | `hatch` | ✅ | ❌ | ❌ | Pure `pyproject.toml` Hatchling project. | ## Exploring `unidep` Through Practical Examples - [Combine one or multiple `requirements.yaml`/`pyproject.toml` files into a single `environment.yaml` file](#combine-one-or-multiple-requirementsyamlpyprojecttoml-files-into-a-single-environmentyaml-file) - [Using `pip install`](#using-pip-install) - [Using `unidep install`](#using-unidep-install) - [Using `unidep install-all` for installation across multiple projects](#using-unidep-install-all-for-installation-across-multiple-projects) ### Combine one or multiple `requirements.yaml`/`pyproject.toml` files into a single `environment.yaml` file Combine `requirements.yaml` files in subdirectories and into an `environment.yaml` file that can be installed with `conda`. Here we can just run `unidep merge` with no arguments, since the defaults are the same as what we want. 
This would be the same as running `unidep merge --name myenv --verbose`:

```bash
🔍 Scanning in `.` at depth 0
🔍 Scanning in `hatch2_project` at depth 1
🔍 Found `"pyproject.toml"` with dependencies at `hatch2_project/pyproject.toml`
🔍 Scanning in `hatch_project` at depth 1
🔍 Found `"requirements.yaml"` at `hatch_project/requirements.yaml`
🔍 Scanning in `pyproject_toml_project` at depth 1
🔍 Found `"pyproject.toml"` with dependencies at `pyproject_toml_project/pyproject.toml`
🔍 Scanning in `setup_py_project` at depth 1
🔍 Found `"requirements.yaml"` at `setup_py_project/requirements.yaml`
🔍 Scanning in `setuptools_project` at depth 1
🔍 Found `"requirements.yaml"` at `setuptools_project/requirements.yaml`
📄 Parsing `hatch2_project/pyproject.toml`
📄 Parsing `hatch_project/requirements.yaml`
📄 Parsing `pyproject_toml_project/pyproject.toml`
📄 Parsing `../hatch_project[test]` from `local_dependencies`
📄 Parsing `pyproject_toml_project/../hatch_project/requirements.yaml[test]`
📄 Moving `test` optional dependencies to main dependencies for `pyproject_toml_project/../hatch_project/requirements.yaml[test]`
📄 Parsing `setup_py_project/requirements.yaml`
📄 Parsing `../setuptools_project` from `local_dependencies`
📄 Parsing `setup_py_project/../setuptools_project/requirements.yaml`
📄 Parsing `setuptools_project/requirements.yaml`
📝 Generating environment file at `environment.yaml`
📝 Environment file generated successfully.
✅ Generated environment file at `environment.yaml` from `hatch2_project/pyproject.toml`, `hatch_project/requirements.yaml`, `pyproject_toml_project/pyproject.toml`, `setup_py_project/requirements.yaml`, `setuptools_project/requirements.yaml`
```

See the resulting [`environment.yaml`](environment.yaml) file, which is installable with [`mamba`](https://mamba.readthedocs.io/en/latest/). This file uses `sel(linux|osx|win)` to specify platform-specific dependencies. Alternatively, use `unidep merge --selector comment` to generate a file that uses comments to specify platform-specific dependencies, which can be read by [`conda-lock`](https://github.com/conda/conda-lock).

### Using `pip install`

This method allows you to install packages defined in a `requirements.yaml` file using `pip`. It focuses on installing only those dependencies that are pip-installable, followed by the local project package.

**How to Use**:

- Run `pip install ./setup_py_project`.
- This command will process the `requirements.yaml` in the specified directory (`./setup_py_project/`), installing all pip-installable dependencies, including the local project itself.

### Using `unidep install`

Using `unidep` for installation offers a more comprehensive approach. It handles both Conda and Pip dependencies specified in the `requirements.yaml` file, ensuring all necessary packages are installed, including those not available through pip.

**How to Use**:

- To perform a standard installation, run `unidep install ./setup_py_project`.
- For an editable installation (useful during development), use `unidep install -e ./setup_py_project`.
- The `unidep install` command first installs any Conda-specific dependencies from the `requirements.yaml` file, then proceeds to install pip-specific dependencies. Finally, it installs the local project package.
```bash $ unidep install --dry-run -e ./setup_py_project 📦 Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive">=0.15.0, <2.0.0" adaptive-scheduler hpc05 pexpect pfapack numpy">=1.21" packaging pandas">=1,<3" pytest pytest-cov` 📦 Installing pip dependencies with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install yaml2bib aiokef markdown-code-runner numthreads pyyaml rsync-time-machine slurm-usage unidep` 📝 Found local dependencies: {'setup_py_project': ['hatch_project', 'setuptools_project']} 📦 Installing project with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install --no-deps -e /home/runner/work/unidep/unidep/example/hatch_project -e /home/runner/work/unidep/unidep/example/setuptools_project -e ./setup_py_project` ``` ### Using `unidep install-all` for installation across multiple projects The `unidep install-all` command provides a convenient way to install all dependencies across multiple projects or packages within a given directory. This command is especially useful in monorepos or when managing several related projects with their own `requirements.yaml` files. **How `unidep install-all` Works**: - This command scans a specified directory (or the current directory if none is specified) for `requirements.yaml` files. - It then installs dependencies for each found project, handling both Conda and Pip dependencies. - The local packages are also installed, making this command a one-stop solution for setting up your entire workspace. **Usage Examples**: - Run `unidep install-all` to install all dependencies in the current directory. - Use `unidep install-all -e` for an editable install, which is useful during development. This flag ensures that local packages are installed in a way that allows changes to be reflected immediately without needing reinstallation. **Example Command**: ```bash # To install all projects in the current directory in editable mode unidep install-all -e ``` **Output Example**: ```bash $ unidep install-all -e --dry-run 📦 Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive">=0.15.0, <2.0.0" adaptive-scheduler hpc05 pexpect pfapack numpy">=1.21" packaging pandas">=1,<3" pytest pytest-cov` 📦 Installing pip dependencies with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install yaml2bib aiokef markdown-code-runner numthreads pyyaml rsync-time-machine slurm-usage unidep` 📝 Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']} 📦 Installing project with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install --no-deps -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project` ``` This command streamlines the process of getting a development environment up and running, particularly in complex setups with multiple interdependent projects. ================================================ FILE: example/environment.yaml ================================================ # This file is created and managed by `unidep` 3.2.0. 
# For details see https://github.com/basnijholt/unidep # File generated with: `unidep merge --name myenv --verbose` name: myenv channels: - conda-forge dependencies: - sel(linux): adaptive >=0.15.0, <2.0.0 - sel(linux): adaptive-scheduler - sel(linux): hpc05 - sel(linux): pexpect - sel(osx): pexpect - sel(linux): pfapack - numpy >=1.21 - packaging - pandas >=1,<3 - pytest - pytest-cov - pip: - yaml2bib; sys_platform == 'linux' and platform_machine == 'x86_64' - aiokef - markdown-code-runner - numthreads - pyyaml - rsync-time-machine - slurm-usage - unidep - fileup; sys_platform == 'darwin' platforms: - linux-64 - osx-64 - osx-arm64 ================================================ FILE: example/hatch2_project/README.md ================================================ # Hatchling Integration > [!TIP] > - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself. > - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence: > 1. `conda install [dependencies from pyproject.toml]` – Installs all Conda installable dependencies. > 2. `pip install [dependencies from pyproject.toml]` – Installs remaining pip-only dependencies. > 3. `pip install .` – Installs the local package. For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured fully in `pyproject.toml` including all its dependencies. **Example Configuration for Hatch**: ```toml [build-system] requires = ["hatchling", "unidep[toml]"] # add "unidep[toml]" here build-backend = "hatchling.build" [project] dynamic = ["dependencies"] # add "dependencies" here # Additional project configurations [tool.hatch] # Additional Hatch configurations [tool.hatch.metadata] allow-direct-references = true # allow VCS URLs, local paths, etc. [tool.hatch.metadata.hooks.unidep] # add this to enable the hook # Specify pip and conda dependencies here [tool.unidep] channels = ["conda-forge"] dependencies = [ { conda = "adaptive-scheduler:linux64" }, { pip = "unidep" }, "numpy >=1.21", "hpc05:linux64", "pandas >=1,<3", "pexpect:unix", "wexpect:win64", ] ``` > [!NOTE] > See the [`pyproject.toml`](pyproject.toml) for a working example. ================================================ FILE: example/hatch2_project/hatch2_project.py ================================================ x = 1 ================================================ FILE: example/hatch2_project/pyproject.toml ================================================ [build-system] requires = ["hatchling", "unidep[toml]"] build-backend = "hatchling.build" [project] name = "hatch2_project" description = "Example hatch2_project for `unidep`." authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }] # `dependencies` is not needed because it is automatically # populated by `unidep` with the dependencies defined in the [tool.unidep] section! 
# dependencies = [] dynamic = ["dependencies"] version = "0.1.0" [tool.hatch] # Allow direct references (e.g., VCS URLs, local paths) in dependencies [tool.hatch.metadata] allow-direct-references = true [tool.hatch.metadata.hooks.unidep] [tool.unidep] channels = ["conda-forge"] dependencies = [ { conda = "adaptive-scheduler:linux64" }, { pip = "unidep" }, "numpy >=1.21", "hpc05:linux64", "pandas >=1,<3", "pexpect:unix", "wexpect:win64", ] ================================================ FILE: example/hatch_project/README.md ================================================ # Hatchling Integration > [!TIP] > - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself. > - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence: > 1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies. > 2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies. > 3. `pip install .` – Installs the local package. For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured in `pyproject.toml` to automatically process `requirements.yaml`. **Example Configuration for Hatch**: ```toml [build-system] requires = ["hatchling", "unidep"] # add "unidep" here build-backend = "hatchling.build" [project] dynamic = ["dependencies"] # add "dependencies" here # Additional project configurations [tool.hatch] # Additional Hatch configurations [tool.hatch.metadata] allow-direct-references = true # allow VCS URLs, local paths, etc. [tool.hatch.metadata.hooks.unidep] # add this to enable the hook ``` > [!NOTE] > See the [`pyproject.toml`](pyproject.toml) for a working example. ================================================ FILE: example/hatch_project/hatch_project.py ================================================ x = 1 ================================================ FILE: example/hatch_project/pyproject.toml ================================================ [build-system] requires = ["hatchling", "unidep"] build-backend = "hatchling.build" [project] name = "hatch_project" description = "Example hatch_project for `unidep`."
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }] # `dependencies` is not needed because it is automatically # populated by `unidep` with the dependencies from the `requirements.yaml` # dependencies = [] dynamic = ["dependencies", "optional-dependencies"] version = "0.1.0" [tool.hatch] # Allow direct references (e.g., VCS URLs, local paths) in dependencies [tool.hatch.metadata] allow-direct-references = true [tool.hatch.metadata.hooks.unidep] ================================================ FILE: example/hatch_project/requirements.yaml ================================================ name: hatch_project channels: - conda-forge dependencies: - conda: adaptive-scheduler # [linux64] - pip: unidep - numpy >=1.21 - hpc05 # [linux64] - pandas >=1,<3 - pexpect # [unix] - wexpect # [win] optional_dependencies: test: - pytest - pytest-cov ================================================ FILE: example/pyproject_toml_project/README.md ================================================ # Full `pyproject.toml` integration example > [!TIP] > - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself. > - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence: > 1. `conda install [dependencies from pyproject.toml]` – Installs all Conda installable dependencies. > 2. `pip install [dependencies from pyproject.toml]` – Installs remaining pip-only dependencies. > 3. `pip install .` – Installs the local package. For projects using `setuptools` with only a `pyproject.toml` file, configure `unidep` in `pyproject.toml` and specify all dependencies there too. **Example Configuration for projects using `pyproject.toml`**: Add this to `pyproject.toml`: ```toml [build-system] build-backend = "setuptools.build_meta" requires = ["setuptools", "unidep[toml]"] # add "unidep[toml]" here [project] dynamic = ["dependencies"] # add "dependencies" here [tool.unidep] channels = ["conda-forge"] dependencies = [ "adaptive", "pfapack:linux64", "packaging", { pip = "markdown-code-runner" }, { pip = "numthreads" }, ] ``` No separate `requirements.yaml` is needed here: all dependencies live directly in `pyproject.toml`. 🎉 > [!NOTE] > See the [`pyproject.toml`](pyproject.toml) for a working example. ================================================ FILE: example/pyproject_toml_project/pyproject.toml ================================================ [build-system] requires = ["setuptools", "unidep[toml]"] build-backend = "setuptools.build_meta" [project] name = "pyproject_toml_project" description = "Example pyproject_toml_project for `unidep`." authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }] # `dependencies` is not needed because it is automatically # populated by `unidep` with the dependencies defined in the [tool.unidep] section!
# dependencies = [] version = "0.1.0" dynamic = ["dependencies", "optional-dependencies"] [tool.setuptools] py-modules = ["pyproject_toml_project"] [tool.unidep] channels = ["conda-forge"] dependencies = [ "adaptive:linux64", "pfapack:linux64", "packaging", { pip = "markdown-code-runner" }, { pip = "numthreads" }, ] local_dependencies = [ "../hatch_project[test]", # Local dependency with optional dependencies ] [tool.unidep.optional_dependencies] dev = ["mypy", "ruff"] test = ["pytest"] ================================================ FILE: example/pyproject_toml_project/pyproject_toml_project.py ================================================ ================================================ FILE: example/setup_py_project/README.md ================================================ # `setup.py` integration example > [!TIP] > - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself. > - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence: > 1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies. > 2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies. > 3. `pip install .` – Installs the local package. For projects using `setuptools` with a `setup.py` file, configure `unidep` in `pyproject.toml` alongside a `requirements.yaml` file. **Example Configuration for projects using `setup.py`**: Add this to `pyproject.toml`: ```toml [build-system] build-backend = "setuptools.build_meta" requires = ["setuptools", "unidep"] ``` Then simply omit `install_requires` in `setup.py`; `unidep` populates it automatically from the `requirements.yaml`. > [!NOTE] > See the [`pyproject.toml`](pyproject.toml) and [`setup.py`](setup.py) for a working example.
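For reference, here is a trimmed sketch of the `requirements.yaml` that accompanies this `setup.py` project (the full file ships with this example; the entries below are a representative subset):

```yaml
name: setup_py_project
channels:
  - conda-forge
dependencies:
  - pandas
  - adaptive >=0.15.0, <2.0.0 # [linux64]
  - pip: rsync-time-machine
local_dependencies:
  - ../setuptools_project # depends on setuptools_project
platforms:
  - linux-64
  - osx-64
  - osx-arm64
```

The selector comments (e.g. `# [linux64]`) are how `requirements.yaml` restricts an entry to specific platforms.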
================================================ FILE: example/setup_py_project/pyproject.toml ================================================ [build-system] requires = ["setuptools", "unidep"] build-backend = "setuptools.build_meta" ================================================ FILE: example/setup_py_project/requirements.yaml ================================================ name: setup_py_project channels: - conda-forge dependencies: - pandas - adaptive >=0.15.0, <2.0.0 # [linux64] - pip: yaml2bib # [linux64] - pip: rsync-time-machine - pip: slurm-usage - pip: fileup # [macos] - pip: pyyaml - pip: aiokef local_dependencies: - ../setuptools_project # depends on setuptools_project platforms: - linux-64 - osx-64 - osx-arm64 ================================================ FILE: example/setup_py_project/setup.py ================================================ from setuptools import setup setup( name="setup_py_project", version="0.1.0", description="A short description of your package", py_modules=["setup_py_project"], # This is not needed because `install_requires` is automatically # populated by `unidep` with the dependencies from the `requirements.yaml` ) ================================================ FILE: example/setup_py_project/setup_py_project.py ================================================ ================================================ FILE: example/setuptools_project/README.md ================================================ # Setuptools `pyproject.toml` integration example > [!TIP] > - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself. > - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence: > 1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies. > 2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies. > 3. `pip install .` – Installs the local package. For projects using `setuptools` with only a `pyproject.toml` file, configure `unidep` in `pyproject.toml` alongside a `requirements.yaml` file. **Example Configuration for projects using `pyproject.toml`**: Add this to `pyproject.toml`: ```toml [build-system] build-backend = "setuptools.build_meta" requires = ["setuptools", "unidep"] # add "unidep" here [project] dynamic = ["dependencies"] # add "dependencies" here ``` Then, of course, add a `requirements.yaml` and you are good to go! 🎉 > [!NOTE] > See the [`pyproject.toml`](pyproject.toml) for a working example. ================================================ FILE: example/setuptools_project/pyproject.toml ================================================ [build-system] requires = ["setuptools", "unidep"] build-backend = "setuptools.build_meta" [project] name = "setuptools_project" description = "Example setuptools_project for `unidep`." 
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }] # `dependencies` is not needed because it is automatically # populated by `unidep` with the dependencies from the `requirements.yaml` # dependencies = [] version = "0.1.0" dynamic = ["dependencies", "optional-dependencies"] [tool.setuptools] py-modules = ["setuptools_project"] ================================================ FILE: example/setuptools_project/requirements.yaml ================================================ name: setuptools_project channels: - conda-forge dependencies: - adaptive # [linux64] - pfapack # [linux64] - packaging - pip: markdown-code-runner - pip: numthreads local_dependencies: - ../hatch_project[test] # depends on hatch_project optional_dependencies: dev: - mypy - ruff test: - pytest-xdist setup_py: # Optional local dependency - ../setup_py_project ================================================ FILE: example/setuptools_project/setuptools_project.py ================================================ ================================================ FILE: pyproject.toml ================================================ [build-system] requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" [project] name = "unidep" description = "Unified Conda and Pip requirements management." dynamic = ["version"] authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }] dependencies = [ "packaging", "ruamel.yaml", "typing_extensions; python_version < '3.8'", "tomli; python_version < '3.11'", ] requires-python = ">=3.7" [project.readme] file = "README.md" content-type = "text/markdown" [project.urls] Homepage = "https://github.com/basnijholt/unidep" [project.optional-dependencies] toml = ["tomli; python_version < '3.11'"] conda-lock = ["conda-lock", "conda-package-handling"] pip-compile = ["pip-tools"] pytest = ["pytest", "GitPython"] # The pytest plugin rich = ["rich-argparse"] pixi = ["pixi-to-conda-lock; python_version >= '3.9'", "tomli_w"] # Everything except 'test' and 'docs' all = [ "unidep[toml,conda-lock,pip-compile,pytest,rich,pixi]", ] docs = [ "myst-parser", "sphinx", "furo", "emoji", "sphinx-autodoc-typehints", ] test = [ "unidep[all]", "tomli_w", "pytest", "pre-commit", "coverage", "pytest-cov", "pytest-mock", "conda-package-handling", "rich", ] [project.scripts] unidep = "unidep:_cli.main" [project.entry-points."setuptools.finalize_distribution_options"] unidep = "unidep._setuptools_integration:_setuptools_finalizer" [project.entry-points.hatch] unidep = "unidep._hatch_integration" [project.entry-points.pytest11] affected = "unidep._pytest_plugin" [tool.setuptools.packages.find] include = ["unidep.*", "unidep"] [tool.setuptools.dynamic] version = { attr = "unidep._version.__version__" } [tool.setuptools.package-data] "unidep" = ["py.typed"] [tool.pytest.ini_options] addopts = """ --cov=unidep --cov-report term --cov-report html --cov-report xml --cov-fail-under=100 -W error -vvv """ [tool.coverage.run] omit = ["unidep/_pytest_plugin.py", "unidep/_hatch_integration.py"] patch = ["subprocess"] [tool.coverage.report] exclude_lines = [ "pragma: no cover", "raise NotImplementedError", "if TYPE_CHECKING:", "if __name__ == .__main__.:", ] [tool.black] line_length = 88 [tool.ruff] line-length = 88 target-version = "py37" [tool.ruff.lint] select = ["ALL"] ignore = [ "T20", # flake8-print "ANN101", # Missing type annotation for {name} in method "S101", # Use of assert detected "S603", # S603 `subprocess` call: check for execution of untrusted input "PD901", # df is a bad variable name. 
Be kinder to your future self. "ANN401", # Dynamically typed expressions (typing.Any) are disallowed in {name} "D402", # First line should not be the function's signature "PLW0603", # Using the global statement to update `X` is discouraged "D401", # First line of docstring should be in imperative mood "SLF001", # Private member accessed "PLR0913", # Too many arguments in function definition "TD002", # Missing author in TODO ] [tool.ruff.lint.per-file-ignores] "tests/*" = ["SLF001", "D103", "E501", "PLR2004"] "tests/test_examples.py" = ["E501"] ".github/*" = ["INP001"] "example/*" = ["INP001", "D100"] "docs/*" = ["INP001", "E501"] [tool.ruff.lint.mccabe] max-complexity = 18 [tool.mypy] python_version = "3.8" # 3.7 is no longer supported by mypy # Use bump-my-version, e.g., call `bump-my-version bump minor` [tool.bumpversion] current_version = "3.2.0" commit = true commit_args = "--no-verify" tag = true tag_name = "v{new_version}" [[tool.bumpversion.files]] filename = "unidep/_version.py" replace = '__version__ = "{new_version}"' search = '__version__ = "{current_version}"' ================================================ FILE: tests/__init__.py ================================================ """Tests for the ``unidep`` package.""" ================================================ FILE: tests/helpers.py ================================================ """unidep tests.""" from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING from unidep._dependencies_parsing import yaml_to_toml if TYPE_CHECKING: import sys if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal REPO_ROOT = Path(__file__).parent.parent def maybe_as_toml(toml_or_yaml: Literal["toml", "yaml"], p: Path) -> Path: if toml_or_yaml == "toml": toml = yaml_to_toml(p) p.unlink() p = p.with_name("pyproject.toml") p.write_text(toml) return p ================================================ FILE: tests/shared_local_install_monorepo/project1/pyproject.toml ================================================ [build-system] requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" [project] name = "project1" version = "0.0.1" ================================================ FILE: tests/shared_local_install_monorepo/project1/requirements.yaml ================================================ name: project1 local_dependencies: - ../shared ================================================ FILE: tests/shared_local_install_monorepo/project2/pyproject.toml ================================================ [build-system] requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" [project] name = "project2" version = "0.0.1" ================================================ FILE: tests/shared_local_install_monorepo/project2/requirements.yaml ================================================ name: project2 local_dependencies: - ../shared ================================================ FILE: tests/shared_local_install_monorepo/shared/pyproject.toml ================================================ [build-system] requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" [project] name = "shared" version = "0.0.1" ================================================ FILE: tests/shared_local_install_monorepo/shared/requirements.yaml ================================================ name: shared dependencies: [] ================================================ FILE: 
tests/simple_monorepo/common-requirements.yaml ================================================ # This file is used in the `local_dependencies:` section in `project1/requirements.yaml` # and `project2/requirements.yaml`. name: common-requirements channels: - conda-forge dependencies: - conda: python_abi ================================================ FILE: tests/simple_monorepo/conda-lock.yml ================================================ # This file is created and managed by `unidep` 0.41.0. # For details see https://github.com/basnijholt/unidep # File generated with: `unidep conda-lock -d tests/simple_monorepo` # # This environment can be installed with # `micromamba create -f conda-lock.yml -n myenv` # This file is a `conda-lock` file generated via `unidep`. # For details see https://conda.github.io/conda-lock/ version: 1 metadata: content_hash: osx-64: ee56565c906fa861ded63721f99e398fd1734b57368e6f701e25dddf03e7960a osx-arm64: 08362c60bc03c882ae95fa83c4d29e9fb0b7795d63d74ada081ac0fa8a7c69f8 channels: - url: conda-forge used_env_vars: [] platforms: - osx-64 - osx-arm64 sources: - tmp.environment.yaml package: - name: bzip2 version: 1.0.8 manager: conda platform: osx-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda hash: md5: 6097a6ca9ada32699b5fc4312dd6ef18 sha256: 61fb2b488928a54d9472113e1280b468a309561caa54f33825a3593da390b242 category: main optional: false - name: bzip2 version: 1.0.8 manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda hash: md5: 1bbc659ca658bfd49a481b5ef7a0f40f sha256: bfa84296a638bea78a8bb29abc493ee95f2a0218775642474a840411b950fe5f category: main optional: false - name: python_abi version: '3.12' manager: conda platform: osx-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda hash: md5: 87201ac4314b911b74197e588cca3639 sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14 category: main optional: false - name: python_abi version: '3.12' manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda hash: md5: bbb3a02c78b2d8219d7213f76d644a2a sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f category: main optional: false - name: tzdata version: 2023d manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda hash: md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0 sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d category: main optional: false ================================================ FILE: tests/simple_monorepo/project1/conda-lock.yml ================================================ # This file is created and managed by `unidep` 0.41.0. # For details see https://github.com/basnijholt/unidep # File generated with: `unidep conda-lock -d tests/simple_monorepo` # # This environment can be installed with # `micromamba create -f conda-lock.yml -n myenv` # This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/ version: 1 metadata: content_hash: osx-64: unidep-is-awesome osx-arm64: unidep-is-awesome channels: - url: conda-forge used_env_vars: [] platforms: - osx-64 - osx-arm64 sources: - tests/simple_monorepo/project1/requirements.yaml package: - name: bzip2 version: 1.0.8 manager: conda platform: osx-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda hash: md5: 6097a6ca9ada32699b5fc4312dd6ef18 sha256: 61fb2b488928a54d9472113e1280b468a309561caa54f33825a3593da390b242 category: main optional: false - name: bzip2 version: 1.0.8 manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda hash: md5: 1bbc659ca658bfd49a481b5ef7a0f40f sha256: bfa84296a638bea78a8bb29abc493ee95f2a0218775642474a840411b950fe5f category: main optional: false - name: python_abi version: '3.12' manager: conda platform: osx-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda hash: md5: 87201ac4314b911b74197e588cca3639 sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14 category: main optional: false - name: python_abi version: '3.12' manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda hash: md5: bbb3a02c78b2d8219d7213f76d644a2a sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f category: main optional: false - name: tzdata version: 2023d manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda hash: md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0 sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d category: main optional: false ================================================ FILE: tests/simple_monorepo/project1/requirements.yaml ================================================ name: project1 channels: - conda-forge dependencies: - conda: bzip2 local_dependencies: - ../project2 # this means `project2` is a dependency of `project1` - ../common-requirements.yaml ================================================ FILE: tests/simple_monorepo/project2/conda-lock.yml ================================================ # This file is created and managed by `unidep` 0.41.0. # For details see https://github.com/basnijholt/unidep # File generated with: `unidep conda-lock -d tests/simple_monorepo` # # This environment can be installed with # `micromamba create -f conda-lock.yml -n myenv` # This file is a `conda-lock` file generated via `unidep`. 
# For details see https://conda.github.io/conda-lock/ version: 1 metadata: content_hash: osx-64: unidep-is-awesome osx-arm64: unidep-is-awesome channels: - url: conda-forge used_env_vars: [] platforms: - osx-64 - osx-arm64 sources: - tests/simple_monorepo/project2/requirements.yaml package: - name: python_abi version: '3.12' manager: conda platform: osx-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda hash: md5: 87201ac4314b911b74197e588cca3639 sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14 category: main optional: false - name: python_abi version: '3.12' manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda hash: md5: bbb3a02c78b2d8219d7213f76d644a2a sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f category: main optional: false - name: tzdata version: 2023d manager: conda platform: osx-arm64 dependencies: {} url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda hash: md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0 sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d category: main optional: false ================================================ FILE: tests/simple_monorepo/project2/requirements.yaml ================================================ name: project2 channels: - conda-forge dependencies: - conda: tzdata # [arm64] local_dependencies: - ../common-requirements.yaml platforms: - osx-arm64 - osx-64 ================================================ FILE: tests/test-pip-and-conda-different-name/conda-lock.yml ================================================ # This file is created and managed by `unidep` 0.23.0. # For details see https://github.com/basnijholt/unidep # File generated with: `unidep conda-lock -d /Users/basnijholt/Code/unidep/tests/test-pip-and-conda-different-name` # # This environment can be installed with # `micromamba create -f conda-lock.yml -n myenv` # This file is a `conda-lock` file generated via `unidep`. 
# For details see https://conda.github.io/conda-lock/ version: 1 metadata: content_hash: linux-64: c18392f096a6c21233400900e6ba90c299ad2d28348b69cb62a7cf66734bfe81 channels: - url: conda-forge used_env_vars: [] platforms: - linux-64 sources: - tmp.environment.yaml package: - name: _libgcc_mutex version: '0.1' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 hash: md5: d7c89558ba9fa0495403155b64376d81 sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 category: main optional: false - name: _openmp_mutex version: '4.5' manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' libgomp: '>=7.5.0' url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 hash: md5: 73aaf86a425cc6e73fcf236a5a46396d sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 category: main optional: false - name: bzip2 version: 1.0.8 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda hash: md5: 69b8b6202a07720f448be700e300ccf4 sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8 category: main optional: false - name: ca-certificates version: 2023.11.17 manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda hash: md5: 01ffc8d36f9eba0ce0b3c1955fa780ee sha256: fb4b9f4b7d885002db0b93e22f44b5b03791ef3d4efdc9d0662185a0faafd6b6 category: main optional: false - name: ld_impl_linux-64 version: '2.40' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda hash: md5: 7aca3059a1729aa76c597603f10b0dd3 sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd category: main optional: false - name: libffi version: 3.4.2 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=9.4.0' url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 hash: md5: d645c6d2ac96843a2bfaccd2d62b3ac3 sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e category: main optional: false - name: libgcc-ng version: 13.2.0 manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' _openmp_mutex: '>=4.5' url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda hash: md5: 23fdf1fef05baeb7eadc2aed5fb0011f sha256: 5e88f658e07a30ab41b154b42c59f079b168acfa9551a75bdc972099453f4105 category: main optional: false - name: libgomp version: 13.2.0 manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda hash: md5: 7124cbb46b13d395bdde68f2d215c989 sha256: 6ebedee39b6bbbc969715d0d7fa4b381cce67e1139862604ffa393f821c08e81 category: main optional: false - name: libnsl version: 2.0.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda hash: md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 category: main optional: false - name: libsqlite version: 3.44.2 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda hash: md5: 
3b6a9f225c3dbe0d24f4fedd4625c5bf sha256: ee2c4d724a3ed60d5b458864d66122fb84c6ce1df62f735f90d8db17b66cd88a category: main optional: false - name: libstdcxx-ng version: 13.2.0 manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda hash: md5: 937eaed008f6bf2191c5fe76f87755e9 sha256: 6c6c49efedcc5709a66f19fb6b26b69c6a5245310fd1d9a901fd5e38aaf7f882 category: main optional: false - name: libuuid version: 2.38.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda hash: md5: 40b61aab5c7ba9ff276c41cfffe6b80b sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 category: main optional: false - name: libzlib version: 1.2.13 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda hash: md5: f36c115f1ee199da648e0597ec2047ad sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4 category: main optional: false - name: msgpack-python version: 1.0.7 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' python: '>=3.10,<3.11.0a0' python_abi: 3.10.* url: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1e2_0.conda hash: md5: dc5263dcaa1347e5a456ead3537be27d sha256: a5c7612029e3871b0af0bd69e8ee1545d3deb93b5bec29cf1bf72522375fda31 category: main optional: false - name: ncurses version: '6.4' manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda hash: md5: 7dbaa197d7ba6032caf7ae7f32c1efa0 sha256: 91cc03f14caf96243cead96c76fe91ab5925a695d892e83285461fb927dece5e category: main optional: false - name: openssl version: 3.2.0 manager: conda platform: linux-64 dependencies: ca-certificates: '' libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda hash: md5: 603827b39ea2b835268adb8c821b8570 sha256: 80efc6f429bd8e622d999652e5cba2ca56fcdb9c16a439d2ce9b4313116e4a87 category: main optional: false - name: pip version: 23.3.1 manager: conda platform: linux-64 dependencies: python: '>=3.7' setuptools: '' wheel: '' url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda hash: md5: 2400c0b86889f43aa52067161e1fb108 sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 category: main optional: false - name: python version: 3.10.13 manager: conda platform: linux-64 dependencies: bzip2: '>=1.0.8,<2.0a0' ld_impl_linux-64: '>=2.36.1' libffi: '>=3.4,<4.0a0' libgcc-ng: '>=12' libnsl: '>=2.0.1,<2.1.0a0' libsqlite: '>=3.43.2,<4.0a0' libuuid: '>=2.38.1,<3.0a0' libzlib: '>=1.2.13,<1.3.0a0' ncurses: '>=6.4,<7.0a0' openssl: '>=3.1.4,<4.0a0' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' xz: '>=5.2.6,<6.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda hash: md5: f3a8c32aa764c3e7188b4b810fc9d6ce sha256: a53410f459f314537b379982717b1c5911efc2f0cc26d63c4d6f831bcb31c964 category: main optional: false - name: python_abi version: '3.10' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda hash: md5: 26322ec5d7712c3ded99dd656142b8ce sha256: 456bec815bfc2b364763084d08b412fdc4c17eb9ccc66a36cb775fa7ac3cbaec category: main optional: false - name: readline 
version: '8.2' manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' ncurses: '>=6.3,<7.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda hash: md5: 47d31b792659ce70f470b5c82fdfb7a4 sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 category: main optional: false - name: setuptools version: 68.2.2 manager: conda platform: linux-64 dependencies: python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda hash: md5: fc2166155db840c634a1291a5c35a709 sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7 category: main optional: false - name: tk version: 8.6.13 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda hash: md5: d453b98d9c83e71da0741bb0ff4d76bc sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e category: main optional: false - name: tzdata version: 2023c manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda hash: md5: 939e3e74d8be4dac89ce83b20de2492a sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 category: main optional: false - name: wheel version: 0.42.0 manager: conda platform: linux-64 dependencies: python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda hash: md5: 1cdea58981c5cbc17b51973bcaddcea7 sha256: 80be0ccc815ce22f80c141013302839b0ed938a2edb50b846cf48d8a8c1cfa01 category: main optional: false - name: xz version: 5.2.6 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 hash: md5: 2161070d867d1b1204ea749c8eec4ef0 sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 category: main optional: false - name: fluent-logger version: 0.10.0 manager: pip platform: linux-64 dependencies: msgpack: '>1.0' url: https://files.pythonhosted.org/packages/00/43/9cbd7756dfe2cddc0a76ec2eaec56449ac126455c36fe03ecc86f7feac8f/fluent_logger-0.10.0-py2.py3-none-any.whl hash: sha256: 543637e5e62ec3fc3c92b44e5a4e148a3cea88a0f8ca4fae26c7e60fda7564c1 category: main optional: false - name: rsync-time-machine version: 1.3.0 manager: pip platform: linux-64 dependencies: {} url: https://files.pythonhosted.org/packages/42/88/f32647517b00f937c66ae2891f22ebb614ac521386254c2eefd9d770c05e/rsync_time_machine-1.3.0-py3-none-any.whl hash: sha256: 371c23dddddedee51c57dec1f31de82465b9139f17357754dc92269d58c3d454 category: main optional: false ================================================ FILE: tests/test-pip-and-conda-different-name/project1/requirements.yaml ================================================ name: project1 channels: - conda-forge dependencies: - conda: python=3.10 - pip: fluent-logger # depends on msgpack, but on conda-forge it's called msgpack-python - pip: rsync-time-machine platforms: - linux-64 ================================================ FILE: tests/test-pip-and-conda-different-name/project2/requirements.yaml ================================================ name: project2 channels: - conda-forge dependencies: - conda: msgpack-python platforms: - linux-64 ================================================ FILE: tests/test-pip-package-with-conda-dependency/conda-lock.yml ================================================ # This file is created and managed
by `unidep` 0.23.0. # For details see https://github.com/basnijholt/unidep # File generated with: `unidep conda-lock -d tests/test-pip-package-with-conda-dependency` # # This environment can be installed with # `micromamba create -f conda-lock.yml -n myenv` # This file is a `conda-lock` file generated via `unidep`. # For details see https://conda.github.io/conda-lock/ version: 1 metadata: content_hash: linux-64: 64492feacfc7d0ed4ee041529c75ad1ec9543bb69603d7519427014d47061f9a channels: - url: conda-forge used_env_vars: [] platforms: - linux-64 sources: - tmp.environment.yaml package: - name: _libgcc_mutex version: '0.1' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 hash: md5: d7c89558ba9fa0495403155b64376d81 sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 category: main optional: false - name: _openmp_mutex version: '4.5' manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' libgomp: '>=7.5.0' url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 hash: md5: 73aaf86a425cc6e73fcf236a5a46396d sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 category: main optional: false - name: bzip2 version: 1.0.8 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda hash: md5: 69b8b6202a07720f448be700e300ccf4 sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8 category: main optional: false - name: ca-certificates version: 2023.11.17 manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda hash: md5: 01ffc8d36f9eba0ce0b3c1955fa780ee sha256: fb4b9f4b7d885002db0b93e22f44b5b03791ef3d4efdc9d0662185a0faafd6b6 category: main optional: false - name: ld_impl_linux-64 version: '2.40' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda hash: md5: 7aca3059a1729aa76c597603f10b0dd3 sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd category: main optional: false - name: libexpat version: 2.5.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda hash: md5: 6305a3dd2752c76335295da4e581f2fd sha256: 74c98a563777ae2ad71f1f74d458a8ab043cee4a513467c159ccf159d0e461f3 category: main optional: false - name: libffi version: 3.4.2 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=9.4.0' url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 hash: md5: d645c6d2ac96843a2bfaccd2d62b3ac3 sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e category: main optional: false - name: libgcc-ng version: 13.2.0 manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' _openmp_mutex: '>=4.5' url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda hash: md5: 23fdf1fef05baeb7eadc2aed5fb0011f sha256: 5e88f658e07a30ab41b154b42c59f079b168acfa9551a75bdc972099453f4105 category: main optional: false - name: libgomp version: 13.2.0 manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda hash: md5: 7124cbb46b13d395bdde68f2d215c989 sha256: 
6ebedee39b6bbbc969715d0d7fa4b381cce67e1139862604ffa393f821c08e81 category: main optional: false - name: libnsl version: 2.0.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda hash: md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 category: main optional: false - name: libsqlite version: 3.44.2 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda hash: md5: 3b6a9f225c3dbe0d24f4fedd4625c5bf sha256: ee2c4d724a3ed60d5b458864d66122fb84c6ce1df62f735f90d8db17b66cd88a category: main optional: false - name: libstdcxx-ng version: 13.2.0 manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda hash: md5: 937eaed008f6bf2191c5fe76f87755e9 sha256: 6c6c49efedcc5709a66f19fb6b26b69c6a5245310fd1d9a901fd5e38aaf7f882 category: main optional: false - name: libuuid version: 2.38.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda hash: md5: 40b61aab5c7ba9ff276c41cfffe6b80b sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 category: main optional: false - name: libzlib version: 1.2.13 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda hash: md5: f36c115f1ee199da648e0597ec2047ad sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4 category: main optional: false - name: ncurses version: '6.4' manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda hash: md5: 7dbaa197d7ba6032caf7ae7f32c1efa0 sha256: 91cc03f14caf96243cead96c76fe91ab5925a695d892e83285461fb927dece5e category: main optional: false - name: openssl version: 3.2.0 manager: conda platform: linux-64 dependencies: ca-certificates: '' libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda hash: md5: 603827b39ea2b835268adb8c821b8570 sha256: 80efc6f429bd8e622d999652e5cba2ca56fcdb9c16a439d2ce9b4313116e4a87 category: main optional: false - name: pip version: 23.3.1 manager: conda platform: linux-64 dependencies: python: '>=3.7' setuptools: '' wheel: '' url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda hash: md5: 2400c0b86889f43aa52067161e1fb108 sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 category: main optional: false - name: pybind11 version: 2.11.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' pybind11-global: 2.11.1 python: '>=3.11,<3.12.0a0' python_abi: 3.11.* url: https://conda.anaconda.org/conda-forge/linux-64/pybind11-2.11.1-py311h9547e67_2.conda hash: md5: 64a8933c635a78a6dc0f0cb07ef19a6e sha256: 98ea0d8edd21b6ef7205aeafa6dbdcb1829aeb888ec8a4ba69d58effb912d536 category: main optional: false - name: pybind11-global version: 2.11.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* url: https://conda.anaconda.org/conda-forge/linux-64/pybind11-global-2.11.1-py311h9547e67_2.conda hash: md5: 71330b362711dd503ef2e8139570b8e0 sha256: 
6f231d62f03e99c0e45d70f17a82c0482dbe8286412fe44556bcfeccbacd5c0c category: main optional: false - name: python version: 3.11.6 manager: conda platform: linux-64 dependencies: bzip2: '>=1.0.8,<2.0a0' ld_impl_linux-64: '>=2.36.1' libexpat: '>=2.5.0,<3.0a0' libffi: '>=3.4,<4.0a0' libgcc-ng: '>=12' libnsl: '>=2.0.0,<2.1.0a0' libsqlite: '>=3.43.0,<4.0a0' libuuid: '>=2.38.1,<3.0a0' libzlib: '>=1.2.13,<1.3.0a0' ncurses: '>=6.4,<7.0a0' openssl: '>=3.1.3,<4.0a0' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' xz: '>=5.2.6,<6.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda hash: md5: b0dfbe2fcbfdb097d321bfd50ecddab1 sha256: 84f13bd70cff5dcdaee19263b2d4291d5793856a718efc1b63a9cfa9eb6e2ca1 category: main optional: false - name: python_abi version: '3.11' manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda hash: md5: d786502c97404c94d7d58d258a445a65 sha256: 0be3ac1bf852d64f553220c7e6457e9c047dfb7412da9d22fbaa67e60858b3cf category: main optional: false - name: readline version: '8.2' manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' ncurses: '>=6.3,<7.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda hash: md5: 47d31b792659ce70f470b5c82fdfb7a4 sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 category: main optional: false - name: setuptools version: 68.2.2 manager: conda platform: linux-64 dependencies: python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda hash: md5: fc2166155db840c634a1291a5c35a709 sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7 category: main optional: false - name: tk version: 8.6.13 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda hash: md5: d453b98d9c83e71da0741bb0ff4d76bc sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e category: main optional: false - name: tzdata version: 2023c manager: conda platform: linux-64 dependencies: {} url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda hash: md5: 939e3e74d8be4dac89ce83b20de2492a sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 category: main optional: false - name: wheel version: 0.42.0 manager: conda platform: linux-64 dependencies: python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda hash: md5: 1cdea58981c5cbc17b51973bcaddcea7 sha256: 80be0ccc815ce22f80c141013302839b0ed938a2edb50b846cf48d8a8c1cfa01 category: main optional: false - name: xz version: 5.2.6 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 hash: md5: 2161070d867d1b1204ea749c8eec4ef0 sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 category: main optional: false - name: cutde version: 23.6.25 manager: pip platform: linux-64 dependencies: mako: '*' pybind11: '*' url: https://files.pythonhosted.org/packages/08/15/0ae45db8fcc0d2da6002d13900689e2fe7773da038922b1ff450ab08088e/cutde-23.6.25.tar.gz hash: sha256: 946aeb03b3bf2f9060dabda1dd84330a67a7fddab27879010107382bcca31eac category: main optional: false - name: mako version: 1.3.0 manager: pip platform: linux-64 dependencies: markupsafe: '>=0.9.2' url: 
https://files.pythonhosted.org/packages/24/3b/11fe92d68c6a42468ddab0cf03f454419b0788fff4e91ba46b8bebafeffd/Mako-1.3.0-py3-none-any.whl hash: sha256: 57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9 category: main optional: false - name: markupsafe version: 2.1.3 manager: pip platform: linux-64 dependencies: {} url: https://files.pythonhosted.org/packages/fe/21/2eff1de472ca6c99ec3993eab11308787b9879af9ca8bbceb4868cf4f2ca/MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl hash: sha256: bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 category: main optional: false - name: rsync-time-machine version: 1.3.0 manager: pip platform: linux-64 dependencies: {} url: https://files.pythonhosted.org/packages/42/88/f32647517b00f937c66ae2891f22ebb614ac521386254c2eefd9d770c05e/rsync_time_machine-1.3.0-py3-none-any.whl hash: sha256: 371c23dddddedee51c57dec1f31de82465b9139f17357754dc92269d58c3d454 category: main optional: false ================================================ FILE: tests/test-pip-package-with-conda-dependency/project1/requirements.yaml ================================================ name: project1 channels: - conda-forge dependencies: - pybind11 platforms: - linux-64 ================================================ FILE: tests/test-pip-package-with-conda-dependency/project2/requirements.yaml ================================================ name: project2 channels: - conda-forge dependencies: - conda: python=3.11 - pip: cutde # depends on pybind11, but pybind11 is installed via conda because project1/ - pip: rsync-time-machine platforms: - linux-64 ================================================ FILE: tests/test_cli.py ================================================ """unidep CLI tests.""" from __future__ import annotations import os import platform import re import shutil import subprocess import sys import textwrap from contextlib import contextmanager from pathlib import Path from typing import Any, Generator from unittest.mock import patch import pytest try: import tomllib except ImportError: # pragma: no cover import tomli as tomllib from unidep._cli import ( CondaExecutable, _capitalize_dir, _collect_available_optional_dependency_groups, _collect_selected_conda_like_platforms, _conda_env_list, _conda_info, _conda_root_prefix, _find_windows_path, _flatten_selected_dependency_entries, _get_conda_executable, _identify_conda_executable, _install_all_command, _install_command, _maybe_conda_run, _maybe_create_conda_env_args, _merge_command, _merge_optional_dependency_extras, _pip_compile_command, _pip_subcommand, _print_versions, ) from unidep._dependencies_parsing import parse_requirements REPO_ROOT = Path(__file__).parent.parent EXAMPLE_PROJECTS = [ "setup_py_project", "setuptools_project", "hatch_project", "pyproject_toml_project", "hatch2_project", ] def current_env_and_prefix() -> tuple[str, Path]: """Get the current conda environment name and prefix.""" try: prefix = _conda_root_prefix("conda") except (KeyError, FileNotFoundError): prefix = _conda_root_prefix("micromamba") folder, env_name = Path(os.environ["CONDA_PREFIX"]).parts[-2:] if folder != "envs": return "base", prefix return env_name, prefix / "envs" / env_name @pytest.mark.parametrize( "project", EXAMPLE_PROJECTS, ) def test_install_command(project: str, capsys: pytest.CaptureFixture) -> None: current_env, prefix = current_env_and_prefix() print(f"current_env: {current_env}, prefix: {prefix}") for kw in [ {"conda_env_name": current_env, "conda_env_prefix": None}, 
{"conda_env_name": None, "conda_env_prefix": prefix}, ]: _install_command( REPO_ROOT / "example" / project, conda_executable="", # type: ignore[arg-type] conda_lock_file=None, dry_run=True, editable=False, verbose=True, **kw, # type: ignore[arg-type] ) captured = capsys.readouterr() assert "Installing conda dependencies" in captured.out assert "Installing pip dependencies" in captured.out assert "Installing project with" in captured.out @pytest.mark.parametrize( "project", EXAMPLE_PROJECTS, ) def test_unidep_install_dry_run(project: str) -> None: # Path to the requirements file requirements_path = REPO_ROOT / "example" / project # Ensure the requirements file exists assert requirements_path.exists(), "Requirements file does not exist" # Run the unidep install command result = subprocess.run( [ # noqa: S607 "unidep", "install", "--dry-run", str(requirements_path), ], check=True, capture_output=True, text=True, encoding="utf-8", ) # Check the output assert result.returncode == 0, "Command failed to execute successfully" if project in ("setup_py_project", "setuptools_project"): assert "📦 Installing conda dependencies with" in result.stdout assert "📦 Installing pip dependencies with" in result.stdout assert "📦 Installing project with" in result.stdout def test_install_all_command(capsys: pytest.CaptureFixture) -> None: _install_all_command( conda_executable="", # type: ignore[arg-type] conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=True, editable=True, directory=REPO_ROOT / "example", depth=1, verbose=False, ) captured = capsys.readouterr() assert "Installing conda dependencies" in captured.out assert "Installing pip dependencies" in captured.out projects = [REPO_ROOT / "example" / p for p in EXAMPLE_PROJECTS] pkgs = " ".join([f"-e {p}" for p in sorted(projects)]) assert f"pip install --no-deps {pkgs}`" in captured.out def test_install_command_deduplicates_shared_local_dependencies( tmp_path: Path, capsys: pytest.CaptureFixture, ) -> None: fixture_root = REPO_ROOT / "tests" / "shared_local_install_monorepo" monorepo = tmp_path / fixture_root.name shutil.copytree(fixture_root, monorepo) shared = monorepo / "shared" project1 = monorepo / "project1" project2 = monorepo / "project2" _install_command( project1, project2, conda_executable="", # type: ignore[arg-type] conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=True, editable=True, no_dependencies=True, no_uv=True, verbose=False, ) captured = capsys.readouterr() pkgs = " ".join([f"-e {p}" for p in sorted((project1, project2, shared))]) assert f"pip install --no-deps {pkgs}`" in captured.out assert captured.out.count(f"-e {shared}") == 1 def mock_uv_env(tmp_path: Path) -> dict[str, str]: """Create a mock uv executable and return env with it in the PATH.""" mock_uv_path = tmp_path / ("uv.bat" if platform.system() == "Windows" else "uv") if platform.system() == "Windows": mock_uv_path.write_text("@echo off\necho Mock uv called %*") else: mock_uv_path.write_text("#!/bin/sh\necho 'Mock uv called' \"$@\"") mock_uv_path.chmod(0o755) # Make it executable # Add tmp_path to the PATH environment variable env = os.environ.copy() env["PATH"] = f"{tmp_path}{os.pathsep}{env['PATH']}" return env @pytest.mark.parametrize("with_uv", [True, False]) def test_unidep_install_all_dry_run(tmp_path: Path, with_uv: bool) -> None: # noqa: FBT001 # Path to the requirements file requirements_path = REPO_ROOT / "example" # Ensure the requirements file exists assert requirements_path.exists(), "Requirements file does not 
exist" # Run the unidep install command result = subprocess.run( [ # noqa: S607 "unidep", "install-all", "--dry-run", "--editable", "--directory", str(requirements_path), *(["--no-uv"] if not with_uv else []), ], check=True, capture_output=True, text=True, encoding="utf-8", env=mock_uv_env(tmp_path) if with_uv else None, ) # Check the output assert result.returncode == 0, "Command failed to execute successfully" assert "📦 Installing conda dependencies with `" in result.stdout assert r"📦 Installing pip dependencies with `" in result.stdout assert ( "📝 Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']}" in result.stdout ) projects = [REPO_ROOT / "example" / p for p in EXAMPLE_PROJECTS] pkgs = " ".join([f"-e {p}" for p in sorted(projects)]) assert "📦 Installing project with `" in result.stdout if with_uv: assert "uv pip install --python" in result.stdout else: assert f" -m pip install --no-deps {pkgs}" in result.stdout def test_unidep_conda() -> None: # Path to the requirements file requirements_path = REPO_ROOT / "example" / "setup_py_project" assert requirements_path.exists(), "Requirements file does not exist" result = subprocess.run( [ # noqa: S607 "unidep", "conda", "--file", str(requirements_path), ], check=True, capture_output=True, text=True, encoding="utf-8", ) # Check the output assert result.returncode == 0, "Command failed to execute successfully" assert "pandas" in result.stdout def test_unidep_pixi_cli_respects_overrides(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - numpy >=1.20 - pandas >=2.0 - scipy <1.10 - pyobjc # [osx] platforms: - linux-64 - osx-arm64 """, ), ) output_file = tmp_path / "pixi.toml" result = subprocess.run( [ # noqa: S607 "unidep", "pixi", "--file", str(req_file), "--output", str(output_file), "--name", "test-project", "--platform", "linux-64", "--ignore-pin", "numpy", "--skip-dependency", "pandas", "--overwrite-pin", "scipy>=1.11", ], check=True, capture_output=True, text=True, encoding="utf-8", ) assert result.returncode == 0, "Command failed to execute successfully" with output_file.open("rb") as f: data = tomllib.load(f) deps = data["dependencies"] assert deps["numpy"] == "*" assert "pandas" not in deps assert deps["scipy"] == ">=1.11" assert data["workspace"]["platforms"] == ["linux-64"] assert "target" not in data or "osx-arm64" not in data["target"] def test_unidep_pixi_cli_channel_override(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, ), ) output_file = tmp_path / "pixi.toml" result = subprocess.run( [ # noqa: S607 "unidep", "pixi", "--file", str(req_file), "--output", str(output_file), "--channel", "defaults", "--channel", "bioconda", ], check=True, capture_output=True, text=True, encoding="utf-8", ) assert result.returncode == 0 with output_file.open("rb") as f: data = tomllib.load(f) assert data["workspace"]["channels"] == ["defaults", "bioconda"] def test_unidep_pixi_cli_ranged_build_string(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - conda: numpy >=1.20,<1.21 py310* platforms: - linux-64 """, ), ) output_file = tmp_path / "pixi.toml" result = subprocess.run( [ # noqa: S607 "unidep", 
"pixi", "--file", str(req_file), "--output", str(output_file), ], check=True, capture_output=True, text=True, encoding="utf-8", ) assert result.returncode == 0, "Command failed to execute successfully" with output_file.open("rb") as f: data = tomllib.load(f) numpy_spec = data["dependencies"]["numpy"] assert numpy_spec["version"] == ">=1.20,<1.21" assert numpy_spec["build"] == "py310*" def test_merge_uses_selector_platforms_when_no_platforms_declared( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - cuda-toolkit # [linux64] """, ), ) output_file = tmp_path / "environment.yaml" with patch("unidep.utils.identify_current_platform", return_value="osx-arm64"): _merge_command( depth=1, directory=tmp_path, files=[req_file], name="myenv", output=output_file, stdout=False, selector="comment", platforms=[], optional_dependencies=[], all_optional_dependencies=False, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) content = output_file.read_text() assert "platforms:" in content assert " - linux-64" in content assert " - osx-arm64" not in content @pytest.mark.parametrize( ( "content", "current_platform", "expected_dependency", "expected_platforms", "excluded_platform", ), [ ( """\ dependencies: - conda: click >=8 - pip: click # [osx] """, "linux-64", " - click >=8", [" - osx-64", " - osx-arm64"], " - linux-64", ), ( """\ dependencies: - pip: click ==0.1 - conda: click # [linux64] """, "osx-arm64", " - click ==0.1", [" - linux-64"], " - osx-arm64", ), ], ) def test_merge_uses_selector_platforms_even_for_losing_alternatives( tmp_path: Path, content: str, current_platform: str, expected_dependency: str, expected_platforms: list[str], excluded_platform: str, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text(textwrap.dedent(content)) output_file = tmp_path / "environment.yaml" with patch("unidep.utils.identify_current_platform", return_value=current_platform): _merge_command( depth=1, directory=tmp_path, files=[req_file], name="myenv", output=output_file, stdout=False, selector="comment", platforms=[], optional_dependencies=[], all_optional_dependencies=False, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) merged = output_file.read_text() assert expected_dependency in merged assert "platforms:" in merged for expected_platform in expected_platforms: assert expected_platform in merged assert excluded_platform not in merged def test_merge_command_includes_selected_optional_dependencies( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: docs: - sphinx test: - pytest """, ), ) output_file = tmp_path / "environment.yaml" _merge_command( depth=1, directory=tmp_path, files=[req_file], name="myenv", output=output_file, stdout=False, selector="comment", platforms=[], optional_dependencies=["docs", "test"], all_optional_dependencies=False, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) merged = output_file.read_text() assert " - numpy" in merged assert " - sphinx" in merged assert " - pytest" in merged def test_merge_command_includes_all_optional_dependencies( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: docs: - sphinx test: - pytest """, ), ) output_file = tmp_path / "environment.yaml" _merge_command( 
depth=1, directory=tmp_path, files=[req_file], name="myenv", output=output_file, stdout=False, selector="comment", platforms=[], optional_dependencies=[], all_optional_dependencies=True, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) merged = output_file.read_text() assert " - numpy" in merged assert " - sphinx" in merged assert " - pytest" in merged def test_merge_command_includes_local_only_optional_dependencies( tmp_path: Path, ) -> None: local_project = tmp_path / "local-project" local_project.mkdir() (local_project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - adaptive optional_dependencies: test: - pytest """, ), ) req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: local: - ./local-project[test] """, ), ) output_file = tmp_path / "environment.yaml" _merge_command( depth=1, directory=tmp_path, files=[req_file], name="myenv", output=output_file, stdout=False, selector="comment", platforms=[], optional_dependencies=["local"], all_optional_dependencies=False, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) merged = output_file.read_text() assert " - numpy" in merged assert " - adaptive" in merged assert " - pytest" in merged def test_merge_optional_dependency_extras_rejects_unknown_group( tmp_path: Path, capsys: pytest.CaptureFixture[str], ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ optional_dependencies: docs: - sphinx test: - pytest """, ), ) with pytest.raises(SystemExit, match="1"): _merge_optional_dependency_extras( found_files=[req_file], optional_dependencies=["dev"], all_optional_dependencies=False, ) captured = capsys.readouterr() assert "Unknown optional dependency group(s): `dev`" in captured.out assert "Valid groups: `docs`, `test`." in captured.out def test_merge_optional_dependency_extras_validates_across_all_files( tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir() req1 = project1 / "pyproject.toml" req1.write_text( textwrap.dedent( """\ [tool.unidep] [tool.unidep.optional_dependencies] docs = ["sphinx"] """, ), ) project2 = tmp_path / "project2" project2.mkdir() req2 = project2 / "requirements.yaml" req2.write_text( textwrap.dedent( """\ optional_dependencies: test: - pytest """, ), ) extras = _merge_optional_dependency_extras( found_files=[req1, req2], optional_dependencies=["test"], all_optional_dependencies=False, ) assert extras == [["test"], ["test"]] def test_collect_available_optional_dependency_groups_preserves_local_only_groups( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ optional_dependencies: docs: - sphinx local: - ../missing-project[test] """, ), ) groups = _collect_available_optional_dependency_groups([req_file]) assert groups == ["docs", "local"] def test_merge_optional_dependency_extras_reports_when_no_groups_exist( tmp_path: Path, capsys: pytest.CaptureFixture[str], ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text("dependencies:\n - numpy\n") with pytest.raises(SystemExit, match="1"): _merge_optional_dependency_extras( found_files=[req_file], optional_dependencies=["dev"], all_optional_dependencies=False, ) captured = capsys.readouterr() assert "Unknown optional dependency group(s): `dev`" in captured.out assert "No optional dependency groups were found." 
in captured.out def test_flatten_selected_dependency_entries_includes_optional_groups( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: dev: - pytest """, ), ) requirements = parse_requirements(req_file, extras=[["*"]]) entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) def entry_name(entry: Any) -> str: conda = entry.conda pip = entry.pip if conda is not None: return conda.name assert pip is not None return pip.name assert [entry_name(entry) for entry in entries] == [ "numpy", "pytest", ] def test_collect_selected_conda_like_platforms_uses_both_source_selectors( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - conda: click # [linux64] pip: click # [osx] """, ), ) requirements = parse_requirements(req_file) entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) assert _collect_selected_conda_like_platforms(entries) == [ "linux-64", "osx-64", "osx-arm64", ] def test_collect_selected_conda_like_platforms_preserves_selector_platforms( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - conda: click >=8 - pip: click # [osx] """, ), ) requirements = parse_requirements(req_file) entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) assert _collect_selected_conda_like_platforms(entries) == [ "osx-64", "osx-arm64", ] def test_unidep_pixi_cli_optional_monorepo_env_includes_base( tmp_path: Path, ) -> None: project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = project1_dir / "requirements.yaml" req1.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - numpy optional_dependencies: dev: - pytest platforms: - linux-64 """, ), ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = project2_dir / "requirements.yaml" req2.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - pandas platforms: - linux-64 """, ), ) output_file = tmp_path / "pixi.toml" result = subprocess.run( [ # noqa: S607 "unidep", "pixi", "--file", str(project1_dir), "--file", str(project2_dir), "--output", str(output_file), ], check=True, capture_output=True, text=True, encoding="utf-8", ) assert result.returncode == 0, "Command failed to execute successfully" with output_file.open("rb") as f: data = tomllib.load(f) envs = data["environments"] assert set(envs["project1-dev"]) == {"project1", "project1-dev"} def test_unidep_file_not_found_error() -> None: # Path to the requirements file requirements_path = REPO_ROOT / "yolo" assert not requirements_path.exists() # Run the unidep install command result = subprocess.run( [ # noqa: S607 "unidep", "conda", "--file", str(requirements_path), ], check=False, capture_output=True, text=True, encoding="utf-8", ) assert result.returncode == 1, "Command unexpectedly succeeded" assert "❌ One or more files" in result.stdout def test_doubly_nested_project_folder_installable( tmp_path: Path, ) -> None: example_folder = tmp_path / "example" shutil.copytree(REPO_ROOT / "example", example_folder) # Add an extra project extra_projects = example_folder / "extra_projects" extra_projects.mkdir(exist_ok=True, parents=True) project4 = extra_projects / "project4" 
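# The fixture tree built below nests project4 two levels under the example
# root, so `install-all` only discovers it when --depth is at least 2:
#
#   example/
#   ├── hatch_project/, setup_py_project/, ...   (copied from REPO_ROOT / "example")
#   └── extra_projects/
#       └── project4/
#           ├── requirements.yaml   (local_dependencies: [../../setup_py_project])
#           ├── pyproject.toml / setup.py
#           └── project4.py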
project4.mkdir(exist_ok=True, parents=True) (project4 / "requirements.yaml").write_text( "local_dependencies: [../../setup_py_project]", ) pyproject_toml = "\n".join( # noqa: FLY002 ( "[build-system]", 'requires = ["setuptools", "unidep"]', 'build-backend = "setuptools.build_meta"', ), ) (project4 / "pyproject.toml").write_text(pyproject_toml) setup = "\n".join( # noqa: FLY002 ( "from setuptools import setup", 'setup(name="project4", version="0.1.0", description="yolo", py_modules=["project4"])', ), ) (project4 / "setup.py").write_text(setup) (project4 / "project4.py").write_text("print('hello')") # Run the unidep install command result = subprocess.run( [ # noqa: S607 "unidep", "install", "--dry-run", "--editable", "--no-dependencies", "--no-uv", str(project4 / "requirements.yaml"), ], check=True, capture_output=True, text=True, encoding="utf-8", ) p1 = str(tmp_path / "example" / "hatch_project") p2 = str(tmp_path / "example" / "setup_py_project") p3 = str(tmp_path / "example" / "setuptools_project") p4 = str(tmp_path / "example" / "extra_projects" / "project4") pkgs = " ".join([f"-e {p}" for p in sorted((p1, p2, p3, p4))]) assert f"pip install --no-deps {pkgs}`" in result.stdout p5 = str(tmp_path / "example" / "pyproject_toml_project") p6 = str(tmp_path / "example" / "hatch2_project") # Test depth 2 result = subprocess.run( [ # noqa: S607 "unidep", "install-all", "--dry-run", "--editable", "--no-dependencies", "--no-uv", "--directory", str(example_folder), "--depth", "2", ], check=True, capture_output=True, text=True, encoding="utf-8", ) pkgs = " ".join([f"-e {p}" for p in sorted((p1, p2, p3, p4, p5, p6))]) assert f"pip install --no-deps {pkgs}`" in result.stdout # Test depth 1 (should not install project4) result = subprocess.run( [ # noqa: S607 "unidep", "install-all", "--dry-run", "--editable", "--no-dependencies", "--no-uv", "--directory", str(example_folder), "--depth", "1", ], check=True, capture_output=True, text=True, encoding="utf-8", ) pkgs = " ".join([f"-e {p}" for p in sorted((p1, p2, p3, p5, p6))]) assert f"pip install --no-deps {pkgs}`" in result.stdout def test_pip_compile_command(tmp_path: Path, capsys: pytest.CaptureFixture) -> None: folder = tmp_path / "example" shutil.copytree(REPO_ROOT / "example", folder) with patch("subprocess.run", return_value=None), patch( "importlib.util.find_spec", return_value=True, ): _pip_compile_command( depth=2, directory=folder, platform="linux-64", ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=True, extra_flags=["--", "--allow-unsafe"], ) requirements_in = folder / "requirements.in" assert requirements_in.exists() with requirements_in.open() as f: assert "adaptive" in f.read() requirements_txt = folder / "requirements.txt" assert ( f"Locking dependencies with `pip-compile --output-file {requirements_txt} --allow-unsafe {requirements_in}`" in capsys.readouterr().out ) def test_install_non_existing_file() -> None: with pytest.raises(FileNotFoundError, match=r"File `does_not_exist` not found\."): _install_command( Path("does_not_exist"), conda_executable="", # type: ignore[arg-type] conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=True, editable=True, verbose=True, ) def test_install_non_existing_folder(tmp_path: Path) -> None: requirements_file = tmp_path / "requirements.yaml" pyproject_file = tmp_path / "pyproject.toml" match = re.escape( f"File `{requirements_file}` or `{pyproject_file}`" f" (with unidep configuration) not found in `{tmp_path}`", ) with pytest.raises(FileNotFoundError,
match=match): _install_command( tmp_path, conda_executable="", # type: ignore[arg-type] conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=True, editable=True, verbose=True, ) def test_version(capsys: pytest.CaptureFixture) -> None: _print_versions() captured = capsys.readouterr() assert "unidep location" in captured.out assert "unidep version" in captured.out assert "packaging" in captured.out def test_conda_env_list() -> None: conda_executable = _identify_conda_executable() _conda_env_list(conda_executable) def test_conda_root_prefix_uses_conda_info_when_env_vars_are_unset( monkeypatch: pytest.MonkeyPatch, ) -> None: _conda_info.cache_clear() monkeypatch.delenv("MAMBA_ROOT_PREFIX", raising=False) monkeypatch.delenv("CONDA_ROOT", raising=False) try: with patch( "unidep._cli._conda_cli_command_json", return_value={"root_prefix": "/opt/conda", "conda_prefix": "/fallback"}, ) as conda_cli_command_json: assert _conda_root_prefix("conda") == Path("/opt/conda") conda_cli_command_json.assert_called_once_with("conda", "info") finally: _conda_info.cache_clear() @pytest.mark.parametrize( ("which", "env_var"), [("conda", "CONDA_EXE"), ("micromamba", "MAMBA_EXE")], ) def test_get_conda_executable_uses_env_var_fallback( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, which: CondaExecutable, env_var: str, ) -> None: exe = str(tmp_path / which) monkeypatch.setenv(env_var, exe) with patch("shutil.which", return_value=None): assert _get_conda_executable(which) == exe def test_unidep_version_uses_rich_when_available( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture, ) -> None: rich_dir = tmp_path / "rich" rich_dir.mkdir() (rich_dir / "__init__.py").write_text("") (rich_dir / "console.py").write_text( textwrap.dedent( """\ class Console: def print(self, table): print(f"RICH:{table.columns}|{table.rows}") """, ), ) (rich_dir / "table.py").write_text( textwrap.dedent( """\ class Table: def __init__(self, *, show_header): self.show_header = show_header self.columns = [] self.rows = [] def add_column(self, name, *, style): self.columns.append((name, style)) def add_row(self, prop, value): self.rows.append((prop, value)) """, ), ) monkeypatch.syspath_prepend(str(tmp_path)) monkeypatch.delitem(sys.modules, "rich", raising=False) monkeypatch.delitem(sys.modules, "rich.console", raising=False) monkeypatch.delitem(sys.modules, "rich.table", raising=False) _print_versions() output = capsys.readouterr().out assert "RICH:[('Property', 'cyan'), ('Value', 'magenta')]" in output assert "('unidep version'," in output assert "('packaging version'," in output def test_pip_optional(tmp_path: Path) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo optional_dependencies: test: - bar """, ), ) txt = _pip_subcommand( file=[p], platforms=[], verbose=True, ignore_pins=None, skip_dependencies=None, overwrite_pins=None, separator=" ", ) assert txt == "foo" txt = _pip_subcommand( file=[f"{p}[test]"], # type: ignore[list-item] platforms=[], verbose=True, ignore_pins=None, skip_dependencies=None, overwrite_pins=None, separator=" ", ) assert txt == "foo bar" def test_capitalize_last_dir() -> None: # Just needs to work for Windows paths assert _capitalize_dir(r"foo\bar\baz") == r"foo\bar\Baz" assert _capitalize_dir(r"foo\bar\baz", capitalize=False) == r"foo\bar\baz" assert _capitalize_dir(r"foo\bar\baz", capitalize=True) == r"foo\bar\Baz" @pytest.mark.skipif( os.name == "nt", reason="Don't test on Windows to make sure that conda is not 
found.", ) def test_find_conda_windows() -> None: """Tests whether the function searches the expected paths.""" with pytest.raises( FileNotFoundError, match=r"Could not find conda\.", ) as excinfo: _find_windows_path("conda") # This Windows hell... 🤦‍♂️ paths = [ r"👉 %USERPROFILE%\Anaconda3\condabin\conda.exe", r"👉 %USERPROFILE%\anaconda3\condabin\conda.exe", r"👉 %USERPROFILE%\Anaconda3\condabin\conda", r"👉 %USERPROFILE%\anaconda3\condabin\conda", r"👉 %USERPROFILE%\Anaconda3\condabin\conda.bat", r"👉 %USERPROFILE%\anaconda3\condabin\conda.bat", r"👉 %USERPROFILE%\Anaconda3\Scripts\conda.exe", r"👉 %USERPROFILE%\anaconda3\Scripts\conda.exe", r"👉 %USERPROFILE%\Anaconda3\Scripts\conda", r"👉 %USERPROFILE%\anaconda3\Scripts\conda", r"👉 %USERPROFILE%\Anaconda3\Scripts\conda.bat", r"👉 %USERPROFILE%\anaconda3\Scripts\conda.bat", r"👉 %USERPROFILE%\Anaconda3\conda.exe", r"👉 %USERPROFILE%\anaconda3\conda.exe", r"👉 %USERPROFILE%\Anaconda3\conda", r"👉 %USERPROFILE%\anaconda3\conda", r"👉 %USERPROFILE%\Anaconda3\conda.bat", r"👉 %USERPROFILE%\anaconda3\conda.bat", r"👉 %USERPROFILE%\Miniconda3\condabin\conda.exe", r"👉 %USERPROFILE%\miniconda3\condabin\conda.exe", r"👉 %USERPROFILE%\Miniconda3\condabin\conda", r"👉 %USERPROFILE%\miniconda3\condabin\conda", r"👉 %USERPROFILE%\Miniconda3\condabin\conda.bat", r"👉 %USERPROFILE%\miniconda3\condabin\conda.bat", r"👉 %USERPROFILE%\Miniconda3\Scripts\conda.exe", r"👉 %USERPROFILE%\miniconda3\Scripts\conda.exe", r"👉 %USERPROFILE%\Miniconda3\Scripts\conda", r"👉 %USERPROFILE%\miniconda3\Scripts\conda", r"👉 %USERPROFILE%\Miniconda3\Scripts\conda.bat", r"👉 %USERPROFILE%\miniconda3\Scripts\conda.bat", r"👉 %USERPROFILE%\Miniconda3\conda.exe", r"👉 %USERPROFILE%\miniconda3\conda.exe", r"👉 %USERPROFILE%\Miniconda3\conda", r"👉 %USERPROFILE%\miniconda3\conda", r"👉 %USERPROFILE%\Miniconda3\conda.bat", r"👉 %USERPROFILE%\miniconda3\conda.bat", r"👉 C:\Anaconda3\condabin\conda.exe", r"👉 C:\anaconda3\condabin\conda.exe", r"👉 C:\Anaconda3\condabin\conda", r"👉 C:\anaconda3\condabin\conda", r"👉 C:\Anaconda3\condabin\conda.bat", r"👉 C:\anaconda3\condabin\conda.bat", r"👉 C:\Anaconda3\Scripts\conda.exe", r"👉 C:\anaconda3\Scripts\conda.exe", r"👉 C:\Anaconda3\Scripts\conda", r"👉 C:\anaconda3\Scripts\conda", r"👉 C:\Anaconda3\Scripts\conda.bat", r"👉 C:\anaconda3\Scripts\conda.bat", r"👉 C:\Anaconda3\conda.exe", r"👉 C:\anaconda3\conda.exe", r"👉 C:\Anaconda3\conda", r"👉 C:\anaconda3\conda", r"👉 C:\Anaconda3\conda.bat", r"👉 C:\anaconda3\conda.bat", r"👉 C:\Miniconda3\condabin\conda.exe", r"👉 C:\miniconda3\condabin\conda.exe", r"👉 C:\Miniconda3\condabin\conda", r"👉 C:\miniconda3\condabin\conda", r"👉 C:\Miniconda3\condabin\conda.bat", r"👉 C:\miniconda3\condabin\conda.bat", r"👉 C:\Miniconda3\Scripts\conda.exe", r"👉 C:\miniconda3\Scripts\conda.exe", r"👉 C:\Miniconda3\Scripts\conda", r"👉 C:\miniconda3\Scripts\conda", r"👉 C:\Miniconda3\Scripts\conda.bat", r"👉 C:\miniconda3\Scripts\conda.bat", r"👉 C:\Miniconda3\conda.exe", r"👉 C:\miniconda3\conda.exe", r"👉 C:\Miniconda3\conda", r"👉 C:\miniconda3\conda", r"👉 C:\Miniconda3\conda.bat", r"👉 C:\miniconda3\conda.bat", r"👉 C:\ProgramData\Anaconda3\condabin\conda.exe", r"👉 C:\ProgramData\anaconda3\condabin\conda.exe", r"👉 C:\ProgramData\Anaconda3\condabin\conda", r"👉 C:\ProgramData\anaconda3\condabin\conda", r"👉 C:\ProgramData\Anaconda3\condabin\conda.bat", r"👉 C:\ProgramData\anaconda3\condabin\conda.bat", r"👉 C:\ProgramData\Anaconda3\Scripts\conda.exe", r"👉 C:\ProgramData\anaconda3\Scripts\conda.exe", r"👉 C:\ProgramData\Anaconda3\Scripts\conda", r"👉 
C:\ProgramData\anaconda3\Scripts\conda", r"👉 C:\ProgramData\Anaconda3\Scripts\conda.bat", r"👉 C:\ProgramData\anaconda3\Scripts\conda.bat", r"👉 C:\ProgramData\Anaconda3\conda.exe", r"👉 C:\ProgramData\anaconda3\conda.exe", r"👉 C:\ProgramData\Anaconda3\conda", r"👉 C:\ProgramData\anaconda3\conda", r"👉 C:\ProgramData\Anaconda3\conda.bat", r"👉 C:\ProgramData\anaconda3\conda.bat", r"👉 C:\ProgramData\Miniconda3\condabin\conda.exe", r"👉 C:\ProgramData\miniconda3\condabin\conda.exe", r"👉 C:\ProgramData\Miniconda3\condabin\conda", r"👉 C:\ProgramData\miniconda3\condabin\conda", r"👉 C:\ProgramData\Miniconda3\condabin\conda.bat", r"👉 C:\ProgramData\miniconda3\condabin\conda.bat", r"👉 C:\ProgramData\Miniconda3\Scripts\conda.exe", r"👉 C:\ProgramData\miniconda3\Scripts\conda.exe", r"👉 C:\ProgramData\Miniconda3\Scripts\conda", r"👉 C:\ProgramData\miniconda3\Scripts\conda", r"👉 C:\ProgramData\Miniconda3\Scripts\conda.bat", r"👉 C:\ProgramData\miniconda3\Scripts\conda.bat", r"👉 C:\ProgramData\Miniconda3\conda.exe", r"👉 C:\ProgramData\miniconda3\conda.exe", r"👉 C:\ProgramData\Miniconda3\conda", r"👉 C:\ProgramData\miniconda3\conda", r"👉 C:\ProgramData\Miniconda3\conda.bat", r"👉 C:\ProgramData\miniconda3\conda.bat", ] for path in paths: assert path in excinfo.value.args[0] def test_find_windows_path_returns_existing_mamba_location( monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.setattr( "os.path.exists", lambda path: "mambaforge" in path and str(path).endswith("mamba.exe"), ) found = _find_windows_path("mamba") assert found.endswith(r"mambaforge\condabin\mamba.exe") def test_find_windows_path_returns_existing_micromamba_location( monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.setattr( "os.path.exists", lambda path: "micromamba" in path and str(path).endswith("micromamba.exe"), ) found = _find_windows_path("micromamba") assert found.endswith(r"Micromamba\condabin\micromamba.exe") @contextmanager def set_env_var(key: str, value: str) -> Generator[None, None, None]: original_value = os.environ.get(key) os.environ[key] = value try: yield finally: if original_value is None: del os.environ[key] else: os.environ[key] = original_value @pytest.mark.skipif( os.name == "nt", reason="On Windows it will search for Conda because of `_maybe_exe`.", ) def test_maybe_conda_run() -> None: with set_env_var("CONDA_EXE", "conda"): result = _maybe_conda_run("conda", "my_env", None) assert result == ["conda", "run", "--name", "my_env"] p = Path("/path/to/env") with set_env_var("CONDA_EXE", "conda"): result = _maybe_conda_run("conda", None, p) assert result == ["conda", "run", "--prefix", str(p)] with set_env_var("MAMBA_EXE", "mamba"): result = _maybe_conda_run("mamba", "my_env", None) assert result == ["mamba", "run", "--name", "my_env"] def test_maybe_conda_run_without_executable_returns_empty() -> None: assert _maybe_conda_run(None, "my_env", None) == [] def test_maybe_conda_run_without_active_environment_returns_empty( monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.delenv("CONDA_PREFIX", raising=False) monkeypatch.delenv("MAMBA_ROOT_PREFIX", raising=False) assert _maybe_conda_run("conda", None, None) == [] def test_maybe_create_conda_env_args_creates_env( monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture, ) -> None: """Test that _maybe_create_conda_env_args creates the environment if it doesn't exist. This simulates running: unidep install --conda-env-name non-existing-env . and checks that the function to create a conda environment is called. 
""" # Create a flag to record if _create_conda_environment is called created = [] # Define a fake _create_conda_environment that records its call def fake_create( conda_executable: CondaExecutable, # noqa: ARG001 *args: str, ) -> None: created.append(args) print("Fake create called with", args) # Patch the _create_conda_environment function monkeypatch.setattr( "unidep._cli._create_conda_environment", fake_create, ) # Patch _conda_env_name_to_prefix to simulate that the environment is missing. def fake_env_name_to_prefix( conda_executable: CondaExecutable, # noqa: ARG001 env_name: str, # noqa: ARG001 *, raise_if_not_found: bool = True, # noqa: ARG001 ) -> Path | None: # Simulate that for "non-existing-env" no environment exists. return None monkeypatch.setattr( "unidep._cli._conda_env_name_to_prefix", fake_env_name_to_prefix, ) # Now call _maybe_create_conda_env_args with a non-existing environment name. args = _maybe_create_conda_env_args("conda", "non-existing-env", None) # Check that our fake_create was called (i.e. the environment creation was triggered) assert created, ( "Expected environment creation to be triggered for non-existing env." ) # Also, the returned arguments should be the standard ones for a named env. assert args == ["--name", "non-existing-env"] # Optionally, verify that our fake function printed the expected message. output = capsys.readouterr().out assert "Fake create called with" in output # Now with a prefix prefix = Path("/home/user/micromamba/envs/non-existing-env") args = _maybe_create_conda_env_args("conda", None, prefix) # Check that our fake_create was called (i.e. the environment creation was triggered) assert created, ( "Expected environment creation to be triggered for non-existing env." ) # Also, the returned arguments should be the standard ones for a named env. assert args == ["--prefix", str(prefix)] # Optionally, verify that our fake function printed the expected message. 
output = capsys.readouterr().out assert "Fake create called with" in output def test_install_command_with_conda_lock_skips_dependency_install( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - numpy """, ), ) created: list[tuple[Path, str]] = [] def fake_create_env_from_lock( conda_lock_file: Path, conda_executable: str, **_: object, ) -> None: created.append((conda_lock_file, conda_executable)) def fake_python_executable(*_args: object) -> str: return "python" monkeypatch.setattr("unidep._cli._create_env_from_lock", fake_create_env_from_lock) monkeypatch.setattr("unidep._cli.identify_current_platform", lambda: "linux-64") monkeypatch.setattr("unidep._cli._python_executable", fake_python_executable) _install_command( req_file, conda_executable="conda", conda_env_name="test-env", conda_env_prefix=None, conda_lock_file=Path("conda-lock.yml"), dry_run=True, editable=False, skip_local=True, verbose=False, ) assert created == [(Path("conda-lock.yml"), "conda")] output = capsys.readouterr().out assert "Installing conda dependencies" not in output assert "Installing pip dependencies" not in output def test_unidep_merge_cli_optional_dependencies(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: docs: - sphinx """, ), ) output_file = tmp_path / "environment.yaml" env = os.environ.copy() env["PYTHONPATH"] = str(REPO_ROOT) result = subprocess.run( [ sys.executable, "-c", "from unidep._cli import main; main()", "merge", "--directory", str(tmp_path), "--depth", "0", "--output", str(output_file), "--optional-dependencies", "docs", ], check=True, capture_output=True, text=True, encoding="utf-8", env=env, ) assert result.returncode == 0 merged = output_file.read_text() assert " - numpy" in merged assert " - sphinx" in merged def test_unidep_merge_cli_all_optional_dependencies(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: docs: - sphinx test: - pytest """, ), ) output_file = tmp_path / "environment.yaml" env = os.environ.copy() env["PYTHONPATH"] = str(REPO_ROOT) result = subprocess.run( [ sys.executable, "-c", "from unidep._cli import main; main()", "merge", "--directory", str(tmp_path), "--depth", "0", "--output", str(output_file), "--all-optional-dependencies", ], check=True, capture_output=True, text=True, encoding="utf-8", env=env, ) assert result.returncode == 0 merged = output_file.read_text() assert " - numpy" in merged assert " - sphinx" in merged assert " - pytest" in merged def test_unidep_merge_cli_rejects_unknown_optional_dependency_group( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ optional_dependencies: docs: - sphinx """, ), ) env = os.environ.copy() env["PYTHONPATH"] = str(REPO_ROOT) result = subprocess.run( [ sys.executable, "-c", "from unidep._cli import main; main()", "merge", "--directory", str(tmp_path), "--depth", "0", "--optional-dependencies", "dev", ], capture_output=True, text=True, encoding="utf-8", env=env, check=False, ) assert result.returncode == 1 assert "Unknown optional dependency group(s): `dev`" in result.stdout assert "Valid groups: `docs`." 
in result.stdout def test_unidep_merge_cli_rejects_mutually_exclusive_optional_flags( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ optional_dependencies: docs: - sphinx """, ), ) env = os.environ.copy() env["PYTHONPATH"] = str(REPO_ROOT) result = subprocess.run( [ sys.executable, "-c", "from unidep._cli import main; main()", "merge", "--directory", str(tmp_path), "--depth", "0", "--optional-dependencies", "docs", "--all-optional-dependencies", ], capture_output=True, text=True, encoding="utf-8", env=env, check=False, ) assert result.returncode == 2 assert "not allowed with argument" in result.stderr def test_unidep_merge_cli_optional_dependencies_across_multiple_files( tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir() (project1 / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy optional_dependencies: docs: - sphinx """, ), ) project2 = tmp_path / "project2" project2.mkdir() (project2 / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - pandas optional_dependencies: test: - pytest """, ), ) output_file = tmp_path / "environment.yaml" env = os.environ.copy() env["PYTHONPATH"] = str(REPO_ROOT) result = subprocess.run( [ sys.executable, "-c", "from unidep._cli import main; main()", "merge", "--directory", str(tmp_path), "--depth", "1", "--output", str(output_file), "--optional-dependencies", "test", ], check=True, capture_output=True, text=True, encoding="utf-8", env=env, ) assert result.returncode == 0 merged = output_file.read_text() assert " - numpy" in merged assert " - pandas" in merged assert " - pytest" in merged assert " - sphinx" not in merged ================================================ FILE: tests/test_cli_install_conda_lock.py ================================================ """Tests for the `unidep._cli` module (installing conda environment from lock file).""" import subprocess from pathlib import Path from unittest.mock import Mock, call, patch import pytest from unidep._cli import ( CondaExecutable, _create_env_from_lock, _verify_conda_lock_installed, ) @pytest.fixture def mock_subprocess_run(monkeypatch: pytest.MonkeyPatch) -> Mock: mock = Mock() monkeypatch.setattr("subprocess.run", mock) return mock @pytest.fixture def mock_print(monkeypatch: pytest.MonkeyPatch) -> Mock: mock = Mock() monkeypatch.setattr("builtins.print", mock) return mock @pytest.mark.parametrize("conda_executable", ["conda", "mamba", "micromamba"]) @pytest.mark.parametrize( "env_spec", [ {"conda_env_name": "test_env", "conda_env_prefix": None}, {"conda_env_name": None, "conda_env_prefix": Path("/path/to/env")}, ], ) def test_create_env_from_lock_dry_run( conda_executable: CondaExecutable, env_spec: dict, mock_subprocess_run: Mock, mock_print: Mock, ) -> None: conda_lock_file = Path("conda-lock.yml") with patch("unidep._cli._verify_conda_lock_installed"): _create_env_from_lock( conda_lock_file=conda_lock_file, conda_executable=conda_executable, **env_spec, dry_run=True, verbose=True, ) # Check that subprocess.run was not called mock_subprocess_run.assert_not_called() # Check that appropriate messages were printed env_identifier = ( f"'{env_spec['conda_env_name']}'" if env_spec["conda_env_name"] else f"at '{env_spec['conda_env_prefix']}'" ) assert len(mock_print.call_args_list) == 2 # Check the first message (creating environment) first_call = mock_print.call_args_list[0] assert first_call.args[0].startswith( f"📦 Creating conda environment {env_identifier} with ", ) 
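# For orientation, the dry-run command asserted on below takes one of two
# shapes (only substring membership is checked, not exact argument order):
#   micromamba   -> `micromamba create -f conda-lock.yml --yes --verbose ...`
#   conda/mamba  -> `conda-lock install --log-level=DEBUG` plus `--mamba` or
#                   `--conda conda`, and `--name <env>` or `--prefix <path>`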
# Check the command string separately cmd_str = first_call.args[0] if conda_executable == "micromamba": assert "micromamba create" in cmd_str or "micromamba.exe create" in cmd_str assert "-f conda-lock.yml" in cmd_str assert "--yes" in cmd_str assert "--verbose" in cmd_str else: assert "conda-lock install" in cmd_str assert "--log-level=DEBUG" in cmd_str if conda_executable == "mamba": assert "--mamba" in cmd_str elif conda_executable == "conda": assert "--conda conda" in cmd_str if env_spec["conda_env_name"]: assert f"--name {env_spec['conda_env_name']}" in cmd_str elif env_spec["conda_env_prefix"]: assert f"--prefix {env_spec['conda_env_prefix']}" in cmd_str # Check the second message (dry run completed) assert mock_print.call_args_list[1] == call( "🏁 Dry run completed. No environment was created.", ) def test_create_env_from_lock_no_env_specified(mock_print: Mock) -> None: conda_lock_file = Path("conda-lock.yml") with pytest.raises(SystemExit): _create_env_from_lock( conda_lock_file=conda_lock_file, conda_executable="conda", conda_env_name=None, conda_env_prefix=None, dry_run=True, verbose=True, ) mock_print.assert_called_once_with( "❌ Please provide either `--conda-env-name` or" " `--conda-env-prefix` when using `--conda-lock-file`.", ) def test_create_env_from_lock_verifies_installation_for_conda( mock_subprocess_run: Mock, ) -> None: with patch("unidep._cli._verify_conda_lock_installed") as verify: _create_env_from_lock( conda_lock_file=Path("conda-lock.yml"), conda_executable="conda", conda_env_name="test-env", conda_env_prefix=None, dry_run=False, verbose=False, ) verify.assert_called_once() mock_subprocess_run.assert_called_once() def test_verify_conda_lock_installed_not_found( monkeypatch: pytest.MonkeyPatch, mock_print: Mock, ) -> None: monkeypatch.setattr("shutil.which", lambda _: None) with pytest.raises(SystemExit): _verify_conda_lock_installed() assert ( "❌ conda-lock is not installed or not found in PATH." in mock_print.call_args[0][0] ) def test_verify_conda_lock_installed_not_working( monkeypatch: pytest.MonkeyPatch, mock_print: Mock, ) -> None: monkeypatch.setattr("shutil.which", lambda _: "/path/to/conda-lock") monkeypatch.setattr( subprocess, "run", Mock(side_effect=subprocess.CalledProcessError(1, "conda-lock")), ) with pytest.raises(SystemExit): _verify_conda_lock_installed() assert ( "❌ conda-lock is installed but not working correctly." 
in mock_print.call_args[0][0] ) ================================================ FILE: tests/test_conda_lock.py ================================================ """unidep conda-lock tests.""" from __future__ import annotations import shutil import sys import types from pathlib import Path from typing import TYPE_CHECKING, Any from unittest.mock import patch import pytest from ruamel.yaml import YAML from unidep._conda_lock import ( LockSpec, _check_consistent_lock_files, _conda_lock_subpackage, _conda_lock_subpackages, _download_and_get_package_names, _handle_missing_keys, _parse_conda_lock_packages, conda_lock_command, ) from unidep.utils import remove_top_comments if TYPE_CHECKING: from unidep.platform_definitions import CondaPip, Platform def test_conda_lock_command(tmp_path: Path) -> None: folder = tmp_path / "simple_monorepo" shutil.copytree(Path(__file__).parent / "simple_monorepo", folder) with patch("unidep._conda_lock._run_conda_lock", return_value=None): conda_lock_command( depth=1, directory=folder, files=None, platforms=["linux-64", "osx-arm64"], verbose=True, only_global=False, check_input_hash=True, ignore_pins=[], overwrite_pins=[], skip_dependencies=[], extra_flags=["--", "--micromamba"], ) with YAML(typ="safe") as yaml: with (folder / "project1" / "conda-lock.yml").open() as f: lock1 = yaml.load(f) with (folder / "project2" / "conda-lock.yml").open() as f: lock2 = yaml.load(f) assert [p["name"] for p in lock1["package"] if p["platform"] == "osx-arm64"] == [ "bzip2", "python_abi", "tzdata", ] assert [p["name"] for p in lock2["package"] if p["platform"] == "osx-arm64"] == [ "python_abi", "tzdata", ] def test_conda_lock_command_pip_package_with_conda_dependency(tmp_path: Path) -> None: folder = tmp_path / "test-pip-package-with-conda-dependency" shutil.copytree( Path(__file__).parent / "test-pip-package-with-conda-dependency", folder, ) with patch("unidep._conda_lock._run_conda_lock", return_value=None): conda_lock_command( depth=1, directory=folder, files=None, platforms=["linux-64"], verbose=True, only_global=False, check_input_hash=True, ignore_pins=[], overwrite_pins=[], skip_dependencies=[], extra_flags=[], ) with YAML(typ="safe") as yaml: with (folder / "project1" / "conda-lock.yml").open() as f: lock1 = yaml.load(f) with (folder / "project2" / "conda-lock.yml").open() as f: lock2 = yaml.load(f) assert [p["name"] for p in lock1["package"]] == [ "_libgcc_mutex", "_openmp_mutex", "bzip2", "ca-certificates", "ld_impl_linux-64", "libexpat", "libffi", "libgcc-ng", "libgomp", "libnsl", "libsqlite", "libstdcxx-ng", "libuuid", "libzlib", "ncurses", "openssl", "pybind11", "pybind11-global", "python", "python_abi", "readline", "tk", "tzdata", "xz", ] assert [p["name"] for p in lock2["package"]] == [ "_libgcc_mutex", "_openmp_mutex", "bzip2", "ca-certificates", "ld_impl_linux-64", "libexpat", "libffi", "libgcc-ng", "libgomp", "libnsl", "libsqlite", "libstdcxx-ng", "libuuid", "libzlib", "ncurses", "openssl", "pybind11", "pybind11-global", "python", "python_abi", "readline", "tk", "tzdata", "xz", "cutde", "mako", "markupsafe", "rsync-time-machine", ] def test_conda_lock_global_infers_selector_platforms(tmp_path: Path) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( """\ channels: - conda-forge dependencies: - cuda-toolkit # [linux64] """, ) with patch("unidep._conda_lock._run_conda_lock", return_value=None), patch( "unidep.utils.identify_current_platform", return_value="osx-arm64", ): conda_lock_command( depth=1, directory=tmp_path, files=[req_file], 
platforms=[], verbose=False, only_global=True, check_input_hash=False, ignore_pins=[], overwrite_pins=[], skip_dependencies=[], extra_flags=[], ) tmp_env = tmp_path / "tmp.environment.yaml" with YAML(typ="safe") as yaml, tmp_env.open() as f: data = yaml.load(f) assert data["platforms"] == ["linux-64"] @pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_conda_lock_command_pip_and_conda_different_name( tmp_path: Path, capsys: pytest.CaptureFixture, ) -> None: folder = tmp_path / "test-pip-and-conda-different-name" shutil.copytree(Path(__file__).parent / "test-pip-and-conda-different-name", folder) files = [ folder / "project1" / "requirements.yaml", folder / "project2" / "requirements.yaml", ] with patch("unidep._conda_lock._run_conda_lock", return_value=None): conda_lock_command( depth=1, directory=folder, # ignored when using files files=files, platforms=["linux-64"], verbose=True, only_global=False, check_input_hash=True, ignore_pins=[], overwrite_pins=[], skip_dependencies=[], extra_flags=[], ) assert "Missing keys" not in capsys.readouterr().out def test_remove_top_comments(tmp_path: Path) -> None: test_file = tmp_path / "test_file.txt" test_file.write_text( "# Comment line 1\n# Comment line 2\nActual content line 1\nActual content line 2", ) remove_top_comments(test_file) with test_file.open("r") as file: content = file.read() assert content == "Actual content line 1\nActual content line 2" def test_handle_missing_keys(capsys: pytest.CaptureFixture) -> None: lock_spec = LockSpec( packages={ ("conda", "linux-64", "python-nonexistent"): { "name": "python-nonexistent", "manager": "conda", "platform": "linux-64", "dependencies": [], "url": "https://example.com/nonexistent", }, }, dependencies={("conda", "linux-64", "nonexistent"): set()}, ) # Here the conda package name contains the pip package name, so we download # the conda package to verify that it actually provides our pip package.
locked: list[dict[str, Any]] = [] locked_keys: set[tuple[CondaPip, Platform, str]] = set() missing_keys: set[tuple[CondaPip, Platform, str]] = { ("pip", "linux-64", "nonexistent"), } with patch( "unidep._conda_lock._download_and_get_package_names", return_value=None, ) as mock: _handle_missing_keys( lock_spec=lock_spec, locked_keys=locked_keys, missing_keys=missing_keys, locked=locked, ) mock.assert_called_once() assert f"❌ Missing keys {missing_keys}" in capsys.readouterr().out assert ("pip", "linux-64", "nonexistent") in missing_keys def test_handle_missing_keys_adds_matching_conda_package() -> None: pkg = { "name": "msgpack-python", "manager": "conda", "platform": "linux-64", "dependencies": {}, "url": "https://example.com/msgpack-python.conda", } lock_spec = LockSpec( packages={("conda", "linux-64", "msgpack-python"): pkg}, dependencies={("conda", "linux-64", "msgpack-python"): set()}, ) locked: list[dict[str, Any]] = [] locked_keys: set[tuple[CondaPip, Platform, str]] = set() missing_keys: set[tuple[CondaPip, Platform, str]] = { ("pip", "linux-64", "msgpack"), } with patch( "unidep._conda_lock._download_and_get_package_names", return_value=["msgpack"], ): _handle_missing_keys( lock_spec=lock_spec, locked_keys=locked_keys, missing_keys=missing_keys, locked=locked, ) assert missing_keys == set() assert locked == [pkg] assert ("conda", "linux-64", "msgpack-python") in locked_keys def test_download_and_get_package_names_reads_site_packages( monkeypatch: pytest.MonkeyPatch, ) -> None: def fake_urlretrieve(_url: str, filename: str) -> None: Path(filename).write_text("archive") def fake_extract( _src: str, *, dest_dir: str, components: str | None = None, ) -> None: del components site_packages = Path(dest_dir) / "site-packages" (site_packages / "pkg").mkdir(parents=True) (site_packages / "pkg.dist-info").mkdir() (site_packages / "pkg.egg-info").mkdir() api_module = types.ModuleType("conda_package_handling.api") api_module.extract = fake_extract # type: ignore[attr-defined] package_module = types.ModuleType("conda_package_handling") package_module.api = api_module # type: ignore[attr-defined] monkeypatch.setitem(sys.modules, "conda_package_handling", package_module) monkeypatch.setitem(sys.modules, "conda_package_handling.api", api_module) monkeypatch.setattr("urllib.request.urlretrieve", fake_urlretrieve) names = _download_and_get_package_names( { "name": "pkg", "manager": "conda", "platform": "linux-64", "url": "https://example.com/pkg.conda", }, ) assert names == ["pkg"] def test_download_and_get_package_names_returns_none_without_python_dirs( monkeypatch: pytest.MonkeyPatch, ) -> None: def fake_urlretrieve(_url: str, filename: str) -> None: Path(filename).write_text("archive") def fake_extract( _src: str, *, dest_dir: str, components: str | None = None, ) -> None: del components (Path(dest_dir) / "lib" / "not-python").mkdir(parents=True) api_module = types.ModuleType("conda_package_handling.api") api_module.extract = fake_extract # type: ignore[attr-defined] package_module = types.ModuleType("conda_package_handling") package_module.api = api_module # type: ignore[attr-defined] monkeypatch.setitem(sys.modules, "conda_package_handling", package_module) monkeypatch.setitem(sys.modules, "conda_package_handling.api", api_module) monkeypatch.setattr("urllib.request.urlretrieve", fake_urlretrieve) names = _download_and_get_package_names( { "name": "pkg", "manager": "conda", "platform": "linux-64", "url": "https://example.com/pkg.conda", }, ) assert names is None def
test_download_and_get_package_names_returns_none_without_lib_or_site_packages( monkeypatch: pytest.MonkeyPatch, ) -> None: def fake_urlretrieve(_url: str, filename: str) -> None: Path(filename).write_text("archive") def fake_extract( _src: str, *, dest_dir: str, components: str | None = None, ) -> None: del components (Path(dest_dir) / "share").mkdir(parents=True) api_module = types.ModuleType("conda_package_handling.api") api_module.extract = fake_extract # type: ignore[attr-defined] package_module = types.ModuleType("conda_package_handling") package_module.api = api_module # type: ignore[attr-defined] monkeypatch.setitem(sys.modules, "conda_package_handling", package_module) monkeypatch.setitem(sys.modules, "conda_package_handling.api", api_module) monkeypatch.setattr("urllib.request.urlretrieve", fake_urlretrieve) names = _download_and_get_package_names( { "name": "pkg", "manager": "conda", "platform": "linux-64", "url": "https://example.com/pkg.conda", }, ) assert names is None def test_download_and_get_package_names_returns_none_without_site_packages( monkeypatch: pytest.MonkeyPatch, ) -> None: def fake_urlretrieve(_url: str, filename: str) -> None: Path(filename).write_text("archive") def fake_extract( _src: str, *, dest_dir: str, components: str | None = None, ) -> None: del components (Path(dest_dir) / "lib" / "python3.12").mkdir(parents=True) api_module = types.ModuleType("conda_package_handling.api") api_module.extract = fake_extract # type: ignore[attr-defined] package_module = types.ModuleType("conda_package_handling") package_module.api = api_module # type: ignore[attr-defined] monkeypatch.setitem(sys.modules, "conda_package_handling", package_module) monkeypatch.setitem(sys.modules, "conda_package_handling.api", api_module) monkeypatch.setattr("urllib.request.urlretrieve", fake_urlretrieve) names = _download_and_get_package_names( { "name": "pkg", "manager": "conda", "platform": "linux-64", "url": "https://example.com/pkg.conda", }, ) assert names is None def test_conda_lock_subpackages_skips_root_requirements( tmp_path: Path, ) -> None: root_req = tmp_path / "requirements.yaml" root_req.write_text("dependencies:\n - numpy\n") subdir = tmp_path / "project" subdir.mkdir() sub_req = subdir / "requirements.yaml" sub_req.write_text("dependencies:\n - pandas\n") conda_lock_file = tmp_path / "conda-lock.yml" yaml = YAML(typ="rt") with conda_lock_file.open("w") as fp: yaml.dump( { "metadata": { "channels": [{"url": "conda-forge"}], "platforms": ["linux-64"], }, "package": [], }, fp, ) with patch( "unidep._conda_lock.find_requirements_files", return_value=[root_req, sub_req], ), patch( "unidep._conda_lock._conda_lock_subpackage", return_value=subdir / "conda-lock.yml", ) as mock: lock_files = _conda_lock_subpackages(tmp_path, 1, conda_lock_file) mock.assert_called_once() assert mock.call_args.kwargs["file"] == sub_req assert lock_files == [subdir / "conda-lock.yml"] def test_check_consistent_lock_files_reports_mismatches(tmp_path: Path) -> None: global_lock = tmp_path / "global.yml" sub_lock = tmp_path / "sub.yml" lock_data = { "metadata": {"channels": [], "platforms": ["linux-64"]}, "package": [ { "name": "numpy", "platform": "linux-64", "manager": "conda", "version": "1.0", }, ], } sub_data = { "metadata": {"channels": [], "platforms": ["linux-64"]}, "package": [ { "name": "numpy", "platform": "linux-64", "manager": "conda", "version": "2.0", }, ], } yaml = YAML(typ="safe") with global_lock.open("w") as fp: yaml.dump(lock_data, fp) with sub_lock.open("w") as fp: yaml.dump(sub_data, 
fp) mismatches = _check_consistent_lock_files(global_lock, [sub_lock]) assert len(mismatches) == 1 assert mismatches[0].name == "numpy" assert mismatches[0].version == "2.0" assert mismatches[0].version_global == "1.0" def test_conda_lock_subpackage_uses_selected_same_name_pip_winner( tmp_path: Path, capsys: pytest.CaptureFixture[str], ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( """\ dependencies: - conda: foo - pip: foo >1 """, ) lock_spec = LockSpec( packages={ ("pip", "linux-64", "foo"): { "name": "foo", "manager": "pip", "platform": "linux-64", "version": "2.0", "dependencies": {}, }, }, dependencies={("pip", "linux-64", "foo"): set()}, ) output = _conda_lock_subpackage( file=req_file, lock_spec=lock_spec, channels=["conda-forge"], platforms=["linux-64"], yaml=YAML(typ="rt"), ) assert "Missing keys" not in capsys.readouterr().out yaml = YAML(typ="safe") with output.open() as fp: data = yaml.load(fp) assert [(pkg["manager"], pkg["name"]) for pkg in data["package"]] == [ ("pip", "foo"), ] def test_conda_lock_subpackage_uses_selected_paired_different_name_pip_winner( tmp_path: Path, capsys: pytest.CaptureFixture[str], ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( """\ dependencies: - conda: python-graphviz pip: graphviz >1 """, ) lock_spec = LockSpec( packages={ ("pip", "linux-64", "graphviz"): { "name": "graphviz", "manager": "pip", "platform": "linux-64", "version": "2.0", "dependencies": {}, }, }, dependencies={("pip", "linux-64", "graphviz"): set()}, ) output = _conda_lock_subpackage( file=req_file, lock_spec=lock_spec, channels=["conda-forge"], platforms=["linux-64"], yaml=YAML(typ="rt"), ) assert "Missing keys" not in capsys.readouterr().out yaml = YAML(typ="safe") with output.open() as fp: data = yaml.load(fp) assert [(pkg["manager"], pkg["name"]) for pkg in data["package"]] == [ ("pip", "graphviz"), ] def test_conda_lock_subpackage_uses_selected_pip_winner_with_extras( tmp_path: Path, capsys: pytest.CaptureFixture[str], ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( """\ dependencies: - conda: adaptive pip: adaptive[notebook] """, ) lock_spec = LockSpec( packages={ ("pip", "linux-64", "adaptive"): { "name": "adaptive", "manager": "pip", "platform": "linux-64", "version": "1.0", "dependencies": {"rich": "13.0"}, }, ("pip", "linux-64", "rich"): { "name": "rich", "manager": "pip", "platform": "linux-64", "version": "13.0", "dependencies": {}, }, }, dependencies={ ("pip", "linux-64", "adaptive"): {"rich"}, ("pip", "linux-64", "rich"): set(), }, ) output = _conda_lock_subpackage( file=req_file, lock_spec=lock_spec, channels=["conda-forge"], platforms=["linux-64"], yaml=YAML(typ="rt"), ) assert "Missing keys" not in capsys.readouterr().out yaml = YAML(typ="safe") with output.open() as fp: data = yaml.load(fp) assert [(pkg["manager"], pkg["name"]) for pkg in data["package"]] == [ ("pip", "adaptive"), ("pip", "rich"), ] def test_circular_dependency() -> None: """Test that circular dependencies are handled correctly. This test is based on the following requirements.yml file: ```yaml channels: - conda-forge dependencies: - sphinx platforms: - linux-64 ``` The sphinx package has a circular dependency to itself, e.g., `sphinx` depends on `sphinxcontrib-applehelp` which depends on `sphinx`. Then we called `unidep conda-lock` on the above requirements.yml file. The bit to reproduce the error is in the `package` list below. 
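    Conceptually, `_parse_conda_lock_packages` has to terminate even though
    following the dependency edges leads back to the starting package:

        sphinx -> sphinxcontrib-applehelp -> sphinx -> ...

    As asserted below, the cyclic `dependencies` mappings are kept as plain
    data in `LockSpec.packages` rather than being expanded recursively.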
""" package = [ { "name": "sphinx", "manager": "conda", "platform": "linux-64", "dependencies": {"sphinxcontrib-applehelp": ""}, }, { "name": "sphinxcontrib-applehelp", "version": "1.0.8", "manager": "conda", "platform": "linux-64", "dependencies": {"sphinx": ">=5"}, }, ] lock_spec = _parse_conda_lock_packages(package) assert lock_spec.packages == { ("conda", "linux-64", "sphinx"): { "name": "sphinx", "manager": "conda", "platform": "linux-64", "dependencies": {"sphinxcontrib-applehelp": ""}, }, ("conda", "linux-64", "sphinxcontrib-applehelp"): { "name": "sphinxcontrib-applehelp", "version": "1.0.8", "manager": "conda", "platform": "linux-64", "dependencies": {"sphinx": ">=5"}, }, } ================================================ FILE: tests/test_dependencies_parsing_internal.py ================================================ """Focused tests for active internal dependency-parsing helpers.""" from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING if TYPE_CHECKING: import pytest from unidep._dependencies_parsing import ( _is_empty_git_submodule, _move_optional_dependencies_to_dependencies, parse_requirements, ) from unidep.utils import PathWithExtras def test_move_optional_dependencies_star_promotes_all_groups( capsys: pytest.CaptureFixture[str], ) -> None: data = { "dependencies": ["numpy"], "optional_dependencies": { "dev": ["pytest"], "docs": ["sphinx"], }, } _move_optional_dependencies_to_dependencies( data, PathWithExtras(Path("requirements.yaml"), ["*"]), verbose=True, ) assert data["dependencies"] == ["numpy", "pytest", "sphinx"] assert "optional_dependencies" not in data assert "Moving all optional dependencies" in capsys.readouterr().out def test_parse_requirements_skips_empty_paired_dependency_after_filtering( tmp_path: Path, ) -> None: req_file = tmp_path / "requirements.yaml" req_file.write_text( """\ dependencies: - conda: numpy pip: numpy """, ) requirements = parse_requirements(req_file, skip_dependencies=["numpy"]) assert requirements.requirements == {} assert requirements.dependency_entries == [] def test_is_empty_git_submodule_false_for_non_directory(tmp_path: Path) -> None: file_path = tmp_path / "file.txt" file_path.write_text("not a directory") assert _is_empty_git_submodule(file_path) is False ================================================ FILE: tests/test_dependency_selection.py ================================================ """Tests for user-shaped dependency selection behavior.""" from __future__ import annotations import textwrap from pathlib import Path, PureWindowsPath from typing import TYPE_CHECKING, Tuple, cast import pytest from unidep._conflicts import VersionConflictError from unidep._dependencies_parsing import DependencyOrigin, parse_requirements from unidep._dependency_selection import ( MergedSourceCandidate, _joined_pinnings_are_safely_satisfiable, _origin_to_text, collapse_selected_universals, select_conda_like_requirements, select_pip_requirements, ) if TYPE_CHECKING: from unidep.platform_definitions import Platform def _write_requirements(tmp_path: Path, content: str) -> Path: req_file = tmp_path / "requirements.yaml" req_file.write_text(textwrap.dedent(content)) return req_file def _selected_summary( selected: dict[Platform | None, list[MergedSourceCandidate]], ) -> dict[Platform | None, list[tuple[str, str, str | None]]]: return { platform: [ (candidate.source, candidate.spec.name, candidate.spec.pin) for candidate in candidates ] for platform, candidates in selected.items() } def 
test_origin_to_text_includes_optional_group_and_local_chain() -> None: origin = DependencyOrigin( source_file=Path("requirements.yaml"), dependency_index=3, optional_group="dev", local_dependency_chain=(Path("libs/a"), Path("libs/b")), ) assert _origin_to_text(origin) == ( "requirements.yaml, item 3, group dev, via libs/a -> libs/b" ) def test_origin_to_text_normalizes_windows_style_local_chain() -> None: origin = DependencyOrigin( source_file=Path("requirements.yaml"), dependency_index=3, optional_group="dev", local_dependency_chain=cast( Tuple[Path, ...], ( PureWindowsPath("libs\\a"), PureWindowsPath("libs\\b"), ), ), ) assert _origin_to_text(origin) == ( "requirements.yaml, item 3, group dev, via libs/a -> libs/b" ) def test_joined_pinnings_are_safely_satisfiable_for_user_shaped_pin_strings() -> None: assert _joined_pinnings_are_safely_satisfiable( [">=2", ">=1", ">2", "<=3", "<4"], ) assert _joined_pinnings_are_safely_satisfiable(["==1", "~=1.0"]) assert not _joined_pinnings_are_safely_satisfiable(["==2.*", "<=1"]) assert not _joined_pinnings_are_safely_satisfiable(["==1.*", "<=1", "!=1"]) assert not _joined_pinnings_are_safely_satisfiable(["!=1.*"]) assert not _joined_pinnings_are_safely_satisfiable(["===1"]) assert not _joined_pinnings_are_safely_satisfiable( ["@ git+https://example.com/example.git"], ) def test_select_conda_like_requirements_prefers_pinned_conda_over_unpinned_pip( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - conda: click >=8 pip: click """, ) requirements = parse_requirements(req_file) selected = select_conda_like_requirements( requirements.dependency_entries, requirements.platforms, ) assert _selected_summary(selected) == { "linux-64": [("conda", "click", ">=8")], } def test_select_conda_like_requirements_prefers_pip_extras_over_conda( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - conda: adaptive pip: adaptive[notebook] """, ) requirements = parse_requirements(req_file) selected = select_conda_like_requirements( requirements.dependency_entries, requirements.platforms, ) assert _selected_summary(selected) == { "linux-64": [("pip", "adaptive[notebook]", None)], } def test_select_conda_like_requirements_prefers_narrower_pinned_selector_scope( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 - osx-64 - osx-arm64 dependencies: - conda: click >=8 - pip: click >1 # [osx] """, ) requirements = parse_requirements(req_file) selected = select_conda_like_requirements( requirements.dependency_entries, requirements.platforms, ) assert _selected_summary(selected) == { "linux-64": [("conda", "click", ">=8")], "osx-64": [("pip", "click", ">1")], "osx-arm64": [("pip", "click", ">1")], } def test_select_conda_like_requirements_reports_final_collisions_with_origins( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - conda: foo - conda: python-foo pip: foo >1 """, ) requirements = parse_requirements(req_file) match = ( r"(?s)Final Dependency Collision:" r".*'foo' on platform 'linux-64'" r".*conda: foo \(" r".*requirements\.yaml, item 1" r".*pip: foo >1 \(" r".*requirements\.yaml, item 2" ) with pytest.raises(ValueError, match=match): select_conda_like_requirements( requirements.dependency_entries, requirements.platforms, ) def test_select_pip_requirements_merges_supported_wildcard_pinnings( tmp_path: Path, ) -> None: req_file = _write_requirements( 
tmp_path, """\ platforms: - linux-64 dependencies: - conda: numpy - pip: foo ==1.* - pip: foo >=1.5 """, ) requirements = parse_requirements(req_file) selected = select_pip_requirements( requirements.dependency_entries, requirements.platforms, ) assert _selected_summary(selected) == { "linux-64": [("pip", "foo", "==1.*,>=1.5")], } def test_select_pip_requirements_merges_compatible_compatible_release_pinnings( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - pip: foo ~=1.4 - pip: foo <2 """, ) requirements = parse_requirements(req_file) selected = select_pip_requirements( requirements.dependency_entries, requirements.platforms, ) assert _selected_summary(selected) == { "linux-64": [("pip", "foo", "~=1.4,<2")], } def test_select_pip_requirements_rejects_unsafely_merged_wildcard_pinnings( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - pip: foo ==1.* - pip: foo >2 """, ) requirements = parse_requirements(req_file) with pytest.raises(VersionConflictError, match="Invalid version pinning '==1."): select_pip_requirements( requirements.dependency_entries, requirements.platforms, ) def test_select_pip_requirements_rejects_multiple_exact_pinnings( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 dependencies: - pip: foo ==1 - pip: foo ==2 """, ) requirements = parse_requirements(req_file) with pytest.raises( VersionConflictError, match="Multiple exact version pinnings found: ==1, ==2 for `foo`", ): select_pip_requirements( requirements.dependency_entries, requirements.platforms, ) def test_collapse_selected_universals_collapses_user_declared_universal_dependencies( tmp_path: Path, ) -> None: req_file = _write_requirements( tmp_path, """\ platforms: - linux-64 - osx-arm64 dependencies: - conda: numpy >=1 """, ) requirements = parse_requirements(req_file) selected = select_conda_like_requirements( requirements.dependency_entries, requirements.platforms, ) collapsed = collapse_selected_universals(selected, requirements.platforms) assert _selected_summary(collapsed) == { None: [("conda", "numpy", ">=1")], } ================================================ FILE: tests/test_local_wheels_and_zip.py ================================================ """Tests for parsing local dependencies from wheels and zips.""" import textwrap from pathlib import Path from typing import Literal import pytest from unidep import parse_local_dependencies, parse_requirements from .helpers import maybe_as_toml @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_wheel(tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"]) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../example.whl """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) r1 = maybe_as_toml(toml_or_yaml, r1) local_dep = tmp_path / "example.whl" local_dep.touch() # Create a dummy .whl file dependencies = parse_local_dependencies( r1, check_pip_installable=False, verbose=True, ) assert dependencies[project1.resolve()] == [local_dep.resolve()] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_zip(tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"]) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - 
../example.zip """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dep = tmp_path / "example.zip" local_dep.touch() # Create a dummy .zip file dependencies = parse_local_dependencies(r1, check_pip_installable=False) assert dependencies[project1.resolve()] == [local_dep.resolve()] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_wheel_and_folder( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) (project2 / "setup.py").touch() # Make project2 pip installable r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../example.whl - ../project2 """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dep = tmp_path / "example.whl" local_dep.touch() # Create a dummy .whl file with pytest.warns(UserWarning, match="is not managed by unidep"): dependencies = parse_local_dependencies(r1, check_pip_installable=False) assert dependencies[project1.resolve()] == [ local_dep.resolve(), project2.resolve(), ] requirements = parse_requirements(r1, verbose=True) assert requirements.requirements == {} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_wheel_with_extras( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../example.whl[extra1,extra2] """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dep = tmp_path / "example.whl" local_dep.touch() # Create a dummy .whl file dependencies = parse_local_dependencies(r1, check_pip_installable=False) assert dependencies[project1.resolve()] == [local_dep.resolve()] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_wheel_in_dependencies( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../example.whl """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dep = tmp_path / "example.whl" local_dep.touch() # Create a dummy .whl file dependencies = parse_local_dependencies(r1, check_pip_installable=False) assert dependencies[project1.resolve()] == [local_dep.resolve()] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies_with_wheel( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" for project in [project1, project2, project3]: project.mkdir(exist_ok=True, parents=True) (project / "setup.py").touch() # Make projects pip installable wheel_dep = tmp_path / "example.whl" wheel_dep.touch() # Create a dummy .whl file r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r3 = project3 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 """, ), ) r2.write_text( textwrap.dedent( """\ local_dependencies: - ../project3 - ../example.whl """, ), ) r3.write_text( textwrap.dedent( """\ dependencies: - pytest """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) r2 = maybe_as_toml(toml_or_yaml, r2) r3 = maybe_as_toml(toml_or_yaml, r3) local_dependencies = parse_local_dependencies(r1, verbose=True) assert local_dependencies == { 
project1.resolve(): [ wheel_dep.resolve(), project2.resolve(), project3.resolve(), ], } ================================================ FILE: tests/test_parse_yaml_local_dependencies.py ================================================ """unidep's YAML parsing of the `local_dependencies` list.""" from __future__ import annotations import shutil import textwrap from contextlib import nullcontext from pathlib import Path from typing import TYPE_CHECKING import pytest from ruamel.yaml import YAML from unidep import ( find_requirements_files, parse_local_dependencies, parse_requirements, ) from unidep._conflicts import resolve_conflicts from .helpers import maybe_as_toml if TYPE_CHECKING: import sys if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal REPO_ROOT = Path(__file__).parent.parent @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_circular_local_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ dependencies: - adaptive-scheduler local_dependencies: - ../project2 - ../project2 # duplicate include (shouldn't affect the result) """, ), ) # Test with old `includes` name r2.write_text( textwrap.dedent( """\ dependencies: - adaptive includes: # `local_dependencies` was called `includes` in <=0.41.0 - ../project1 """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) # Only convert r1 to toml, not r2, because we want to test that mixed TOML and YAML files work together with pytest.warns(DeprecationWarning, match="is deprecated since 0.42.0"): requirements = parse_requirements(r1, r2, verbose=False) # Both will be duplicated because of the circular dependency # but `resolve_conflicts` will remove the duplicates assert len(requirements.requirements["adaptive"]) == 4 assert len(requirements.requirements["adaptive-scheduler"]) == 2 resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert len(resolved["adaptive"]) == 1 assert len(resolved["adaptive"][None]) == 2 assert len(resolved["adaptive-scheduler"]) == 1 assert len(resolved["adaptive-scheduler"][None]) == 2 @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_local_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 - ../project2 # duplicate include (shouldn't affect the result) """, ), ) r2.write_text( textwrap.dedent( """\ local_dependencies: - ../project1 """, ), ) r2 = maybe_as_toml(toml_or_yaml, r2) # Only convert r2 to toml, not r1, because we want to test that mixed TOML and YAML files work together local_dependencies = parse_local_dependencies( r1, r2, verbose=False, check_pip_installable=False, ) expected_dependencies = { project1.resolve(): [project2.resolve()], project2.resolve(): [project1.resolve()], } assert local_dependencies == expected_dependencies @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_local_dependencies_respects_use( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project = tmp_path / "project"
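# The entries written below exercise the three `local_dependencies` shapes used in this test:
# a plain `local:` path (kept as a local dependency), `use: skip` (dropped entirely), and
# `use: pypi` with a `pypi:` requirement (the local path is replaced by the given PyPI package).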
project.mkdir(parents=True, exist_ok=True) for name in ["dep-local", "dep-skip", "dep-pypi"]: dep_dir = project / name dep_dir.mkdir() (dep_dir / "setup.py").write_text( "from setuptools import setup\nsetup(name='dep', version='0.0.1')\n", ) (dep_dir / "requirements.yaml").write_text("dependencies: []\n") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """ local_dependencies: - local: ./dep-local - local: ./dep-skip use: skip - local: ./dep-pypi pypi: company-dep>=1.0 use: pypi """, ), ) req_file = maybe_as_toml(toml_or_yaml, req_file) local_dependencies = parse_local_dependencies( req_file, verbose=False, check_pip_installable=False, ) assert local_dependencies == { project.resolve(): [ (project / "dep-local").resolve(), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" project4 = tmp_path / "project4" for project in [project1, project2, project3, project4]: project.mkdir(exist_ok=True, parents=True) p1 = project1 / "requirements.yaml" p2 = project2 / "requirements.yaml" p3 = project3 / "requirements.yaml" p4 = project4 / "requirements.yaml" p1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 """, ), ) p2.write_text( textwrap.dedent( """\ local_dependencies: - ../project3 """, ), ) p3.write_text( textwrap.dedent( """\ local_dependencies: - ../project4 """, ), ) p4.write_text( textwrap.dedent( """\ dependencies: - numpy """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) p2 = maybe_as_toml(toml_or_yaml, p2) p3 = maybe_as_toml(toml_or_yaml, p3) p4 = maybe_as_toml(toml_or_yaml, p4) local_dependencies = parse_local_dependencies( p1, p2, p3, verbose=False, check_pip_installable=False, ) expected_dependencies = { project1.resolve(): [ project2.resolve(), project3.resolve(), project4.resolve(), ], project2.resolve(): [project3.resolve(), project4.resolve()], project3.resolve(): [project4.resolve()], } assert local_dependencies == expected_dependencies @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nonexistent_local_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../nonexistent_project """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) with pytest.raises(FileNotFoundError, match=r"not found\."): parse_local_dependencies(r1, verbose=False, check_pip_installable=False) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_no_local_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ dependencies: - pandas """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dependencies = parse_local_dependencies( r1, verbose=False, check_pip_installable=False, ) assert local_dependencies == {} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_mixed_real_and_placeholder_dependencies( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ dependencies: - scipy local_dependencies: - 
../project1 # Self include (circular dependency) """, ), ) r1 = maybe_as_toml(toml_or_yaml, r1) local_dependencies = parse_local_dependencies( r1, verbose=False, check_pip_installable=False, ) assert local_dependencies == {} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_local_dependencies_pip_installable( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: example_folder = tmp_path / "example" shutil.copytree(REPO_ROOT / "example", example_folder) # Add an extra project extra_project = example_folder / "extra_project" extra_project.mkdir(exist_ok=True, parents=True) (extra_project / "requirements.yaml").write_text( "local_dependencies: [../setup_py_project]", ) # Add a `../extra_project` entry to setup_py_project's local_dependencies setup_py_project_req = example_folder / "setup_py_project" / "requirements.yaml" yaml = YAML(typ="safe") with setup_py_project_req.open("r") as f: requirements = yaml.load(f) requirements["local_dependencies"].append("../extra_project") with setup_py_project_req.open("w") as f: yaml.dump(requirements, f) setup_py_project_req = maybe_as_toml(toml_or_yaml, setup_py_project_req) found_files = find_requirements_files(example_folder) assert len(found_files) == 6 # Add a common requirements file common_requirements = example_folder / "common-requirements.yaml" common_requirements.write_text("local_dependencies: [./setup_py_project]") common_requirements = maybe_as_toml(toml_or_yaml, common_requirements) found_files.append(common_requirements) local_dependencies = parse_local_dependencies( *found_files, check_pip_installable=True, verbose=True, ) assert local_dependencies # extra_project is not `pip installable`, so it should not appear in any of the values below assert local_dependencies == { example_folder / "setup_py_project": [ example_folder / "hatch_project", example_folder / "setuptools_project", ], example_folder / "setuptools_project": [ example_folder / "hatch_project", ], example_folder / "pyproject_toml_project": [ example_folder / "hatch_project", ], example_folder / "extra_project": [ example_folder / "hatch_project", example_folder / "setup_py_project", example_folder / "setuptools_project", ], example_folder: [ example_folder / "hatch_project", example_folder / "setup_py_project", example_folder / "setuptools_project", ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_local_dependencies_pip_installable_with_non_installable_project( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: example_folder = tmp_path / "example" shutil.copytree(REPO_ROOT / "example", example_folder) # Add an extra project extra_project = example_folder / "extra_project" extra_project.mkdir(exist_ok=True, parents=True) r_extra = extra_project / "requirements.yaml" r_extra.write_text("local_dependencies: [../setup_py_project]") r_extra = maybe_as_toml(toml_or_yaml, r_extra) # Add a line to hatch_project local_dependencies which should # make hatch_project depend on setup_py_project, via extra_project! However, extra_project is # not `pip installable`, so we're testing that path.
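# As asserted below, extra_project still appears as a *key* (it has its own requirements
# file) but never inside the *values*: non-installable projects are flattened out and
# their targets are attributed directly to their dependents.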
setup_py_project_req = example_folder / "hatch_project" / "requirements.yaml" yaml = YAML(typ="safe") with setup_py_project_req.open("r") as f: requirements = yaml.load(f) requirements["local_dependencies"] = ["../extra_project"] with setup_py_project_req.open("w") as f: yaml.dump(requirements, f) found_files = find_requirements_files(example_folder) assert len(found_files) == 6 local_dependencies = parse_local_dependencies( *found_files, check_pip_installable=True, verbose=True, ) assert local_dependencies assert local_dependencies == { example_folder / "hatch_project": [ example_folder / "setup_py_project", example_folder / "setuptools_project", ], example_folder / "setup_py_project": [ example_folder / "hatch_project", example_folder / "setuptools_project", ], example_folder / "pyproject_toml_project": [ example_folder / "hatch_project", example_folder / "setup_py_project", example_folder / "setuptools_project", ], example_folder / "setuptools_project": [ example_folder / "hatch_project", example_folder / "setup_py_project", ], example_folder / "extra_project": [ example_folder / "hatch_project", example_folder / "setup_py_project", example_folder / "setuptools_project", ], } def test_local_non_unidep_managed_dependency(tmp_path: Path) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 # is not managed by unidep """, ), ) r2 = project2 / "setup.py" # not managed by unidep r2.touch() requirements = parse_requirements(r1, verbose=True) # This should not raise assert requirements.requirements == {} with pytest.warns(UserWarning, match="not managed by unidep"): data = parse_local_dependencies(r1, verbose=True) assert data == {project1.resolve(): [project2.resolve()]} def test_local_non_unidep_and_non_installable_managed_dependency( tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 # is not managed by unidep and not installable """, ), ) with pytest.raises(RuntimeError, match="is not pip installable"): parse_local_dependencies(r1, verbose=True) def test_local_empty_git_submodule_dependency( tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) (project2 / ".git").touch() r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 # has only `.git` file """, ), ) with pytest.raises(RuntimeError, match="is an empty Git submodule"): parse_local_dependencies(r1, verbose=True) def test_parse_local_dependencies_missing( tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../does-not-exist """, ), ) with pytest.raises(FileNotFoundError, match=r"not found\."): parse_local_dependencies(r1, verbose=True, raise_if_missing=True) local_dependencies = parse_local_dependencies( r1, verbose=True, raise_if_missing=False, ) assert local_dependencies == {} @pytest.mark.parametrize("unidep_managed", [True, False]) def 
test_parse_local_dependencies_without_local_deps_themselves( tmp_path: Path, unidep_managed: bool, # noqa: FBT001 ) -> None: project1 = tmp_path / "project1" project1.mkdir(exist_ok=True, parents=True) r1 = project1 / "requirements.yaml" r1.write_text( textwrap.dedent( """\ local_dependencies: - ../project2 """, ), ) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True, parents=True) r2 = project2 / "pyproject.toml" txt = textwrap.dedent( """\ [build-system] requires = ["setuptools", "wheel"] """, ) if unidep_managed: txt += '[tool.unidep]\ndependencies = ["numpy"]' r2.write_text(txt) ctx = ( pytest.warns(UserWarning, match="not managed by unidep") if not unidep_managed else nullcontext() ) with ctx: local_dependencies = parse_local_dependencies( r1, verbose=True, raise_if_missing=True, ) assert local_dependencies == {project1: [project2]} r2.write_text("") with pytest.raises(RuntimeError, match="is not pip installable"): parse_local_dependencies(r1, verbose=True, raise_if_missing=True) def test_parse_requirements_unmanaged_local_dependency(tmp_path: Path) -> None: """Local dep without requirements.yaml hits the None branch in _add_local_dependencies.""" project = tmp_path / "project" project.mkdir() unmanaged = tmp_path / "unmanaged" unmanaged.mkdir() # pip-installable but not unidep-managed (unmanaged / "setup.py").write_text( "from setuptools import setup; setup(name='unmanaged')", ) req = project / "requirements.yaml" req.write_text( textwrap.dedent("""\ dependencies: - numpy local_dependencies: - ../unmanaged """), ) result = parse_requirements(req, verbose=False) # Parsing succeeds; unmanaged dep is silently skipped assert "numpy" in result.requirements ================================================ FILE: tests/test_parse_yaml_nested_local_dependencies.py ================================================ """Test parsing nested local dependencies from YAML files.""" from __future__ import annotations import textwrap from pathlib import Path from typing import TYPE_CHECKING import pytest from unidep import ( parse_local_dependencies, parse_requirements, ) from .helpers import maybe_as_toml if TYPE_CHECKING: import sys if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal REPO_ROOT = Path(__file__).parent.parent @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies_multiple_levels( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" project4 = tmp_path / "project4" for project in [project1, project2, project3, project4]: project.mkdir(exist_ok=True, parents=True) (project / "setup.py").touch() # Make projects pip installable r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r3 = project3 / "requirements.yaml" r4 = project4 / "requirements.yaml" r1.write_text( textwrap.dedent(""" dependencies: - package1 local_dependencies: - ../project2 """), ) r2.write_text( textwrap.dedent(""" dependencies: - package2 local_dependencies: - ../project3 """), ) r3.write_text( textwrap.dedent(""" dependencies: - package3 local_dependencies: - ../project4 """), ) r4.write_text( textwrap.dedent(""" dependencies: - package4 """), ) r1 = maybe_as_toml(toml_or_yaml, r1) r2 = maybe_as_toml(toml_or_yaml, r2) r3 = maybe_as_toml(toml_or_yaml, r3) r4 = maybe_as_toml(toml_or_yaml, r4) local_dependencies = parse_local_dependencies( r1, verbose=True, 
check_pip_installable=True, ) assert local_dependencies == { project1.resolve(): [ project2.resolve(), project3.resolve(), project4.resolve(), ], } requirements = parse_requirements(r1, verbose=True) assert set(requirements.requirements.keys()) == { "package1", "package2", "package3", "package4", } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies_with_circular_reference( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" for project in [project1, project2, project3]: project.mkdir(exist_ok=True, parents=True) (project / "setup.py").touch() # Make projects pip installable r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r3 = project3 / "requirements.yaml" r1.write_text( textwrap.dedent(""" dependencies: - package1 local_dependencies: - ../project2 """), ) r2.write_text( textwrap.dedent(""" dependencies: - package2 local_dependencies: - ../project3 """), ) r3.write_text( textwrap.dedent(""" dependencies: - package3 local_dependencies: - ../project1 """), ) r1 = maybe_as_toml(toml_or_yaml, r1) r2 = maybe_as_toml(toml_or_yaml, r2) r3 = maybe_as_toml(toml_or_yaml, r3) local_dependencies = parse_local_dependencies( r1, verbose=True, check_pip_installable=True, ) assert local_dependencies == { project1.resolve(): [project2.resolve(), project3.resolve()], } requirements = parse_requirements(r1, verbose=True) assert set(requirements.requirements.keys()) == {"package1", "package2", "package3"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies_with_non_unidep_managed_project( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" for project in [project1, project2]: project.mkdir(exist_ok=True, parents=True) (project / "setup.py").touch() # Make projects pip installable # Create project3 as a non-unidep managed project project3.mkdir(exist_ok=True, parents=True) (project3 / "setup.py").touch() # Make it pip installable but not unidep managed r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r1.write_text( textwrap.dedent(""" dependencies: - package1 local_dependencies: - ../project2 """), ) r2.write_text( textwrap.dedent(""" dependencies: - package2 local_dependencies: - ../project3 """), ) r1 = maybe_as_toml(toml_or_yaml, r1) r2 = maybe_as_toml(toml_or_yaml, r2) # project3 is non-unidep managed but pip installable with pytest.warns(UserWarning, match="not managed by unidep"): local_dependencies = parse_local_dependencies( r1, verbose=True, check_pip_installable=True, warn_non_managed=True, ) assert local_dependencies == { project1.resolve(): [project2.resolve(), project3.resolve()], } # We don't expect a warning here anymore, as it should have been raised in parse_local_dependencies requirements = parse_requirements(r1, verbose=True) assert set(requirements.requirements.keys()) == {"package1", "package2"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_skip_propagates_to_nested_local_dependency( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: root = tmp_path / "root" system = root / "system" shared = root / "shared" system.mkdir(parents=True, exist_ok=True) shared.mkdir(parents=True, exist_ok=True) root_req = root / "requirements.yaml" system_req = system / "requirements.yaml" root_req.write_text( 
textwrap.dedent( """ local_dependencies: - ./system - local: ./shared use: skip """, ), ) system_req.write_text( textwrap.dedent( """ local_dependencies: - ../shared """, ), ) root_req = maybe_as_toml(toml_or_yaml, root_req) system_req = maybe_as_toml(toml_or_yaml, system_req) requirements = parse_requirements(root_req) assert "shared" not in requirements.requirements local_dependencies = parse_local_dependencies(root_req, check_pip_installable=False) assert local_dependencies == {root.resolve(): [system.resolve()]} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pypi_override_propagates_to_nested_local_dependency( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: root = tmp_path / "root" system = root / "system" shared = root / "shared" system.mkdir(parents=True, exist_ok=True) shared.mkdir(parents=True, exist_ok=True) root_req = root / "requirements.yaml" system_req = system / "requirements.yaml" root_req.write_text( textwrap.dedent( """ local_dependencies: - ./system - local: ./shared pypi: company-shared>=1.0 use: pypi """, ), ) system_req.write_text( textwrap.dedent( """ local_dependencies: - ../shared """, ), ) root_req = maybe_as_toml(toml_or_yaml, root_req) system_req = maybe_as_toml(toml_or_yaml, system_req) requirements = parse_requirements(root_req) assert "company-shared" in requirements.requirements local_dependencies = parse_local_dependencies(root_req, check_pip_installable=False) assert local_dependencies == {root.resolve(): [system.resolve()]} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_skip_propagates_when_nested_entry_is_dict( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: root = tmp_path / "root" system = root / "system" shared = root / "shared" system.mkdir(parents=True, exist_ok=True) shared.mkdir(parents=True, exist_ok=True) root_req = root / "requirements.yaml" system_req = system / "requirements.yaml" root_req.write_text( textwrap.dedent( """ local_dependencies: - ./system - local: ./shared use: skip """, ), ) system_req.write_text( textwrap.dedent( """ local_dependencies: - local: ../shared """, ), ) root_req = maybe_as_toml(toml_or_yaml, root_req) system_req = maybe_as_toml(toml_or_yaml, system_req) requirements = parse_requirements(root_req) assert "shared" not in requirements.requirements local_dependencies = parse_local_dependencies(root_req, check_pip_installable=False) assert local_dependencies == {root.resolve(): [system.resolve()]} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_nested_local_dependencies_with_extras( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: project1 = tmp_path / "project1" project2 = tmp_path / "project2" project3 = tmp_path / "project3" for project in [project1, project2, project3]: project.mkdir(exist_ok=True, parents=True) (project / "setup.py").touch() # Make projects pip installable r1 = project1 / "requirements.yaml" r2 = project2 / "requirements.yaml" r3 = project3 / "requirements.yaml" r1.write_text( textwrap.dedent(""" dependencies: - package1 local_dependencies: - ../project2[test,docs] optional_dependencies: dev: - dev-package """), ) r2.write_text( textwrap.dedent(""" dependencies: - package2 local_dependencies: - ../project3[full] optional_dependencies: test: - pytest docs: - sphinx """), ) r3.write_text( textwrap.dedent(""" dependencies: - package3 optional_dependencies: full: - extra-package """), ) r1 = maybe_as_toml(toml_or_yaml, r1) r2 = maybe_as_toml(toml_or_yaml, r2) r3 = 
maybe_as_toml(toml_or_yaml, r3) local_dependencies = parse_local_dependencies( Path(f"{r1}[dev]"), verbose=True, check_pip_installable=True, ) assert local_dependencies == { project1.resolve(): [project2.resolve(), project3.resolve()], } requirements = parse_requirements(r1, verbose=True, extras=[["dev"]]) assert set(requirements.requirements.keys()) == { "package1", "package2", "package3", "pytest", "sphinx", "extra-package", } # Test with different extras requirements_full = parse_requirements(r1, verbose=True, extras=[["dev", "full"]]) assert set(requirements_full.requirements.keys()) == { "package1", "package2", "package3", "pytest", "sphinx", "extra-package", } assert requirements_full.optional_dependencies.keys() == {"dev"} assert requirements_full.optional_dependencies["dev"].keys() == {"dev-package"} ================================================ FILE: tests/test_pip_indices.py ================================================ """Unit tests for pip_indices support in unidep.""" from __future__ import annotations import os from pathlib import Path # noqa: TC003 from textwrap import dedent import pytest import yaml from unidep._conda_env import CondaEnvironmentSpec, write_conda_environment_file from unidep._dependencies_parsing import ( _collect_pip_indices, parse_requirements, ) class TestPipIndicesParsing: """Test parsing of pip_indices from requirements.yaml and pyproject.toml.""" def test_parse_pip_indices_from_yaml(self, tmp_path: Path) -> None: """Test parsing pip_indices from requirements.yaml.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://private.company.com/simple/ dependencies: - numpy - pip: private-package """, ), ) parsed = parse_requirements(requirements_file) assert parsed.pip_indices == ( "https://pypi.org/simple/", "https://private.company.com/simple/", ) def test_parse_pip_indices_from_toml(self, tmp_path: Path) -> None: """Test parsing pip_indices from pyproject.toml.""" pyproject_file = tmp_path / "pyproject.toml" pyproject_file.write_text( dedent( """ [tool.unidep] channels = ["conda-forge"] pip_indices = [ "https://pypi.org/simple/", "https://test.pypi.org/simple/" ] dependencies = [ "numpy", {pip = "test-package"} ] """, ), ) parsed = parse_requirements(pyproject_file) assert parsed.pip_indices == ( "https://pypi.org/simple/", "https://test.pypi.org/simple/", ) def test_parse_empty_pip_indices(self, tmp_path: Path) -> None: """Test that missing pip_indices defaults to empty list.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge dependencies: - numpy """, ), ) parsed = parse_requirements(requirements_file) assert parsed.pip_indices == () def test_parse_pip_indices_with_env_vars(self, tmp_path: Path) -> None: """Test parsing pip_indices with environment variables.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://${PIP_USER}:${PIP_PASSWORD}@private.company.com/simple/ - https://pypi.org/simple/ dependencies: - pip: private-package """, ), ) parsed = parse_requirements(requirements_file) assert parsed.pip_indices == ( "https://${PIP_USER}:${PIP_PASSWORD}@private.company.com/simple/", "https://pypi.org/simple/", ) def test_merge_pip_indices_from_multiple_files(self, tmp_path: Path) -> None: """Test merging pip_indices from multiple requirements 
files.""" # First requirements file req1 = tmp_path / "req1.yaml" req1.write_text( dedent( """ name: project1 pip_indices: - https://pypi.org/simple/ - https://index1.com/simple/ dependencies: - numpy """, ), ) # Second requirements file req2 = tmp_path / "req2.yaml" req2.write_text( dedent( """ name: project2 pip_indices: - https://index2.com/simple/ - https://pypi.org/simple/ # Duplicate dependencies: - pandas """, ), ) # Parse and merge parsed1 = parse_requirements(req1) parsed2 = parse_requirements(req2) # In real implementation, we'd have a merge function # For now, test that both parse correctly assert parsed1.pip_indices == ( "https://pypi.org/simple/", "https://index1.com/simple/", ) assert parsed2.pip_indices == ( "https://index2.com/simple/", "https://pypi.org/simple/", ) def test_pip_indices_ordering_preserved(self, tmp_path: Path) -> None: """Test that pip_indices order is preserved (first is primary).""" requirements_file = tmp_path / "requirements.yaml" indices = [ "https://primary.com/simple/", "https://secondary.com/simple/", "https://tertiary.com/simple/", ] requirements_file.write_text( dedent( f""" name: test_project pip_indices: - {indices[0]} - {indices[1]} - {indices[2]} dependencies: - numpy """, ), ) parsed = parse_requirements(requirements_file) assert parsed.pip_indices == tuple(indices) # First index should be treated as primary (--index-url) assert parsed.pip_indices[0] == indices[0] def test_collect_pip_indices_supports_single_string(self) -> None: """Test the string form of pip_indices.""" indices = _collect_pip_indices( {"pip_indices": "https://pypi.org/simple/"}, ) assert indices == ["https://pypi.org/simple/"] def test_collect_pip_indices_rejects_invalid_value_type(self) -> None: """Test invalid top-level pip index values.""" with pytest.raises( TypeError, match="`pip_indices` must be a string or a list of strings.", ): _collect_pip_indices({"pip_indices": 123}) def test_collect_pip_indices_rejects_non_string_entries(self) -> None: """Test invalid pip index list entries.""" with pytest.raises( TypeError, match="`pip_indices` entries must be strings.", ): _collect_pip_indices( {"pip_indices": ["https://pypi.org/simple/", 123]}, ) class TestEnvironmentGeneration: """Test generation of environment.yaml with pip_indices.""" def test_environment_yaml_with_pip_indices(self, tmp_path: Path) -> None: """Test that pip_indices are included as pip-repositories in environment.yaml.""" env_spec = CondaEnvironmentSpec( channels=["conda-forge"], pip_indices=[ "https://pypi.org/simple/", "https://private.company.com/simple/", ], platforms=[], conda=["numpy", "pandas"], pip=["private-package", "requests"], ) env_file = tmp_path / "environment.yaml" write_conda_environment_file(env_spec, env_file) with env_file.open() as f: env_dict = yaml.safe_load(f) # Check that pip-repositories is included assert "pip-repositories" in env_dict assert env_dict["pip-repositories"] == [ "https://pypi.org/simple/", "https://private.company.com/simple/", ] # Check that dependencies structure is correct assert "dependencies" in env_dict deps = env_dict["dependencies"] # Find pip dependencies pip_deps = None for dep in deps: if isinstance(dep, dict) and "pip" in dep: pip_deps = dep["pip"] break assert pip_deps is not None assert "private-package" in pip_deps assert "requests" in pip_deps def test_environment_yaml_without_pip_indices(self, tmp_path: Path) -> None: """Test environment.yaml generation without pip_indices.""" env_spec = CondaEnvironmentSpec( channels=["conda-forge"], 
pip_indices=[], # Empty pip_indices platforms=[], conda=["numpy"], pip=["requests"], ) env_file = tmp_path / "environment.yaml" write_conda_environment_file(env_spec, env_file) with env_file.open() as f: env_dict = yaml.safe_load(f) # pip-repositories should not be included if empty assert "pip-repositories" not in env_dict def test_environment_yaml_with_env_vars_in_indices(self, tmp_path: Path) -> None: """Test that environment variables in pip_indices are preserved.""" env_spec = CondaEnvironmentSpec( channels=["conda-forge"], pip_indices=[ "https://${USER}:${PASS}@private.com/simple/", "https://pypi.org/simple/", ], platforms=[], conda=[], pip=["private-package"], ) env_file = tmp_path / "environment.yaml" write_conda_environment_file(env_spec, env_file) with env_file.open() as f: content = f.read() env_dict = yaml.safe_load(content) # Environment variables should be preserved assert ( env_dict["pip-repositories"][0] == "https://${USER}:${PASS}@private.com/simple/" ) class TestPipCommandConstruction: """Test construction of pip install commands with indices.""" def test_build_pip_command_with_indices(self) -> None: """Test building pip install command with index URLs.""" pip_indices = [ "https://pypi.org/simple/", "https://private.company.com/simple/", ] # Verify the logic: first index is primary, rest are extra assert pip_indices[0] == "https://pypi.org/simple/" # Primary assert pip_indices[1] == "https://private.company.com/simple/" # Extra def test_build_pip_command_without_indices(self) -> None: """Test building pip install command without custom indices.""" pip_indices: list[str] = [] assert len(pip_indices) == 0 def test_build_pip_command_single_index(self) -> None: """Test building pip install command with single index.""" pip_indices = ["https://custom.pypi.org/simple/"] assert len(pip_indices) == 1 assert pip_indices[0] == "https://custom.pypi.org/simple/" def test_uv_compatibility(self) -> None: """Test that index flags are compatible with uv.""" # uv uses the same --index-url and --extra-index-url flags as pip pip_indices = [ "https://pypi.org/simple/", "https://test.pypi.org/simple/", ] # Both pip and uv support these flags pip_args = ["--index-url", pip_indices[0]] uv_args = ["--index-url", pip_indices[0]] assert pip_args == uv_args # Same flags for both class TestEdgeCases: """Test edge cases and error handling.""" def test_invalid_url_format(self, tmp_path: Path) -> None: """Test handling of invalid URL formats.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - not-a-valid-url - https://valid.url.com/simple/ dependencies: - numpy """, ), ) # Should either validate and fail, or accept and let pip handle it parsed = parse_requirements(requirements_file) assert "not-a-valid-url" in parsed.pip_indices def test_duplicate_indices(self, tmp_path: Path) -> None: """Test handling of duplicate indices.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://pypi.org/simple/ - https://private.com/simple/ - https://pypi.org/simple/ # Duplicate dependencies: - numpy """, ), ) parsed = parse_requirements(requirements_file) # The implementation deduplicates indices assert len(parsed.pip_indices) == 2 assert parsed.pip_indices == ( "https://pypi.org/simple/", "https://private.com/simple/", ) def test_empty_string_in_indices(self, tmp_path: Path) -> None: """Test handling of empty strings in pip_indices.""" requirements_file = 
tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - "" - https://pypi.org/simple/ dependencies: - numpy """, ), ) parsed = parse_requirements(requirements_file) # Empty strings should be filtered out or raise an error assert parsed.pip_indices # Should have at least the valid URL def test_missing_env_var_in_url(self, tmp_path: Path) -> None: """Test handling of missing environment variables.""" requirements_file = tmp_path / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://${MISSING_VAR}@private.com/simple/ dependencies: - numpy """, ), ) # Environment variable not set if "MISSING_VAR" in os.environ: del os.environ["MISSING_VAR"] parsed = parse_requirements(requirements_file) # Should preserve the ${MISSING_VAR} syntax for later expansion assert "${MISSING_VAR}" in parsed.pip_indices[0] ================================================ FILE: tests/test_pip_indices_cli.py ================================================ """Tests for pip_indices CLI functionality to achieve 100% coverage.""" from __future__ import annotations import os from pathlib import Path # noqa: TC003 from unittest.mock import MagicMock, patch import pytest from unidep._cli import _build_pip_index_arguments from unidep._conda_env import create_conda_env_specification class TestBuildPipIndexArguments: """Test the _build_pip_index_arguments function.""" def test_empty_indices(self) -> None: """Test with empty pip_indices list.""" args = _build_pip_index_arguments([]) assert args == [] def test_single_index(self) -> None: """Test with a single index URL.""" indices = ["https://pypi.org/simple/"] args = _build_pip_index_arguments(indices) assert args == ["--index-url", "https://pypi.org/simple/"] def test_multiple_indices(self) -> None: """Test with multiple index URLs.""" indices = [ "https://pypi.org/simple/", "https://test.pypi.org/simple/", "https://private.com/simple/", ] args = _build_pip_index_arguments(indices) assert args == [ "--index-url", "https://pypi.org/simple/", "--extra-index-url", "https://test.pypi.org/simple/", "--extra-index-url", "https://private.com/simple/", ] def test_environment_variable_expansion(self) -> None: """Test that environment variables are expanded in URLs.""" # Set environment variables os.environ["PIP_USER"] = "testuser" os.environ["PIP_PASSWORD"] = "testpass" # noqa: S105 try: indices = [ "https://${PIP_USER}:${PIP_PASSWORD}@private.com/simple/", "https://public.com/simple/", ] args = _build_pip_index_arguments(indices) assert args == [ "--index-url", "https://testuser:testpass@private.com/simple/", "--extra-index-url", "https://public.com/simple/", ] finally: # Clean up del os.environ["PIP_USER"] del os.environ["PIP_PASSWORD"] def test_missing_environment_variable(self) -> None: """Test handling of missing environment variables.""" # Ensure the variable is not set os.environ.pop("NONEXISTENT_VAR", None) indices = ["https://${NONEXISTENT_VAR}@private.com/simple/"] args = _build_pip_index_arguments(indices) # expandvars leaves the ${VAR} as-is if not found assert args == ["--index-url", "https://${NONEXISTENT_VAR}@private.com/simple/"] def test_complex_environment_variables(self) -> None: """Test complex environment variable patterns.""" os.environ["DOMAIN"] = "example.com" os.environ["PORT"] = "8080" try: indices = [ "https://${DOMAIN}:${PORT}/simple/", "https://backup.${DOMAIN}/simple/", ] args = _build_pip_index_arguments(indices) assert args == [ "--index-url", 
"https://example.com:8080/simple/", "--extra-index-url", "https://backup.example.com/simple/", ] finally: del os.environ["DOMAIN"] del os.environ["PORT"] class TestPipInstallLocalWithIndices: """Test pip install with custom indices.""" @patch("unidep._cli.subprocess.run") @patch("unidep._cli.shutil.which") def test_pip_install_with_indices( self, mock_which: MagicMock, mock_run: MagicMock, ) -> None: """Test that pip install uses the correct index arguments.""" from unidep._cli import _pip_install_local mock_which.return_value = "/usr/bin/pip" mock_run.return_value = MagicMock(returncode=0) # Call with pip_indices _pip_install_local( "test_package", editable=False, dry_run=False, python_executable="/usr/bin/python", conda_run=[], no_uv=True, pip_indices=["https://pypi.org/simple/", "https://test.pypi.org/simple/"], flags=["--no-deps"], ) # Verify the command includes index arguments call_args = mock_run.call_args[0][0] assert "--index-url" in call_args assert "https://pypi.org/simple/" in call_args assert "--extra-index-url" in call_args assert "https://test.pypi.org/simple/" in call_args @patch("unidep._cli.subprocess.run") @patch("unidep._cli.shutil.which") def test_uv_install_with_indices( self, mock_which: MagicMock, mock_run: MagicMock, ) -> None: """Test that uv install uses the correct index arguments.""" from unidep._cli import _pip_install_local # Mock uv as the installer def which_side_effect(cmd: str) -> str | None: if cmd == "uv": return "/usr/bin/uv" return None mock_which.side_effect = which_side_effect mock_run.return_value = MagicMock(returncode=0) # Call with pip_indices _pip_install_local( "test_package", editable=False, dry_run=False, python_executable="/usr/bin/python", conda_run=[], no_uv=False, # Enable uv pip_indices=["https://private.com/simple/"], flags=["--no-deps"], ) # Verify uv command includes index arguments call_args = mock_run.call_args[0][0] assert "uv" in call_args assert "pip" in call_args assert "install" in call_args assert "--index-url" in call_args assert "https://private.com/simple/" in call_args class TestCondaEnvWithPipRepositories: """Test conda environment generation with pip-repositories.""" def test_write_env_with_pip_repositories(self, tmp_path: Path) -> None: """Test that `pip-repositories` is written to environment.yaml.""" from unidep._conda_env import CondaEnvironmentSpec, write_conda_environment_file env_spec = CondaEnvironmentSpec( channels=["conda-forge"], pip_indices=[ "https://pypi.org/simple/", "https://private.company.com/simple/", ], platforms=["linux-64"], conda=["python=3.11"], pip=["requests"], ) env_file = tmp_path / "environment.yaml" write_conda_environment_file(env_spec, env_file, name="test_env") content = env_file.read_text() assert "pip-repositories:" in content assert "https://pypi.org/simple/" in content assert "https://private.company.com/simple/" in content # Verify order is preserved lines = content.split("\n") repo_lines = [ line for line in lines if "https://" in line and "simple/" in line ] assert "pypi.org" in repo_lines[0] assert "private.company.com" in repo_lines[1] def test_write_env_without_pip_repositories(self, tmp_path: Path) -> None: """Test environment.yaml without `pip-repositories` when the list is empty.""" from unidep._conda_env import CondaEnvironmentSpec, write_conda_environment_file env_spec = CondaEnvironmentSpec( channels=["conda-forge"], pip_indices=[], # Empty list platforms=["linux-64"], conda=["python=3.11"], pip=["requests"], ) env_file = tmp_path / "environment.yaml" 
write_conda_environment_file(env_spec, env_file, name="test_env") content = env_file.read_text() assert "pip-repositories:" not in content class TestCreateCondaEnvSpecificationCompatibility: """Test compatibility paths in create_conda_env_specification.""" def test_accepts_string_keyword_pip_indices(self) -> None: """Test pip_indices passed as a single string keyword.""" env_spec = create_conda_env_specification( [], [], platforms=["linux-64"], pip_indices="https://pypi.org/simple/", ) assert env_spec.platforms == ["linux-64"] assert env_spec.pip_indices == ("https://pypi.org/simple/",) def test_accepts_legacy_positional_selector(self) -> None: """Test the older positional selector calling convention.""" env_spec = create_conda_env_specification([], [], ["linux-64"], "comment") assert env_spec.platforms == ["linux-64"] assert env_spec.pip_indices == () def test_accepts_legacy_positional_pip_indices_and_selector(self) -> None: """Test the fully positional legacy calling convention.""" env_spec = create_conda_env_specification( [], [], ["https://pypi.org/simple/"], ["linux-64"], "comment", ) assert env_spec.platforms == ["linux-64"] assert env_spec.pip_indices == ("https://pypi.org/simple/",) def test_rejects_missing_platforms_argument(self) -> None: """Test that platforms remain required.""" with pytest.raises(TypeError, match="Missing required `platforms` argument."): create_conda_env_specification([], []) def test_rejects_too_many_positionals_with_platforms_keyword(self) -> None: """Test too many positional arguments when platforms is keyword-only.""" with pytest.raises( TypeError, match="Too many positional arguments", ): create_conda_env_specification( [], [], ["https://pypi.org/simple/"], "comment", platforms=["linux-64"], ) def test_rejects_duplicate_pip_indices_with_platforms_keyword(self) -> None: """Test duplicate positional and keyword pip_indices.""" with pytest.raises( TypeError, match="`pip_indices` was provided both positionally and by keyword.", ): create_conda_env_specification( [], [], ["https://pypi.org/simple/"], platforms=["linux-64"], pip_indices=["https://test.pypi.org/simple/"], ) def test_rejects_duplicate_pip_indices_in_legacy_two_argument_form(self) -> None: """Test duplicate pip_indices in the legacy two-argument form.""" with pytest.raises( TypeError, match="`pip_indices` was provided both positionally and by keyword.", ): create_conda_env_specification( [], [], ["https://pypi.org/simple/"], ["linux-64"], pip_indices=["https://test.pypi.org/simple/"], ) def test_rejects_duplicate_pip_indices_in_legacy_three_argument_form( self, ) -> None: """Test duplicate pip_indices in the legacy three-argument form.""" with pytest.raises( TypeError, match="`pip_indices` was provided both positionally and by keyword.", ): create_conda_env_specification( [], [], ["https://pypi.org/simple/"], ["linux-64"], "comment", pip_indices=["https://test.pypi.org/simple/"], ) def test_rejects_too_many_legacy_positional_arguments(self) -> None: """Test too many positional arguments in the legacy form.""" with pytest.raises( TypeError, match="Too many positional arguments", ): create_conda_env_specification( [], [], ["https://pypi.org/simple/"], ["linux-64"], "comment", "extra", ) class TestInstallCommandWithIndices: """Test the install command with pip_indices.""" @patch("unidep._cli.subprocess.run") @patch("unidep._cli._maybe_conda_executable") @patch("unidep._cli._use_uv") def test_install_command_with_pip_indices( self, mock_use_uv: MagicMock, mock_conda: MagicMock, mock_run: MagicMock, 
tmp_path: Path, ) -> None: """Test install command properly passes pip_indices to pip install.""" from unidep._cli import _install_command # Setup mocks mock_use_uv.return_value = False # Don't use uv mock_conda.return_value = None # No conda mock_run.return_value = MagicMock(returncode=0) # Create a requirements file with pip_indices req_file = tmp_path / "requirements.yaml" req_file.write_text(""" name: test_project pip_indices: - https://pypi.org/simple/ - https://private.com/simple/ dependencies: - pip: requests - pip: private-package """) # Run install command _install_command( req_file, conda_executable=None, conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=False, editable=False, skip_local=True, skip_pip=False, skip_conda=True, no_dependencies=False, no_uv=True, verbose=False, ) # Check that pip was called with index arguments pip_call_found = False for call in mock_run.call_args_list: args = call[0][0] if call[0] else [] if "pip" in args and "install" in args: pip_call_found = True assert "--index-url" in args assert "https://pypi.org/simple/" in args assert "--extra-index-url" in args assert "https://private.com/simple/" in args break assert pip_call_found, "pip install was not called with indices" @patch("unidep._cli.subprocess.run") @patch("unidep._cli._maybe_conda_executable") @patch("unidep._cli._use_uv") def test_install_command_with_uv_and_indices( self, mock_use_uv: MagicMock, mock_conda: MagicMock, mock_run: MagicMock, tmp_path: Path, ) -> None: """Test install command with uv properly passes pip_indices.""" from unidep._cli import _install_command # Setup mocks mock_use_uv.return_value = True # Use uv mock_conda.return_value = None # No conda mock_run.return_value = MagicMock(returncode=0) # Create a requirements file with pip_indices req_file = tmp_path / "requirements.yaml" req_file.write_text(""" name: test_project pip_indices: - https://private.com/simple/ dependencies: - pip: private-package """) # Run install command _install_command( req_file, conda_executable=None, conda_env_name=None, conda_env_prefix=None, conda_lock_file=None, dry_run=False, editable=False, skip_local=True, skip_pip=False, skip_conda=True, no_dependencies=False, no_uv=False, # Allow uv verbose=False, ) # Check that uv was called with index arguments uv_call_found = False for call in mock_run.call_args_list: args = call[0][0] if call[0] else [] if "uv" in args and "pip" in args and "install" in args: uv_call_found = True assert "--index-url" in args assert "https://private.com/simple/" in args break assert uv_call_found, "uv pip install was not called with indices" class TestPipIndicesIntegration: """Integration tests for pip_indices throughout the workflow.""" def test_full_workflow_with_indices(self, tmp_path: Path) -> None: """Test complete workflow from parsing to environment generation.""" from unidep._conda_env import ( create_conda_env_specification, write_conda_environment_file, ) from unidep._dependencies_parsing import parse_requirements # Create a requirements file with pip_indices req_file = tmp_path / "requirements.yaml" req_file.write_text(""" name: test_project channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://test.pypi.org/simple/ dependencies: - python=3.11 - pip: requests - pip: pytest platforms: - linux-64 - osx-arm64 """) # Parse requirements parsed = parse_requirements(req_file) assert len(parsed.pip_indices) == 2 assert parsed.pip_indices[0] == "https://pypi.org/simple/" assert parsed.pip_indices[1] == 
"https://test.pypi.org/simple/" # Create conda env specification env_spec = create_conda_env_specification( parsed.dependency_entries, parsed.channels, parsed.platforms, pip_indices=parsed.pip_indices, ) assert env_spec.pip_indices == parsed.pip_indices # Write environment file env_file = tmp_path / "environment.yaml" write_conda_environment_file(env_spec, env_file) # Verify the output content = env_file.read_text() assert "pip-repositories:" in content assert "- https://pypi.org/simple/" in content assert "- https://test.pypi.org/simple/" in content @patch("unidep._conda_lock.conda_lock_command") def test_conda_lock_with_pip_indices( self, mock_conda_lock: MagicMock, tmp_path: Path, ) -> None: """Test that conda-lock properly includes pip_indices.""" from unidep._conda_lock import conda_lock_command # Create requirements file with pip_indices req_file = tmp_path / "requirements.yaml" req_file.write_text(""" name: test channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://private.com/simple/ dependencies: - numpy - pip: requests """) # Run conda-lock command (mocked) conda_lock_command( depth=1, directory=tmp_path, files=None, platforms=["linux-64"], verbose=False, only_global=False, ignore_pins=[], skip_dependencies=[], overwrite_pins=[], check_input_hash=False, extra_flags=[], lockfile=str(tmp_path / "conda-lock.yml"), ) # Verify that the mock was called and pip_indices were passed through assert mock_conda_lock.called def test_merge_command_with_indices(self, tmp_path: Path) -> None: """Test unidep merge command with pip_indices.""" from unidep._cli import _merge_command # Create requirements file req_file = tmp_path / "requirements.yaml" req_file.write_text(""" name: test channels: - conda-forge pip_indices: - https://private.com/simple/ dependencies: - numpy """) output_file = tmp_path / "environment.yaml" # Run merge command _merge_command( depth=1, directory=tmp_path, files=[req_file], name="merged_env", output=output_file, stdout=False, selector="sel", platforms=[], ignore_pins=[], skip_dependencies=[], overwrite_pins=[], verbose=False, ) # Check output file assert output_file.exists() content = output_file.read_text() assert "pip-repositories:" in content assert "https://private.com/simple/" in content ================================================ FILE: tests/test_pip_indices_integration.py ================================================ """End-to-end integration tests for pip_indices support in unidep.""" import os from pathlib import Path from textwrap import dedent from typing import Any from unittest.mock import MagicMock, patch import pytest class TestUnidepInstallIntegration: """Integration tests for unidep install with pip_indices.""" @pytest.fixture def mock_project(self, tmp_path: Path) -> Path: """Create a mock project with pip_indices configuration.""" project_dir = tmp_path / "test_project" project_dir.mkdir() # Create requirements.yaml with pip_indices requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://test.pypi.org/simple/ dependencies: - numpy - pip: requests - pip: test-package # From test.pypi.org """, ), ) # Create a simple setup.py setup_file = project_dir / "setup.py" setup_file.write_text( dedent( """ from setuptools import setup, find_packages setup( name="test_project", version="0.1.0", packages=find_packages(), ) """, ), ) # Create package directory (project_dir / "test_project").mkdir() 
(project_dir / "test_project" / "__init__.py").touch() return project_dir @patch("subprocess.run") def test_install_with_pip_indices(self, mock_run: Any, mock_project: Path) -> None: # noqa: ARG002 """Test that unidep install uses pip_indices correctly.""" mock_run.return_value = MagicMock(returncode=0, stdout="", stderr="") # Simulate running unidep install # Mock the install command execution with patch("unidep._cli._pip_install_local") as mock_pip_install: mock_pip_install.return_value = None # This would be the actual command execution # For now, verify the expected behavior expected_pip_args = [ "--index-url", "https://pypi.org/simple/", "--extra-index-url", "https://test.pypi.org/simple/", ] # The actual implementation would construct these args assert expected_pip_args[0] == "--index-url" assert expected_pip_args[2] == "--extra-index-url" @patch("subprocess.run") def test_install_with_env_var_indices(self, mock_run: Any, tmp_path: Path) -> None: """Test that environment variables in pip_indices are expanded.""" project_dir = tmp_path / "test_project" project_dir.mkdir() # Set environment variables os.environ["PIP_USER"] = "testuser" os.environ["PIP_PASSWORD"] = "testpass" # noqa: S105 requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/ - https://pypi.org/simple/ dependencies: - pip: private-package """, ), ) mock_run.return_value = MagicMock(returncode=0) # In actual implementation, env vars would be expanded expected_url = "https://testuser:testpass@private.pypi.org/simple/" # Verify env var expansion logic url = "https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/" expanded = url.replace("${PIP_USER}", os.environ["PIP_USER"]) expanded = expanded.replace("${PIP_PASSWORD}", os.environ["PIP_PASSWORD"]) assert expanded == expected_url # Clean up env vars del os.environ["PIP_USER"] del os.environ["PIP_PASSWORD"] def test_install_with_uv_backend(self, mock_project: Path) -> None: # noqa: ARG002 """Test that pip_indices work with uv backend.""" # uv uses the same --index-url and --extra-index-url flags with patch("shutil.which", return_value="/path/to/uv"), patch( "subprocess.run", ) as mock_run: mock_run.return_value = MagicMock(returncode=0) # Expected uv command structure expected_args = [ "uv", "pip", "install", "--index-url", "https://pypi.org/simple/", "--extra-index-url", "https://test.pypi.org/simple/", ] # Verify uv compatibility assert "--index-url" in expected_args assert "--extra-index-url" in expected_args def test_install_without_pip_indices(self, tmp_path: Path) -> None: """Test that unidep install works without pip_indices.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge dependencies: - numpy - pip: requests """, ), ) with patch("subprocess.run") as mock_run: mock_run.return_value = MagicMock(returncode=0) # No index flags should be added # Command should work with default PyPI assert True # Placeholder for actual test class TestUnidepCondaLockIntegration: """Integration tests for unidep conda-lock with pip_indices.""" @pytest.fixture def mock_monorepo(self, tmp_path: Path) -> Path: """Create a mock monorepo with multiple projects using pip_indices.""" monorepo = tmp_path / "monorepo" monorepo.mkdir() # Project 1 with pip_indices proj1 = monorepo / "project1" proj1.mkdir() 
(proj1 / "requirements.yaml").write_text( dedent( """ name: project1 channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://private1.com/simple/ dependencies: - numpy - pip: private-package1 """, ), ) # Project 2 with different pip_indices proj2 = monorepo / "project2" proj2.mkdir() (proj2 / "requirements.yaml").write_text( dedent( """ name: project2 channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://private2.com/simple/ dependencies: - pandas - pip: private-package2 """, ), ) return monorepo def test_conda_lock_generates_pip_repositories(self, mock_monorepo: Path) -> None: """Test that conda-lock generates environment.yaml with `pip-repositories`.""" _ = mock_monorepo # Used to ensure fixture is called with patch("subprocess.run") as mock_run: mock_run.return_value = MagicMock(returncode=0) # Expected environment.yaml structure expected_env = { "name": "myenv", "channels": ["conda-forge"], "pip-repositories": [ "https://pypi.org/simple/", "https://private1.com/simple/", "https://private2.com/simple/", ], "dependencies": [ "numpy", "pandas", {"pip": ["private-package1", "private-package2"]}, ], } # Verify the structure assert "pip-repositories" in expected_env assert len(expected_env["pip-repositories"]) == 3 def test_conda_lock_with_merged_indices(self, mock_monorepo: Path) -> None: # noqa: ARG002 """Test that conda-lock merges pip_indices from multiple projects.""" with patch("unidep._conda_lock.conda_lock_command") as mock_conda_lock: mock_conda_lock.return_value = None # Expected merged pip_indices (deduplicated) expected_indices = [ "https://pypi.org/simple/", # Common to both "https://private1.com/simple/", # From project1 "https://private2.com/simple/", # From project2 ] # Verify deduplication logic all_indices = [ "https://pypi.org/simple/", "https://private1.com/simple/", "https://pypi.org/simple/", # Duplicate "https://private2.com/simple/", ] deduplicated = list(dict.fromkeys(all_indices)) # Preserve order assert deduplicated == expected_indices def test_conda_lock_creates_valid_lockfile(self, tmp_path: Path) -> None: """Test that conda-lock creates a valid lock file with pip-repositories.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge pip_indices: - https://pypi.org/simple/ - https://custom.pypi.org/simple/ dependencies: - python=3.11 - pip: custom-package """, ), ) # Mock conda-lock execution with patch("subprocess.run") as mock_run: # First call generates environment.yaml # Second call runs conda-lock mock_run.return_value = MagicMock(returncode=0) # Verify that the generated environment.yaml includes pip-repositories env_content = { "name": "test_project", "channels": ["conda-forge"], "pip-repositories": [ "https://pypi.org/simple/", "https://custom.pypi.org/simple/", ], "dependencies": [ "python=3.11", {"pip": ["custom-package"]}, ], } # Verify structure for conda-lock compatibility assert "pip-repositories" in env_content assert isinstance(env_content["pip-repositories"], list) class TestErrorHandling: """Test error handling and edge cases in integration.""" def test_install_with_unreachable_index(self, tmp_path: Path) -> None: """Test behavior when a pip index is unreachable.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - 
https://unreachable.invalid.com/simple/ - https://pypi.org/simple/ dependencies: - pip: numpy # Should fall back to pypi.org """, ), ) # Test that installation can continue with fallback with patch("subprocess.run") as mock_run: # First attempt might fail, but should retry with pypi.org mock_run.return_value = MagicMock(returncode=0) # Installation should succeed using the second index assert True # Placeholder def test_install_with_conflicting_packages(self, tmp_path: Path) -> None: """Test handling of conflicting packages across indices.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://index1.com/simple/ # Has package-a v1.0 - https://index2.com/simple/ # Has package-a v2.0 dependencies: - pip: package-a # Which version gets installed? """, ), ) # First index should take precedence with patch("subprocess.run") as mock_run: mock_run.return_value = MagicMock(returncode=0) # Verify that first index is primary assert True # Placeholder def test_merge_with_circular_dependencies(self, tmp_path: Path) -> None: """Test handling of circular local dependencies with pip_indices.""" project_dir = tmp_path / "test_project" project_dir.mkdir() # Project A depends on B proj_a = project_dir / "project_a" proj_a.mkdir() (proj_a / "requirements.yaml").write_text( dedent( """ name: project_a pip_indices: - https://pypi.org/simple/ local_dependencies: - ../project_b dependencies: - pip: package-a """, ), ) # Project B depends on A (circular) proj_b = project_dir / "project_b" proj_b.mkdir() (proj_b / "requirements.yaml").write_text( dedent( """ name: project_b pip_indices: - https://custom.pypi.org/simple/ local_dependencies: - ../project_a dependencies: - pip: package-b """, ), ) # Should handle circular dependencies gracefully # pip_indices should be merged without infinite loop with patch("unidep._dependencies_parsing.parse_requirements"): # Implementation should detect and break circular dependencies assert True # Placeholder class TestCompatibility: """Test compatibility with existing unidep features.""" def test_pip_indices_with_platforms(self, tmp_path: Path) -> None: """Test that pip_indices work with platform selectors.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project channels: - conda-forge pip_indices: - https://pypi.org/simple/ platforms: - linux-64 - osx-arm64 dependencies: - numpy # [linux64] - pip: tensorflow # [linux64] - pip: tensorflow-metal # [osx-arm64] """, ), ) # pip_indices should apply to all platforms with patch("subprocess.run") as mock_run: mock_run.return_value = MagicMock(returncode=0) # Verify platform-specific handling assert True # Placeholder def test_pip_indices_with_optional_dependencies(self, tmp_path: Path) -> None: """Test that pip_indices work with optional dependencies.""" project_dir = tmp_path / "test_project" project_dir.mkdir() requirements_file = project_dir / "requirements.yaml" requirements_file.write_text( dedent( """ name: test_project pip_indices: - https://pypi.org/simple/ - https://test.pypi.org/simple/ dependencies: - numpy optional_dependencies: test: - pip: pytest - pip: test-package # From test.pypi.org dev: - pip: black - pip: mypy """, ), ) # pip_indices should apply to optional dependencies too with patch("subprocess.run") as mock_run: mock_run.return_value = 
MagicMock(returncode=0) # When installing with [test], should use pip_indices assert True # Placeholder def test_coexistence_with_uv_index_config(self, tmp_path: Path) -> None: """Test that pip_indices can coexist with [[tool.uv.index]] config.""" project_dir = tmp_path / "test_project" project_dir.mkdir() pyproject_file = project_dir / "pyproject.toml" pyproject_file.write_text( dedent( """ [tool.unidep] pip_indices = [ "https://pypi.org/simple/", "https://unidep.index.com/simple/" ] dependencies = ["numpy"] [[tool.uv.index]] url = "https://uv.specific.com/simple/" name = "uv-index" """, ), ) # Both configurations should be respected # unidep should use pip_indices # uv might use its own config when called directly with patch("subprocess.run") as mock_run: mock_run.return_value = MagicMock(returncode=0) # Verify both configs can coexist assert True # Placeholder ================================================ FILE: tests/test_pixi.py ================================================ """Tests for simple Pixi.toml generation.""" from __future__ import annotations import os import textwrap from itertools import permutations from pathlib import Path from typing import Any import pytest try: import tomllib except ImportError: # pragma: no cover import tomli as tomllib from unidep._conflicts import VersionConflictError from unidep._dependencies_parsing import DependencyEntry, DependencyOrigin from unidep._pixi import ( _add_single_file_optional_environments, _collect_transitive_nodes, _derive_feature_names, _discover_local_dependency_graph, _editable_dependency_path, _extract_dependencies, _feature_platforms_for_entries, _filter_targets_by_platforms, _make_pip_version_spec, _parse_direct_requirements_for_node, _parse_version_build, _unique_env_name, _unique_optional_feature_name, _with_unique_order_paths, generate_pixi_toml, ) from unidep.platform_definitions import Spec from unidep.utils import PathWithExtras _UNSET = object() def _write_file(path: Path, content: str) -> Path: path.write_text(textwrap.dedent(content)) return path def _generate_and_load( output_file: Path, *requirements_files: Path, **kwargs: Any, ) -> dict[str, Any]: if "verbose" not in kwargs: kwargs["verbose"] = False generate_pixi_toml(*requirements_files, output_file=output_file, **kwargs) with output_file.open("rb") as f: return tomllib.load(f) def _setup_app_lib_other( tmp_path: Path, app_optional_deps: str, ) -> tuple[Path, Path]: """Create app/lib/other monorepo layout and return (app_req, other_req).""" app_dir = tmp_path / "app" app_dir.mkdir() deps_block = textwrap.indent(textwrap.dedent(app_optional_deps), " ") yaml_content = ( "channels:\n" " - conda-forge\n" "dependencies:\n" " - pandas\n" "optional_dependencies:\n" " dev:\n" f"{deps_block}" ) app_req = app_dir / "requirements.yaml" app_req.write_text(yaml_content) lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) other_dir = tmp_path / "other" other_dir.mkdir() other_req = _write_file( other_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - scipy """, ) return app_req, other_req def test_simple_pixi_generation(tmp_path: Path) -> None: """Test basic pixi.toml generation from a single requirements.yaml.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy >=1.20 - pandas - pip: requests platforms: - linux-64 - osx-arm64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, 
project_name="test-project", output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() # Check basic structure assert "[workspace]" in content assert 'name = "test-project"' in content assert "conda-forge" in content assert "linux-64" in content assert "osx-arm64" in content # Check dependencies assert "[dependencies]" in content assert 'numpy = ">=1.20"' in content assert 'pandas = "*"' in content assert "[pypi-dependencies]" in content assert 'requests = "*"' in content def test_channels_resolution_behaviors(tmp_path: Path) -> None: """Explicit channels override file/default channels, while None falls back.""" cases: list[tuple[str, str, object, list[str]]] = [ ( "override", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, ["defaults", "bioconda"], ["defaults", "bioconda"], ), ( "fallback", """\ dependencies: - numpy platforms: - linux-64 """, _UNSET, ["conda-forge"], ), ( "empty-explicit", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, [], [], ), ] for case_name, req_content, channels_arg, expected in cases: case_dir = tmp_path / case_name case_dir.mkdir() req_file = _write_file(case_dir / "requirements.yaml", req_content) output_file = case_dir / "pixi.toml" kwargs: dict[str, Any] = {} if channels_arg is not _UNSET: kwargs["channels"] = channels_arg data = _generate_and_load(output_file, req_file, **kwargs) assert data["workspace"]["channels"] == expected def test_monorepo_pixi_generation(tmp_path: Path) -> None: """Test pixi.toml generation with features for multiple requirements files.""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - conda: scipy """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas - pip: requests """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req1, req2, project_name="monorepo", output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() # Check project section assert "[workspace]" in content assert 'name = "monorepo"' in content # Check feature dependencies (TOML writes them directly without parent section) assert "[feature.project1.dependencies]" in content assert 'numpy = "*"' in content assert 'scipy = "*"' in content assert "[feature.project2.dependencies]" in content assert 'pandas = "*"' in content assert "[feature.project2.pypi-dependencies]" in content assert 'requests = "*"' in content # Check environments (be flexible with TOML formatting) assert "[environments]" in content assert "default =" in content assert "project1" in content assert "project2" in content # Verify that default includes both projects assert content.count('"project1"') >= 1 assert content.count('"project2"') >= 1 def test_pixi_monorepo_feature_names_unique_for_same_leaf_dir(tmp_path: Path) -> None: """Feature names should not collide when leaf directory names are identical.""" apps_api_dir = tmp_path / "apps" / "api" apps_api_dir.mkdir(parents=True) apps_req = _write_file( apps_api_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) libs_api_dir = tmp_path / "libs" / "api" libs_api_dir.mkdir(parents=True) libs_req = _write_file( libs_api_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = 
_generate_and_load(tmp_path / "pixi.toml", apps_req, libs_req) features = data["feature"] assert len(features) == 2 assert len(set(features)) == 2 numpy_features = [ name for name, feature in features.items() if feature.get("dependencies", {}).get("numpy") == "*" ] pandas_features = [ name for name, feature in features.items() if feature.get("dependencies", {}).get("pandas") == "*" ] assert len(numpy_features) == 1 assert len(pandas_features) == 1 assert numpy_features[0] != pandas_features[0] assert set(data["environments"]["default"]) == { numpy_features[0], pandas_features[0], } def test_pixi_monorepo_feature_name_not_empty_for_relative_root_file( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, ) -> None: """Relative root-level requirements file should not produce an empty feature key.""" root_req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) sub_dir = tmp_path / "project" sub_dir.mkdir() sub_req = _write_file( sub_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) monkeypatch.chdir(tmp_path) # type: ignore[attr-defined] output_file = tmp_path / "pixi.toml" data = _generate_and_load( output_file, root_req.relative_to(tmp_path), sub_req.relative_to(tmp_path), ) features = data["feature"] assert len(features) == 2 assert "" not in features assert all(name for name in features) def test_pixi_with_version_pins(tmp_path: Path) -> None: """Test that version pins are passed through without resolution.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy >=1.20,<2.0 - conda: scipy =1.9.0 - pip: requests >2.20 - sympy >= 1.11 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, output_file=output_file, verbose=False, ) content = output_file.read_text() # Check that pins are preserved exactly (spaces removed) assert 'numpy = ">=1.20,<2.0"' in content assert 'scipy = "=1.9.0"' in content assert 'requests = ">2.20"' in content assert 'sympy = ">=1.11"' in content # Space should be removed def test_pixi_normalizes_single_equals_for_pip_pins(tmp_path: Path) -> None: """Pip pins with single '=' should be normalized to '=='.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - pip: pygsti =0.9.13.3 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, ) assert data["pypi-dependencies"]["pygsti"] == "==0.9.13.3" def test_pixi_prefers_pip_pin_over_unpinned_conda(tmp_path: Path) -> None: """Pinned pip spec should override unpinned conda spec.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - pip: foo >=1.2 conda: foo """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, ) assert data["dependencies"].get("foo") is None assert data["pypi-dependencies"]["foo"] == ">=1.2" def test_pixi_prefers_conda_for_unpinned_both_sources(tmp_path: Path) -> None: """Unpinned dependencies available in both sources should use conda only.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - pandas """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, ) deps = data["dependencies"] assert deps["numpy"] == "*" assert deps["pandas"] == "*" assert "pypi-dependencies" not in data def test_pixi_prefers_conda_for_equally_pinned_both_sources(tmp_path: Path) -> None: """When conda and pip have the same pin, use conda only.""" req_file = _write_file( 
tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - scipy >=1.10 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, ) assert data["dependencies"]["scipy"] == ">=1.10" assert "pypi-dependencies" not in data # --- Parametrized single-platform conflict resolution tests --- @pytest.mark.parametrize( ( "deps_yaml", "in_universal", "in_universal_pypi", "in_target_deps", "in_target_pypi", ), [ pytest.param( """\ - click - pip: click ==0.1 # [linux64] """, None, "==0.1", None, None, id="universal-conda-target-pip", ), pytest.param( """\ - conda: click >=8 - pip: click ==0.1 # [linux64] """, None, "==0.1", None, None, id="universal-pinned-conda-target-pinned-pip-prefers-target", ), pytest.param( """\ - conda: click >=8 - pip: click # [linux64] """, ">=8", None, None, None, id="universal-pinned-conda-beats-target-unpinned-pip", ), pytest.param( """\ - pip: click - conda: click >=8 # [linux64] """, ">=8", None, None, None, id="universal-pip-target-conda-prefers-narrower-conda", ), pytest.param( """\ - pip: click ==0.1 - conda: click # [linux64] """, None, "==0.1", None, None, id="universal-pinned-pip-beats-target-unpinned-conda", ), ], ) def test_pixi_reconciles_single_platform_conflict( tmp_path: Path, deps_yaml: str, in_universal: str | None, in_universal_pypi: str | None, in_target_deps: str | None, in_target_pypi: str | None, ) -> None: """Single-platform pixi output compresses the winner into the universal section.""" deps_block = textwrap.indent(textwrap.dedent(deps_yaml), " ") yaml_content = ( "channels:\n" " - conda-forge\n" "dependencies:\n" f"{deps_block}" "platforms:\n" " - linux-64\n" ) req_file = tmp_path / "requirements.yaml" req_file.write_text(yaml_content) data = _generate_and_load(tmp_path / "pixi.toml", req_file) if in_universal is not None: assert data["dependencies"]["click"] == in_universal else: assert "click" not in data.get("dependencies", {}) if in_universal_pypi is not None: assert data["pypi-dependencies"]["click"] == in_universal_pypi else: assert "click" not in data.get("pypi-dependencies", {}) linux_target = data.get("target", {}).get("linux-64", {}) if in_target_deps is not None: assert linux_target["dependencies"]["click"] == in_target_deps else: assert "click" not in linux_target.get("dependencies", {}) if in_target_pypi is not None: assert linux_target["pypi-dependencies"]["click"] == in_target_pypi else: assert "click" not in linux_target.get("pypi-dependencies", {}) def test_pixi_reconcile_is_order_independent_for_universal_and_target_conflicts( tmp_path: Path, ) -> None: """Universal/target conflict reconciliation should not depend on declaration order.""" req_target_then_universal = _write_file( tmp_path / "target_then_universal.yaml", """\ channels: - conda-forge dependencies: - pip: click ==0.1 # [linux64] - conda: click >=8 platforms: - linux-64 """, ) req_universal_then_target = _write_file( tmp_path / "universal_then_target.yaml", """\ channels: - conda-forge dependencies: - conda: click >=8 - pip: click ==0.1 # [linux64] platforms: - linux-64 """, ) out1 = tmp_path / "pixi-target-then-universal.toml" out2 = tmp_path / "pixi-universal-then-target.toml" data1 = _generate_and_load(out1, req_target_then_universal) data2 = _generate_and_load(out2, req_universal_then_target) assert data1 == data2 assert "click" not in data1.get("dependencies", {}) assert data1["pypi-dependencies"]["click"] == "==0.1" assert "target" not in data1 def test_pixi_demoted_reconciliation_is_order_independent_with_repeated_universals( tmp_path: 
Path, ) -> None: """All declaration orders should yield the same reconciled demoted result.""" deps = [ "- conda: click >=8", "- pip: click ==0.1 # [linux64]", "- conda: click >=9", ] results = [] for i, dep_order in enumerate(permutations(deps)): deps_block = "\n".join(f" {dep}" for dep in dep_order) req_file = _write_file( tmp_path / f"requirements-{i}.yaml", ( "channels:\n" " - conda-forge\n" "dependencies:\n" f"{deps_block}\n" "platforms:\n" " - linux-64\n" " - osx-64\n" ), ) data = _generate_and_load(tmp_path / f"pixi-{i}.toml", req_file) assert data["target"]["linux-64"]["pypi-dependencies"]["click"] == "==0.1" assert data["target"]["osx-64"]["dependencies"]["click"] == ">=9" assert "click" not in data.get("dependencies", {}) assert "click" not in data.get("pypi-dependencies", {}) results.append(data) assert all(result == results[0] for result in results[1:]) # --- Parametrized multiplatform conflict resolution tests --- @pytest.mark.parametrize( ("deps_yaml", "linux_section", "linux_val", "osx_section", "osx_val"), [ pytest.param( """\ - conda: click >=8 - pip: click ==0.1 # [linux64] """, "pypi-dependencies", "==0.1", "dependencies", ">=8", id="universal-conda-target-pip-multiplatform", ), pytest.param( """\ - pip: click ==0.1 - conda: click >=8 # [linux64] """, "dependencies", ">=8", "pypi-dependencies", "==0.1", id="universal-pip-target-conda-multiplatform", ), ], ) def test_pixi_reconciles_multiplatform_conflict( tmp_path: Path, deps_yaml: str, linux_section: str, linux_val: str, osx_section: str, osx_val: str, ) -> None: """Universal deps should be promoted to non-overriding target platforms.""" deps_block = textwrap.indent(textwrap.dedent(deps_yaml), " ") yaml_content = ( "channels:\n" " - conda-forge\n" "dependencies:\n" f"{deps_block}" "platforms:\n" " - linux-64\n" " - osx-arm64\n" ) req_file = tmp_path / "requirements.yaml" req_file.write_text(yaml_content) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert "click" not in data.get("dependencies", {}) assert "click" not in data.get("pypi-dependencies", {}) assert data["target"]["linux-64"][linux_section]["click"] == linux_val assert data["target"]["osx-arm64"][osx_section]["click"] == osx_val def test_pixi_with_local_package(tmp_path: Path) -> None: """Test that local packages are added as editable dependencies.""" project_dir = tmp_path / "my_package" project_dir.mkdir() _write_file( project_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) _write_file( project_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "my-package" """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( project_dir, output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert "pypi-dependencies" in content assert "my_package" in content assert 'path = "./my_package"' in content assert "editable = true" in content assert 'numpy = "*"' in content def test_pixi_single_file_editable_path_relative_to_output(tmp_path: Path) -> None: """Single-file mode should use editable path relative to output location.""" project_dir = tmp_path / "services" / "api" project_dir.mkdir(parents=True) _write_file( project_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) _write_file( project_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "service-api" """, ) data = _generate_and_load(tmp_path / "pixi.toml", project_dir / "requirements.yaml") editable_dep = 
data["pypi-dependencies"]["service_api"] assert editable_dep["editable"] is True assert editable_dep["path"] == "./services/api" def test_pixi_single_file_includes_local_dependency_package_as_editable( tmp_path: Path, ) -> None: """Single-file mode should install local dependency projects as editable packages.""" app_dir = tmp_path / "app" app_dir.mkdir() req_file = _write_file( app_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../lib """, ) lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "requirements.yaml", """\ dependencies: - pandas """, ) _write_file( lib_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "lib" """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert data["dependencies"]["numpy"] == "*" assert data["dependencies"]["pandas"] == "*" lib_editable = data["pypi-dependencies"]["lib"] assert lib_editable["editable"] is True assert lib_editable["path"] == "./lib" def test_pixi_empty_dependencies(tmp_path: Path) -> None: """Test handling of requirements file with no dependencies.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert "[workspace]" in content assert "[dependencies]" not in content assert "[pypi-dependencies]" not in content def test_pixi_with_platform_selectors(tmp_path: Path) -> None: """Test that platform selectors are converted to target sections.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - cuda-toolkit =11.8 # [linux64] - pip: pyobjc # [osx] platforms: - linux-64 - osx-arm64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, project_name="test-selectors", ) assert data["dependencies"]["numpy"] == "*" assert "cuda-toolkit" not in data["dependencies"] assert "pyobjc" not in data.get("pypi-dependencies", {}) assert data["target"]["linux-64"]["dependencies"]["cuda-toolkit"] == "=11.8" osx_target = data["target"].get("osx-arm64") or data["target"].get("osx-64") assert osx_target is not None assert osx_target["pypi-dependencies"]["pyobjc"] == "*" def test_pixi_selector_targets_preserved_without_explicit_platforms( tmp_path: Path, ) -> None: """Selector targets should not be dropped when input files omit platforms.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - cuda-toolkit # [linux64] - pip: pyobjc # [osx] """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert "linux-64" in data["workspace"]["platforms"] assert any(p in data["workspace"]["platforms"] for p in ("osx-64", "osx-arm64")) assert data["target"]["linux-64"]["dependencies"]["cuda-toolkit"] == "*" osx_target = data["target"].get("osx-arm64") or data["target"].get("osx-64") assert osx_target is not None assert osx_target["pypi-dependencies"]["pyobjc"] == "*" def test_pixi_with_multiple_platform_selectors(tmp_path: Path) -> None: """Test that broad selectors like 'unix' expand to multiple platforms.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - readline # [unix] - pywin32 # [win64] platforms: - linux-64 - osx-arm64 - win-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, 
project_name="test-multi-platform", ) assert data["dependencies"]["numpy"] == "*" assert "readline" not in data["dependencies"] assert "pywin32" not in data["dependencies"] assert data["target"]["linux-64"]["dependencies"]["readline"] == "*" assert data["target"]["osx-arm64"]["dependencies"]["readline"] == "*" assert data["target"]["win-64"]["dependencies"]["pywin32"] == "*" def test_pixi_monorepo_with_platform_selectors(tmp_path: Path) -> None: """Test platform selectors in monorepo mode (multiple files).""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - cuda-toolkit # [linux64] platforms: - linux-64 - osx-arm64 """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas - pip: pyobjc # [arm64] """, ) data = _generate_and_load( tmp_path / "pixi.toml", req1, req2, project_name="monorepo-selectors", ) project1 = data["feature"]["project1"] project2 = data["feature"]["project2"] assert "dependencies" not in project1 assert project1["target"]["linux-64"]["dependencies"]["numpy"] == "*" assert project1["target"]["osx-arm64"]["dependencies"]["numpy"] == "*" assert project1["target"]["linux-64"]["dependencies"]["cuda-toolkit"] == "*" assert project2["dependencies"]["pandas"] == "*" assert "pyobjc" not in project2.get("pypi-dependencies", {}) assert project2["target"]["osx-arm64"]["pypi-dependencies"]["pyobjc"] == "*" def test_pixi_monorepo_preserves_selector_only_platforms_without_declared_platforms( tmp_path: Path, ) -> None: project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge platforms: - linux-64 dependencies: - numpy """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pip: pyobjc # [osx] """, ) data = _generate_and_load( tmp_path / "pixi.toml", req1, req2, project_name="monorepo-selector-only-platforms", ) assert data["workspace"]["platforms"] == ["linux-64", "osx-64", "osx-arm64"] project2 = data["feature"]["project2"] assert "pypi-dependencies" not in project2 assert project2["target"]["osx-64"]["pypi-dependencies"]["pyobjc"] == "*" assert project2["target"]["osx-arm64"]["pypi-dependencies"]["pyobjc"] == "*" def test_pixi_single_file_preserves_selector_only_platforms_without_declared_platforms( tmp_path: Path, ) -> None: req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy # [linux] - numpy # [osx] """, ) data = _generate_and_load( tmp_path / "pixi.toml", req, project_name="single-file-selector-only-platforms", ) assert data["workspace"]["platforms"] == [ "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", ] assert data["dependencies"]["numpy"] == "*" assert "target" not in data def test_pixi_monorepo_optional_group_preserves_selector_only_platforms( tmp_path: Path, ) -> None: project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge platforms: - linux-64 dependencies: - numpy """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge optional_dependencies: dev: - pip: pyobjc # [osx] """, ) data = _generate_and_load( 
tmp_path / "pixi.toml", req1, req2, project_name="monorepo-selector-only-optional-platforms", ) assert data["workspace"]["platforms"] == ["linux-64", "osx-64", "osx-arm64"] project2_dev = data["feature"]["project2-dev"] assert "pypi-dependencies" not in project2_dev assert project2_dev["target"]["osx-64"]["pypi-dependencies"]["pyobjc"] == "*" assert project2_dev["target"]["osx-arm64"]["pypi-dependencies"]["pyobjc"] == "*" def test_pixi_single_file_optional_group_preserves_selector_only_platforms( tmp_path: Path, ) -> None: req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge optional_dependencies: dev: - numpy # [linux] - numpy # [osx] """, ) data = _generate_and_load( tmp_path / "pixi.toml", req, project_name="single-file-selector-only-optional-platforms", ) assert data["workspace"]["platforms"] == [ "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", ] assert data["feature"]["dev"]["dependencies"]["numpy"] == "*" def test_pixi_single_file_optional_group_keeps_platform_specific_dep_targeted( tmp_path: Path, ) -> None: req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - click # [linux] optional_dependencies: dev: - pip: pyobjc # [osx] """, ) data = _generate_and_load( tmp_path / "pixi.toml", req, project_name="single-file-optional-platform-hoist", ) assert data["workspace"]["platforms"] == [ "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", ] dev = data["feature"]["dev"] assert "pypi-dependencies" not in dev assert dev["target"]["osx-64"]["pypi-dependencies"]["pyobjc"] == "*" assert dev["target"]["osx-arm64"]["pypi-dependencies"]["pyobjc"] == "*" @pytest.mark.parametrize( ("first_pin", "second_pin"), [ (">1", "<1"), ("~=1.0", "<1"), ("==1", "!=1"), ], ) def test_pixi_rejects_contradictory_pip_constraints( tmp_path: Path, first_pin: str, second_pin: str, ) -> None: req_file = _write_file( tmp_path / "requirements.yaml", f"""\ channels: - conda-forge dependencies: - pip: pkg {first_pin} - pip: pkg {second_pin} """, ) with pytest.raises(VersionConflictError, match="pkg"): generate_pixi_toml(req_file, output_file=tmp_path / "pixi.toml", verbose=False) def test_pixi_monorepo_with_local_packages(tmp_path: Path) -> None: """Test that local packages in monorepo are added as editable dependencies.""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) _write_file( project1_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "project-one" """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) _write_file( project2_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "project-two" """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req1, req2, project_name="monorepo-local", output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert "[feature.project1.pypi-dependencies.project_one]" in content assert "[feature.project2.pypi-dependencies.project_two]" in content assert 'path = "./project1"' in content assert 'path = "./project2"' in content assert "editable = true" in content def test_pixi_monorepo_keeps_unmanaged_local_dependency_as_editable( tmp_path: Path, ) -> None: """Monorepo mode should keep unmanaged but installable 
local packages.""" app_dir = tmp_path / "app" app_dir.mkdir() req_app = _write_file( app_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../lib """, ) other_dir = tmp_path / "other" other_dir.mkdir() req_other = _write_file( other_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "lib-pkg" """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_app, req_other) assert "lib" not in data["feature"] app_editable = data["feature"]["app"]["pypi-dependencies"]["lib_pkg"] assert app_editable["editable"] is True assert app_editable["path"] == "./lib" def test_pixi_monorepo_optional_unmanaged_deduped_against_base( tmp_path: Path, ) -> None: """Unmanaged local dep in both base and optional should only appear in base feature.""" app_dir = tmp_path / "app" app_dir.mkdir() _write_file( app_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../lib optional_dependencies: dev: - ../lib """, ) lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "lib-pkg" """, ) other_dir = tmp_path / "other" other_dir.mkdir() _write_file( other_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = _generate_and_load( tmp_path / "pixi.toml", app_dir / "requirements.yaml", other_dir / "requirements.yaml", ) assert "lib_pkg" in data["feature"]["app"]["pypi-dependencies"] opt_feature_name = "app-dev" if opt_feature_name in data.get("feature", {}): opt_pypi = data["feature"][opt_feature_name].get("pypi-dependencies", {}) assert "lib_pkg" not in opt_pypi, ( "Unmanaged local dep should be deduped from optional feature" ) def test_pixi_monorepo_optional_unmanaged_only_group_creates_feature( tmp_path: Path, ) -> None: """An optional group with only unmanaged local deps should still create a feature.""" app_dir = tmp_path / "app" app_dir.mkdir() _write_file( app_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: dev: - ../lib """, ) lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "lib-pkg" """, ) other_dir = tmp_path / "other" other_dir.mkdir() _write_file( other_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = _generate_and_load( tmp_path / "pixi.toml", app_dir / "requirements.yaml", other_dir / "requirements.yaml", ) opt_feature_name = "app-dev" assert opt_feature_name in data["feature"], ( f"Expected feature '{opt_feature_name}' for unmanaged-only optional group" ) opt_pypi = data["feature"][opt_feature_name].get("pypi-dependencies", {}) assert "lib_pkg" in opt_pypi assert opt_pypi["lib_pkg"]["editable"] is True env_name = opt_feature_name.replace("_", "-") assert env_name in data["environments"] assert opt_feature_name in data["environments"][env_name] def test_pixi_monorepo_editable_paths_use_project_paths(tmp_path: Path) -> None: """Editable paths should point to project dirs, not derived feature names.""" apps_api_dir = tmp_path / "apps" / "api" apps_api_dir.mkdir(parents=True) _write_file( apps_api_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) _write_file( apps_api_dir / 
"pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "apps-api" """, ) libs_api_dir = tmp_path / "libs" / "api" libs_api_dir.mkdir(parents=True) _write_file( libs_api_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) _write_file( libs_api_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] [project] name = "libs-api" """, ) data = _generate_and_load( tmp_path / "pixi.toml", apps_api_dir / "requirements.yaml", libs_api_dir / "requirements.yaml", ) editable_paths = { dep_data["path"] for feature in data["feature"].values() for dep_data in feature.get("pypi-dependencies", {}).values() if isinstance(dep_data, dict) and dep_data.get("editable") is True } assert editable_paths == {"./apps/api", "./libs/api"} def test_pixi_monorepo_shared_local_file_becomes_single_feature(tmp_path: Path) -> None: """Shared local requirements should be represented as a separate feature.""" _write_file( tmp_path / "dev-requirements.yaml", """\ channels: - conda-forge dependencies: - pytest """, ) project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../dev-requirements.yaml """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas local_dependencies: - ../dev-requirements.yaml """, ) data = _generate_and_load(tmp_path / "pixi.toml", req1, req2) features = data["feature"] project1_feature = next( name for name, feature in features.items() if feature.get("dependencies", {}).get("numpy") == "*" ) project2_feature = next( name for name, feature in features.items() if feature.get("dependencies", {}).get("pandas") == "*" ) shared_feature = next( name for name, feature in features.items() if feature.get("dependencies", {}).get("pytest") == "*" ) assert project1_feature != shared_feature assert project2_feature != shared_feature assert shared_feature.startswith("dev-requirements") assert "pytest" not in features[project1_feature].get("dependencies", {}) assert "pytest" not in features[project2_feature].get("dependencies", {}) assert set(data["environments"]["default"]) == { project1_feature, project2_feature, shared_feature, } def test_pixi_monorepo_transitive_local_dependencies_are_composed_in_envs( tmp_path: Path, ) -> None: """Features should stay local while envs include transitive local dependencies.""" project_c = tmp_path / "project_c" project_c.mkdir() _write_file( project_c / "requirements.yaml", """\ channels: - conda-forge dependencies: - sympy """, ) project_b = tmp_path / "project_b" project_b.mkdir() _write_file( project_b / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas local_dependencies: - ../project_c """, ) project_a = tmp_path / "project_a" project_a.mkdir() req_a = _write_file( project_a / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../project_b """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_a, project_c / "requirements.yaml", ) features = data["feature"] feature_a = next( name for name, feature in features.items() if feature.get("dependencies", {}).get("numpy") == "*" ) feature_b = next( name for name, feature in features.items() if feature.get("dependencies", {}).get("pandas") == "*" ) feature_c = next( name for name, feature in features.items() if feature.get("dependencies", 
{}).get("sympy") == "*" ) assert "pandas" not in features[feature_a].get("dependencies", {}) assert "sympy" not in features[feature_a].get("dependencies", {}) assert "sympy" not in features[feature_b].get("dependencies", {}) assert set(data["environments"]["default"]) == {feature_a, feature_b, feature_c} def test_pixi_monorepo_ignores_wheel_local_dependencies_in_graph( tmp_path: Path, ) -> None: """Multi-file mode should skip wheel/zip locals while discovering features.""" wheels_dir = tmp_path / "wheels" wheels_dir.mkdir() wheel_file = wheels_dir / "example-0.1.0-py3-none-any.whl" wheel_file.write_text("not-a-real-wheel") project1 = tmp_path / "project1" project1.mkdir() req1 = _write_file( project1 / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - ../wheels/example-0.1.0-py3-none-any.whl """, ) project2 = tmp_path / "project2" project2.mkdir() req2 = _write_file( project2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = _generate_and_load(tmp_path / "pixi.toml", req1, req2) assert set(data["feature"]) == {"project1", "project2"} def test_pixi_single_file_local_dependency_use_modes(tmp_path: Path) -> None: """`use: pypi` should add pip dep, while `use: skip` should add nothing.""" pypi_local = tmp_path / "pypi_local" pypi_local.mkdir() _write_file( pypi_local / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) skipped_local = tmp_path / "skipped_local" skipped_local.mkdir() _write_file( skipped_local / "requirements.yaml", """\ channels: - conda-forge dependencies: - scipy """, ) req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy local_dependencies: - local: ./pypi_local use: pypi pypi: pypi-local-package >=1.2 - local: ./skipped_local use: skip """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert data["dependencies"]["numpy"] == "*" assert "pandas" not in data["dependencies"] assert "scipy" not in data["dependencies"] assert data["pypi-dependencies"]["pypi-local-package"] == ">=1.2" assert "skipped_local" not in data.get("pypi-dependencies", {}) assert "target" not in data def test_pixi_with_directory_input(tmp_path: Path) -> None: """Test passing a directory instead of a file.""" project_dir = tmp_path / "myproject" project_dir.mkdir() _write_file( project_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( project_dir, output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert 'numpy = "*"' in content def test_pixi_verbose_output(tmp_path: Path, capsys: object) -> None: """Test verbose output mode.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, output_file=output_file, verbose=True, ) captured = capsys.readouterr() # type: ignore[attr-defined] assert "Generated pixi.toml" in captured.out def test_pixi_fallback_package_name(tmp_path: Path) -> None: """Test fallback to directory name when pyproject.toml has no project.name.""" project_dir = tmp_path / "my_fallback_pkg" project_dir.mkdir() _write_file( project_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) _write_file( project_dir / "pyproject.toml", """\ [build-system] requires = ["setuptools"] """, ) output_file = 
tmp_path / "pixi.toml" generate_pixi_toml( project_dir, output_file=output_file, verbose=False, ) content = output_file.read_text() assert "my_fallback_pkg" in content def test_pixi_filtering_removes_empty_targets(tmp_path: Path) -> None: """Test that filtering removes targets entirely when no platforms match.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - cuda-toolkit # [linux64] platforms: - osx-arm64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, output_file=output_file, verbose=False, ) content = output_file.read_text() assert "cuda-toolkit" not in content assert "[target." not in content def test_pixi_stdout_output(tmp_path: Path, capsys: object) -> None: """Test output to stdout when output_file is None.""" _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, ) generate_pixi_toml( tmp_path / "requirements.yaml", output_file=None, verbose=False, ) captured = capsys.readouterr() # type: ignore[attr-defined] assert 'numpy = "*"' in captured.out assert "[workspace]" in captured.out def test_pixi_monorepo_with_directory_input(tmp_path: Path) -> None: """Test monorepo mode passing directories instead of files.""" project1_dir = tmp_path / "proj1" project1_dir.mkdir() _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) project2_dir = tmp_path / "proj2" project2_dir.mkdir() _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( project1_dir, project2_dir, project_name="monorepo-dirs", output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert "[feature.proj1.dependencies]" in content assert "[feature.proj2.dependencies]" in content def test_pixi_monorepo_filtering_removes_empty_feature_targets(tmp_path: Path) -> None: """Test that filtering removes empty feature targets in monorepo mode.""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy - cuda-toolkit # [linux64] platforms: - osx-arm64 """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req1, req2, project_name="monorepo-filter", output_file=output_file, verbose=False, ) content = output_file.read_text() assert "cuda-toolkit" not in content assert "[feature.project1.target.osx-arm64.dependencies]" in content assert "[feature.project1.target.linux-64.dependencies]" not in content def test_pixi_default_cwd(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: """Test that generate_pixi_toml uses cwd when no args provided.""" _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 """, ) monkeypatch.chdir(tmp_path) # type: ignore[attr-defined] output_file = tmp_path / "pixi.toml" generate_pixi_toml( output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert 'numpy = "*"' in content def test_pixi_optional_dependencies_single_file(tmp_path: Path) -> None: """Test optional dependencies with realistic user scenario.""" req_file = _write_file( tmp_path / 
"requirements.yaml", """\ channels: - conda-forge dependencies: - numpy >=1.20 optional_dependencies: dev: - pytest >=7.0 - pip: black - pexpect # [unix] - wexpect # [win64] docs: - sphinx - sphinx-rtd-theme platforms: - linux-64 - win-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, project_name="test-project", output_file=output_file, verbose=False, ) assert output_file.exists() content = output_file.read_text() assert "[dependencies]" in content assert 'numpy = ">=1.20"' in content assert "[feature.dev.dependencies]" in content assert 'pytest = ">=7.0"' in content assert "[feature.dev.pypi-dependencies]" in content assert 'black = "*"' in content assert "[feature.dev.target.linux-64.dependencies]" in content assert "[feature.dev.target.win-64.dependencies]" in content assert "[feature.docs.dependencies]" in content assert 'sphinx = "*"' in content assert "[environments]" in content assert "default = []" in content assert "dev = [" in content assert "docs = [" in content assert "all = [" in content def test_pixi_optional_dependencies_single_group(tmp_path: Path) -> None: """Test single optional group doesn't create 'all' environment.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: test: - pytest platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req_file, project_name="test-project", output_file=output_file, verbose=False, ) content = output_file.read_text() assert "[feature.test.dependencies]" in content assert 'pytest = "*"' in content assert "all = [" not in content def test_pixi_single_file_optional_group_named_all_keeps_unique_env( tmp_path: Path, ) -> None: """A user-defined optional group named 'all' should not be overwritten.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: all: - pandas dev: - pytest platforms: - linux-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert "all" in data["feature"] assert "dev" in data["feature"] envs = data["environments"] assert envs["all"] == ["all", "dev"] user_all_envs = [name for name, feats in envs.items() if feats == ["all"]] assert len(user_all_envs) == 1 assert user_all_envs[0] != "all" def test_pixi_single_file_optional_local_dependency_stays_optional( tmp_path: Path, ) -> None: """Optional local deps should appear in optional features, not root deps.""" local_dep_dir = tmp_path / "localdep" local_dep_dir.mkdir() _write_file( local_dep_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) root_req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: dev: - ./localdep """, ) data = _generate_and_load(tmp_path / "pixi.toml", root_req) assert data["dependencies"]["numpy"] == "*" assert "pandas" not in data.get("dependencies", {}) assert data["feature"]["dev"]["dependencies"]["pandas"] == "*" assert data["environments"]["default"] == [] assert data["environments"]["dev"] == ["dev"] def test_pixi_optional_dependencies_monorepo(tmp_path: Path) -> None: """Test optional dependencies in monorepo setup.""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: test: - pytest platforms: - linux-64 """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = 
_write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas optional_dependencies: lint: - black platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml( req1, req2, project_name="monorepo", output_file=output_file, verbose=False, ) content = output_file.read_text() assert "[feature.project1.target.linux-64.dependencies]" in content assert 'numpy = "*"' in content assert "[feature.project2.target.linux-64.dependencies]" in content assert 'pandas = "*"' in content assert "[feature.project1-test.target.linux-64.dependencies]" in content assert 'pytest = "*"' in content assert "[feature.project2-lint.target.linux-64.dependencies]" in content assert 'black = "*"' in content def test_pixi_monorepo_optional_local_dependency_is_only_in_optional_env( tmp_path: Path, ) -> None: """Optional local projects should be included only in the optional env.""" app_req, other_req = _setup_app_lib_other( tmp_path, """\ - ../lib - pytest """, ) data = _generate_and_load(tmp_path / "pixi.toml", app_req, other_req) features = data["feature"] assert "app" in features assert "app-dev" in features assert "lib" in features assert "other" in features envs = data["environments"] assert "lib" not in envs["default"] assert "lib" in envs["app-dev"] def test_pixi_monorepo_optional_group_with_only_local_deps_creates_env( tmp_path: Path, ) -> None: """Local-only optional groups should still create optional environments.""" app_req, other_req = _setup_app_lib_other( tmp_path, """\ - ../lib """, ) data = _generate_and_load(tmp_path / "pixi.toml", app_req, other_req) features = data["feature"] envs = data["environments"] assert "app" in features assert "lib" in features assert "other" in features assert "app-dev" not in features assert "app-dev" in envs assert "lib" not in envs["default"] assert "lib" in envs["app-dev"] def test_pixi_monorepo_optional_feature_name_collision_does_not_overwrite_base_feature( tmp_path: Path, ) -> None: """Optional feature names must not overwrite existing base feature keys.""" project_dir = tmp_path / "project" project_dir.mkdir() _write_file( project_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: dev: - pytest """, ) project_dev_dir = tmp_path / "project-dev" project_dev_dir.mkdir() _write_file( project_dev_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = _generate_and_load( tmp_path / "pixi.toml", project_dir / "requirements.yaml", project_dev_dir / "requirements.yaml", ) features = data["feature"] assert features["project-dev"]["dependencies"]["pandas"] == "*" assert features["project-dev-opt"]["dependencies"]["pytest"] == "*" assert data["environments"]["project-dev-opt"] == ["project", "project-dev-opt"] def test_pixi_monorepo_default_env_excludes_optional_features( tmp_path: Path, ) -> None: """Ensure monorepo default env only includes base features.""" project1_dir = tmp_path / "project1" project1_dir.mkdir() req1 = _write_file( project1_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: dev: - pytest platforms: - linux-64 """, ) project2_dir = tmp_path / "project2" project2_dir.mkdir() req2 = _write_file( project2_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas platforms: - linux-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req1, req2, project_name="monorepo", ) envs = data["environments"] assert set(envs["default"]) == 
{"project1", "project2"} assert "project1-dev" not in envs["default"] assert set(envs["project1-dev"]) == {"project1", "project1-dev"} def test_pixi_empty_platform_override_uses_file_platforms(tmp_path: Path) -> None: """Passing platforms=[] should fall back to platforms from requirements files.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy platforms: - linux-64 - osx-arm64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", req_file, platforms=[], ) assert set(data["workspace"]["platforms"]) == {"linux-64", "osx-arm64"} def test_pixi_monorepo_keeps_optional_groups_when_base_feature_empty( tmp_path: Path, ) -> None: """Optional sub-features should be preserved even when base feature is empty.""" project1 = tmp_path / "project1" project1.mkdir() req1 = _write_file( project1 / "requirements.yaml", """\ channels: - conda-forge dependencies: [] optional_dependencies: docs: - sphinx """, ) project2 = tmp_path / "project2" project2.mkdir() req2 = _write_file( project2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy """, ) data = _generate_and_load(tmp_path / "pixi.toml", req1, req2) features = data["feature"] assert "project1" not in features assert features["project1-docs"]["dependencies"]["sphinx"] == "*" assert "project2" in features envs = data["environments"] assert envs["default"] == ["project2"] assert envs["project1-docs"] == ["project1-docs"] def test_pixi_monorepo_skips_empty_optional_feature_group(tmp_path: Path) -> None: """Empty optional groups should not create empty sub-features.""" project1 = tmp_path / "project1" project1.mkdir() req1 = _write_file( project1 / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: docs: - pytest """, ) project2 = tmp_path / "project2" project2.mkdir() req2 = _write_file( project2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas """, ) data = _generate_and_load( tmp_path / "pixi.toml", req1, req2, skip_dependencies=["pytest"], ) assert "project1-docs" not in data["feature"] def test_derive_feature_names_handles_commonpath_valueerror( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, ) -> None: """Feature naming should fall back when commonpath raises ValueError.""" first = tmp_path / "a" / "api" second = tmp_path / "b" / "api" first.mkdir(parents=True) second.mkdir(parents=True) req1 = first / "requirements.yaml" req2 = second / "requirements.yaml" req1.write_text("dependencies: [numpy]\n") req2.write_text("dependencies: [pandas]\n") def _raise_commonpath(_: list[str]) -> str: msg = "boom" raise ValueError(msg) monkeypatch.setattr("unidep._pixi.os.path.commonpath", _raise_commonpath) names = _derive_feature_names([req1, req2]) assert len(names) == 2 assert len(set(names)) == 2 def test_derive_feature_names_handles_relative_to_valueerror( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, ) -> None: """Feature naming should still be unique if relative_to raises ValueError.""" root1 = tmp_path / "a+b" / "api" root2 = tmp_path / "a b" / "api" root3 = tmp_path / "a@b" / "api" root1.mkdir(parents=True) root2.mkdir(parents=True) root3.mkdir(parents=True) req1 = root1 / "requirements.yaml" req2 = root2 / "requirements.yaml" req3 = root3 / "requirements.yaml" req1.write_text("dependencies: [numpy]\n") req2.write_text("dependencies: [pandas]\n") req3.write_text("dependencies: [scipy]\n") path_type = type(tmp_path) def _raise_relative_to(_self: Path, *_args: object, **_kwargs: object) -> Path: msg = 
"boom" raise ValueError(msg) monkeypatch.setattr(path_type, "relative_to", _raise_relative_to) names = _derive_feature_names([req1, req2, req3]) assert len(names) == 3 assert len(set(names)) == 3 assert any(name.endswith("-2") for name in names) def test_editable_dependency_path_relative_forms(tmp_path: Path) -> None: """Editable path helper should preserve '.' and '../' relative forms.""" project_dir = tmp_path / "pkg" project_dir.mkdir() same_dir_output = project_dir / "pixi.toml" assert _editable_dependency_path(project_dir, same_dir_output) == "." nested_output = tmp_path / "nested" / "pixi.toml" nested_output.parent.mkdir() assert _editable_dependency_path(project_dir, nested_output) == "../pkg" def test_editable_dependency_path_cross_drive( tmp_path: Path, monkeypatch: Any, ) -> None: """On Windows, cross-drive paths should fall back to absolute instead of crashing.""" project_dir = tmp_path / "pkg" project_dir.mkdir() output = tmp_path / "pixi.toml" original_relpath = os.path.relpath def raising_relpath(_path: Any, _start: Any = None) -> str: msg = "path is on mount 'C:', start on mount 'D:'" raise ValueError(msg) monkeypatch.setattr(os.path, "relpath", raising_relpath) result = _editable_dependency_path(project_dir, output) assert project_dir.resolve().as_posix() == result monkeypatch.setattr(os.path, "relpath", original_relpath) assert _editable_dependency_path(project_dir, output) == "./pkg" def test_discover_local_dependency_graph_skips_non_local_and_missing( tmp_path: Path, ) -> None: """Graph discovery should ignore skipped/pypi/missing local entries safely.""" root = tmp_path / "root" root.mkdir() req = _write_file( root / "requirements.yaml", """\ dependencies: - numpy local_dependencies: - local: ../missing use: local - local: ../skipme use: skip - local: ../pypi-alt use: pypi pypi: foo>=1 """, ) result = _discover_local_dependency_graph([req]) assert result.roots == result.discovered assert len(result.roots) == 1 assert result.graph[result.roots[0]] == [] assert result.optional_group_graph == {} assert result.unmanaged_local_graph[result.roots[0]] == [] assert result.optional_group_unmanaged_graph == {} # --- Parametrized _parse_direct_requirements_for_node tests --- @pytest.mark.parametrize( ("extras", "req_content", "expected_in"), [ pytest.param( ["dev"], """\ dependencies: - numpy optional_dependencies: dev: - pytest """, ["numpy", "pytest"], id="selected-extras", ), pytest.param( ["*"], """\ dependencies: - numpy optional_dependencies: dev: - pytest docs: - sphinx """, ["numpy", "pytest", "sphinx"], id="star-extra", ), ], ) def test_parse_direct_requirements_for_node_extras( tmp_path: Path, extras: list[str], req_content: str, expected_in: list[str], ) -> None: """Extras on a local node should merge into required dependencies.""" req = _write_file(tmp_path / "requirements.yaml", req_content) node = PathWithExtras(req, extras) parsed = _parse_direct_requirements_for_node( node, verbose=False, ignore_pins=None, skip_dependencies=None, overwrite_pins=None, ) for name in expected_in: assert name in parsed.requirements assert parsed.optional_dependencies == {} def test_collect_transitive_nodes_deduplicates_seen_nodes(tmp_path: Path) -> None: """Transitive collection should skip already-seen nodes in cyclic graphs.""" req_a = PathWithExtras(tmp_path / "a" / "requirements.yaml", []) req_b = PathWithExtras(tmp_path / "b" / "requirements.yaml", []) graph = {req_a: [req_b, req_b], req_b: [req_a]} collected = _collect_transitive_nodes(req_a, graph) assert collected == [req_b, 
req_a] def test_pixi_with_build_string(tmp_path: Path) -> None: """Test pixi.toml generation with build strings in version specs.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - conda: qsimcirq >=0.21.0 cuda* # [linux64] - gcc =11 platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml(req_file, output_file=output_file, verbose=False) content = output_file.read_text() assert "[dependencies.qsimcirq]" in content assert 'version = ">=0.21.0"' in content assert 'build = "cuda*"' in content assert 'gcc = "=11"' in content def test_pixi_with_pip_extras(tmp_path: Path) -> None: """Test pixi.toml generation with pip extras.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - pip: pipefunc[extras] - pip: package[dev,test] >=1.0 platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml(req_file, output_file=output_file, verbose=False) content = output_file.read_text() assert "[pypi-dependencies.pipefunc]" in content assert 'version = "*"' in content assert '"extras"' in content assert "[pypi-dependencies.package]" in content assert 'version = ">=1.0"' in content assert '"dev"' in content assert '"test"' in content def test_pixi_with_merged_constraints(tmp_path: Path) -> None: """Test pixi.toml generation merges version constraints.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - scipy >=1.7,<2 - scipy <1.16 - numpy >=1.20 - numpy <2.0 platforms: - linux-64 """, ) output_file = tmp_path / "pixi.toml" generate_pixi_toml(req_file, output_file=output_file, verbose=False) content = output_file.read_text() assert 'scipy = ">=1.7,<1.16"' in content assert 'numpy = ">=1.20,<2.0"' in content def test_pixi_optional_local_dep_does_not_leak_base_local_deps( tmp_path: Path, ) -> None: """Base local deps must not appear in optional features.""" lib1 = tmp_path / "lib1" lib1.mkdir() _write_file( lib1 / "requirements.yaml", """\ dependencies: - pandas - scipy """, ) lib2 = tmp_path / "lib2" lib2.mkdir() _write_file( lib2 / "requirements.yaml", """\ dependencies: - requests """, ) root_req = _write_file( tmp_path / "requirements.yaml", """\ dependencies: - numpy local_dependencies: - ./lib1 optional_dependencies: dev: - ./lib2 - pytest """, ) data = _generate_and_load(tmp_path / "pixi.toml", root_req) root_deps = set(data.get("dependencies", {}).keys()) dev_deps = set(data["feature"]["dev"].get("dependencies", {}).keys()) assert "pandas" in root_deps assert "scipy" in root_deps assert "pandas" not in dev_deps, "base local dep leaked into optional feature" assert "scipy" not in dev_deps, "base local dep leaked into optional feature" assert "requests" in dev_deps assert "pytest" in dev_deps # --- Parametrized demotion weak-target tests --- @pytest.mark.parametrize( ("deps_yaml", "osx_dep_section", "osx_click_val"), [ pytest.param( """\ - conda: click >=8 - pip: click ==0.1 # [linux64] - pip: click # [osx64] """, "dependencies", ">=8", id="pinned-narrower-pip-beats-universal-conda", ), pytest.param( """\ - conda: click - pip: click ==0.1 # [linux64] - pip: click # [osx64] """, "dependencies", "*", id="unpinned-universal-conda-beats-unpinned-target-pip", ), ], ) def test_pixi_demoted_universal_weak_target( tmp_path: Path, deps_yaml: str, osx_dep_section: str, osx_click_val: str, ) -> None: """Demoted universals should replace weak target overrides correctly.""" deps_block = 
textwrap.indent(textwrap.dedent(deps_yaml), " ") yaml_content = ( "channels:\n" " - conda-forge\n" "dependencies:\n" f"{deps_block}" "platforms:\n" " - linux-64\n" " - osx-64\n" ) req_file = tmp_path / "requirements.yaml" req_file.write_text(yaml_content) data = _generate_and_load(tmp_path / "pixi.toml", req_file) linux = data["target"]["linux-64"] assert linux["pypi-dependencies"]["click"] == "==0.1" osx = data["target"]["osx-64"] assert osx[osx_dep_section]["click"] == osx_click_val if osx_dep_section == "pypi-dependencies": assert "click" not in osx.get("dependencies", {}) else: assert "click" not in osx.get("pypi-dependencies", {}) def test_pixi_demoted_universal_uses_latest_merged_constraint( tmp_path: Path, ) -> None: """Repeated universal specs must not leave a stale weaker constraint in demoted.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - conda: click >=8 - pip: click ==0.1 # [linux64] - conda: click >=9 platforms: - linux-64 - osx-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) # linux-64 should keep the target-specific pip override assert data["target"]["linux-64"]["pypi-dependencies"]["click"] == "==0.1" # osx-64 must get the final merged constraint (>=9), NOT the stale first (>=8) assert data["target"]["osx-64"]["dependencies"]["click"] == ">=9" assert "click" not in data["target"]["osx-64"].get("pypi-dependencies", {}) # Universal should be empty (demoted to per-platform targets) assert "click" not in data.get("dependencies", {}) assert "click" not in data.get("pypi-dependencies", {}) def test_pixi_demoted_universal_merges_constraints_across_demotions( tmp_path: Path, ) -> None: """Demoted universal constraints should keep cumulative merged bounds.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - conda: click >=8 - pip: click ==0.1 # [linux64] - conda: click <=10 platforms: - linux-64 - osx-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req_file) assert data["target"]["linux-64"]["pypi-dependencies"]["click"] == "==0.1" assert data["target"]["osx-64"]["dependencies"]["click"] == ">=8,<=10" def test_pixi_raises_when_losing_pip_alternative_is_internally_contradictory( tmp_path: Path, ) -> None: """Contradictory pip alternatives should fail even if conda would otherwise win.""" req_file = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - conda: click >=8 - pip: click ==0.1 # [linux64] - pip: click >=8 - conda: click >=9 # [linux64] platforms: - linux-64 - osx-64 """, ) with pytest.raises(VersionConflictError, match="click"): _generate_and_load(tmp_path / "pixi.toml", req_file) def test_parse_version_build_whitespace_only() -> None: assert _parse_version_build(" ") == "*" def test_make_pip_version_spec_dict_with_extras() -> None: result = _make_pip_version_spec({"version": ">=1.0", "build": "py3*"}, ["extra1"]) assert result == {"version": ">=1.0", "build": "py3*", "extras": ["extra1"]} def test_with_unique_order_paths_deduplicates(tmp_path: Path) -> None: d = tmp_path / "a" d.mkdir() result = _with_unique_order_paths([d, d, d]) assert result == [d] def test_unique_optional_feature_name_double_collision() -> None: taken: set[str] = {"feat-dev", "feat-dev-opt"} name = _unique_optional_feature_name( parent_feature="feat", group_name="dev", taken_names=taken, ) assert name == "feat-dev-opt-2" assert name in taken def test_unique_env_name_triple_collision() -> None: taken: set[str] = {"foo-bar", 
"foo-bar-2"} assert _unique_env_name("foo_bar", taken) == "foo-bar-3" def test_add_single_file_optional_environments_noop_without_features() -> None: pixi_data: dict[str, Any] = {"environments": {}} _add_single_file_optional_environments(pixi_data, []) assert pixi_data == {"environments": {}} def test_feature_platforms_for_entries_prefers_override() -> None: origin = DependencyOrigin(Path("requirements.yaml"), 0) entries = [ DependencyEntry( identifier="numpy", selector=None, conda=Spec(name="numpy", which="conda"), pip=None, origin=origin, ), ] assert _feature_platforms_for_entries( entries=entries, declared_platforms=["linux-64"], global_declared_platforms={"osx-arm64"}, platforms_override=["win-64"], ) == ["win-64"] def test_extract_dependencies_handles_universal_pip_and_mixed_buckets() -> None: origin = DependencyOrigin(Path("requirements.yaml"), 0) entries = [ DependencyEntry( identifier="numpy", selector=None, conda=Spec(name="numpy", which="conda"), pip=None, origin=origin, ), DependencyEntry( identifier="click", selector="linux64", conda=None, pip=Spec(name="click", which="pip", selector="linux64"), origin=origin, ), ] deps = _extract_dependencies( entries, platforms=["linux-64", "osx-64"], allow_hoist_without_universal_origin=True, ) assert deps[None][0]["numpy"] == "*" assert deps["linux-64"][1]["click"] == "*" assert "osx-64" not in deps or "click" not in deps["osx-64"][1] def test_filter_targets_by_platforms_removes_empty_sections() -> None: pixi_data: dict[str, Any] = { "target": { "osx-64": {"dependencies": {"numpy": "*"}}, }, "feature": { "dev": { "target": { "linux-64": {"dependencies": {"pytest": "*"}}, }, }, }, } _filter_targets_by_platforms(pixi_data, {"osx-arm64"}) assert "target" not in pixi_data assert "target" not in pixi_data["feature"]["dev"] def test_pixi_single_file_optional_local_dep_transitive_dedup( tmp_path: Path, ) -> None: """Cover single-file optional local dep dedup and pip-installable path.""" shared_dir = tmp_path / "shared" shared_dir.mkdir() _write_file( shared_dir / "requirements.yaml", """\ dependencies: - scipy """, ) (shared_dir / "setup.py").write_text( "from setuptools import setup; setup(name='shared')", ) opt_a_dir = tmp_path / "opt_a" opt_a_dir.mkdir() _write_file( opt_a_dir / "requirements.yaml", """\ dependencies: - pandas local_dependencies: - ../shared """, ) (opt_a_dir / "setup.py").write_text( "from setuptools import setup; setup(name='opt-a')", ) opt_b_dir = tmp_path / "opt_b" opt_b_dir.mkdir() _write_file( opt_b_dir / "requirements.yaml", """\ dependencies: - polars local_dependencies: - ../shared """, ) (opt_b_dir / "setup.py").write_text( "from setuptools import setup; setup(name='opt-b')", ) (tmp_path / "setup.py").write_text( "from setuptools import setup; setup(name='root')", ) req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: extras: - ./opt_a - ./opt_b platforms: - linux-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req) assert "extras" in data["feature"] extras_deps = data["feature"]["extras"].get("dependencies", {}) assert "pandas" in extras_deps or "polars" in extras_deps def test_pixi_single_file_optional_group_demoted_universal( tmp_path: Path, ) -> None: """Cover optional group's own deps trigger demotion.""" req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: special: - conda: click - pip: click >=2.0 # [linux64] platforms: - linux-64 - osx-arm64 """, ) data = 
_generate_and_load(tmp_path / "pixi.toml", req) special = data["feature"]["special"] assert special["target"]["linux-64"]["pypi-dependencies"]["click"] == ">=2.0" assert special["target"]["osx-arm64"]["dependencies"]["click"] == "*" assert "click" not in special.get("dependencies", {}) assert "click" not in special.get("pypi-dependencies", {}) # --- Parametrized monorepo demotion tests --- @pytest.mark.parametrize( ( "proj_deps", "proj_feature_key", "universal_pkg", "linux_pip_val", "osx_conda_val", ), [ pytest.param( """\ channels: - conda-forge dependencies: - conda: requests - pip: requests >=2.0 # [linux64] platforms: - linux-64 - osx-arm64 """, "proj", "requests", ">=2.0", "*", id="feature-demoted-universal", ), pytest.param( """\ channels: - conda-forge dependencies: - numpy optional_dependencies: special: - conda: click - pip: click >=2.0 # [linux64] platforms: - linux-64 - osx-arm64 """, "proj-special", "click", ">=2.0", "*", id="optional-group-demoted", ), ], ) def test_pixi_monorepo_demotion( tmp_path: Path, proj_deps: str, proj_feature_key: str, universal_pkg: str, linux_pip_val: str, osx_conda_val: str, ) -> None: """Monorepo feature/optional-group demotion + restore.""" proj = tmp_path / "proj" proj.mkdir() _write_file(proj / "requirements.yaml", proj_deps) proj2 = tmp_path / "proj2" proj2.mkdir() _write_file( proj2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas platforms: - linux-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", proj / "requirements.yaml", proj2 / "requirements.yaml", ) feature = data["feature"][proj_feature_key] assert ( feature["target"]["linux-64"]["pypi-dependencies"][universal_pkg] == linux_pip_val ) assert ( feature["target"]["osx-arm64"]["dependencies"][universal_pkg] == osx_conda_val ) assert universal_pkg not in feature.get("dependencies", {}) assert universal_pkg not in feature.get("pypi-dependencies", {}) assert ( data["feature"]["proj2"]["target"]["linux-64"]["dependencies"]["pandas"] == "*" ) def test_pixi_monorepo_feature_subset_does_not_leak_universal_deps( tmp_path: Path, ) -> None: proj = tmp_path / "proj" proj.mkdir() _write_file( proj / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas platforms: - osx-arm64 """, ) proj2 = tmp_path / "proj2" proj2.mkdir() _write_file( proj2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - click platforms: - linux-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", proj / "requirements.yaml", proj2 / "requirements.yaml", ) feature = data["feature"]["proj"] assert "pandas" not in feature.get("dependencies", {}) assert feature["target"]["osx-arm64"]["dependencies"]["pandas"] == "*" assert "linux-64" not in feature.get("target", {}) def test_pixi_single_file_env_name_collision(tmp_path: Path) -> None: """Optional groups whose names collide after underscore-to-hyphen normalization.""" req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: foo_bar: - pandas foo-bar: - polars platforms: - linux-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req) envs = data["environments"] env_feature_lists = [v for k, v in envs.items() if k not in ("default", "all")] flat_features = [feat for lst in env_feature_lists for feat in lst] assert "foo_bar" in flat_features assert "foo-bar" in flat_features assert len(env_feature_lists) == 2 def test_pixi_discover_graph_skips_non_list_optional_group( tmp_path: Path, ) -> None: """Cover optional group dep that is not 
a list.""" req = _write_file( tmp_path / "requirements.yaml", """\ dependencies: - numpy optional_dependencies: bad_group: "not a list" platforms: - linux-64 """, ) result = _discover_local_dependency_graph([req]) assert len(result.roots) == 1 assert not result.optional_group_graph def test_pixi_discover_graph_skips_non_local_optional_dep( tmp_path: Path, ) -> None: """Cover optional dep with use != local via override side-effect.""" proj = tmp_path / "proj" proj.mkdir() other = tmp_path / "other" other.mkdir() (other / "setup.py").write_text( "from setuptools import setup; setup(name='other')", ) _write_file( other / "requirements.yaml", """\ dependencies: - scipy """, ) _write_file( proj / "requirements.yaml", """\ dependencies: - numpy local_dependencies: - local: ../other use: pypi pypi: other-pkg optional_dependencies: extras: - ../other platforms: - linux-64 """, ) result = _discover_local_dependency_graph( [proj / "requirements.yaml"], ) assert len(result.roots) == 1 assert not result.optional_group_graph.get(result.roots[0], {}).get("extras", []) def test_pixi_discover_graph_skips_non_installable_optional_unmanaged( tmp_path: Path, ) -> None: """Cover optional unmanaged dep that is not pip-installable.""" proj = tmp_path / "proj" proj.mkdir() not_installable = tmp_path / "nosetup" not_installable.mkdir() _write_file( proj / "requirements.yaml", """\ dependencies: - numpy optional_dependencies: extras: - ../nosetup platforms: - linux-64 """, ) result = _discover_local_dependency_graph( [proj / "requirements.yaml"], ) assert len(result.roots) == 1 assert not result.optional_group_graph.get(result.roots[0], {}).get("extras", []) assert not result.optional_group_unmanaged_graph.get(result.roots[0], {}).get( "extras", [], ) def test_restore_demoted_skips_when_still_in_universal(tmp_path: Path) -> None: """Cover restore skips when pkg is in universal deps or target.""" req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge dependencies: - conda: numpy >=1.0 - pip: numpy >=2.0 # [linux64] - conda: scipy platforms: - linux-64 - osx-arm64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", req) assert "scipy" in data["dependencies"] def test_pixi_monorepo_optional_local_feature_not_in_pixi_data( tmp_path: Path, ) -> None: """Cover optional local dep feature not in pixi_data.""" empty_dir = tmp_path / "empty" empty_dir.mkdir() _write_file( empty_dir / "requirements.yaml", """\ dependencies: [] """, ) proj = tmp_path / "proj" proj.mkdir() _write_file( proj / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: extras: - ../empty platforms: - linux-64 """, ) proj2 = tmp_path / "proj2" proj2.mkdir() _write_file( proj2 / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas platforms: - linux-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", proj / "requirements.yaml", proj2 / "requirements.yaml", ) for env_features in data.get("environments", {}).values(): assert "empty" not in env_features def test_pixi_single_file_installable_optional_local_dep_not_in_root( tmp_path: Path, ) -> None: """Pip-installable optional local deps must NOT leak into root pypi-dependencies.""" localdep_dir = tmp_path / "localdep" localdep_dir.mkdir() _write_file( localdep_dir / "requirements.yaml", """\ dependencies: - pandas """, ) (localdep_dir / "setup.py").write_text( "from setuptools import setup; setup(name='localdep')", ) root_req = _write_file( tmp_path / "requirements.yaml", """\ channels: - conda-forge 
dependencies: - numpy optional_dependencies: dev: - ./localdep platforms: - linux-64 """, ) data = _generate_and_load(tmp_path / "pixi.toml", root_req) # localdep must NOT appear in root pypi-dependencies root_pypi = data.get("pypi-dependencies", {}) assert "localdep" not in root_pypi, ( "pip-installable optional local dep leaked into root pypi-dependencies" ) # localdep MUST appear in the dev feature dev_pypi = data["feature"]["dev"].get("pypi-dependencies", {}) assert "localdep" in dev_pypi, "optional local dep missing from dev feature" assert dev_pypi["localdep"]["editable"] is True def test_pixi_monorepo_optional_aggregator_transitive_deps_in_env( tmp_path: Path, ) -> None: """Empty aggregator in optional group must still pull transitive features into env.""" lib_dir = tmp_path / "lib" lib_dir.mkdir() _write_file( lib_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - scipy """, ) agg_dir = tmp_path / "agg" agg_dir.mkdir() _write_file( agg_dir / "requirements.yaml", """\ dependencies: [] local_dependencies: - ../lib """, ) app_dir = tmp_path / "app" app_dir.mkdir() _write_file( app_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - numpy optional_dependencies: extras: - ../agg platforms: - linux-64 """, ) other_dir = tmp_path / "other" other_dir.mkdir() _write_file( other_dir / "requirements.yaml", """\ channels: - conda-forge dependencies: - pandas platforms: - linux-64 """, ) data = _generate_and_load( tmp_path / "pixi.toml", app_dir / "requirements.yaml", other_dir / "requirements.yaml", ) app_extras_env = data["environments"].get("app-extras", []) assert "lib" in app_extras_env, ( f"transitive dep 'lib' missing from app-extras env: {app_extras_env}" ) ================================================ FILE: tests/test_project_dependency_handling.py ================================================ """Tests for the `project_dependency_handling` feature.""" from __future__ import annotations import textwrap from typing import TYPE_CHECKING, Literal import pytest from unidep._dependencies_parsing import ( _add_project_dependencies, parse_requirements, ) from unidep.platform_definitions import Spec if TYPE_CHECKING: from pathlib import Path @pytest.mark.parametrize( ("project_dependencies", "handling_mode", "expected"), [ # Test same-name ( ["pandas", "requests"], "same-name", ["pandas", "requests"], ), # Test pip-only ( ["pandas", "requests"], "pip-only", [{"pip": "pandas"}, {"pip": "requests"}], ), # Test ignore (["pandas", "requests"], "ignore", []), # Test invalid handling mode (["pandas", "requests"], "invalid", []), ], ) def test_project_dependency_handling( project_dependencies: list[str], handling_mode: Literal["same-name", "pip-only", "ignore", "invalid"], expected: list[dict[str, str] | str], ) -> None: valid_unidep_dependencies: list[dict[str, str] | str] = [ {"conda": "pandas", "pip": "pandas"}, "requests", {"conda": "zstd", "pip": "zstandard"}, ] unidep_dependencies = valid_unidep_dependencies.copy() if handling_mode == "invalid": with pytest.raises(ValueError, match="Invalid `project_dependency_handling`"): _add_project_dependencies( project_dependencies, unidep_dependencies, handling_mode, # type: ignore[arg-type] ) else: _add_project_dependencies( project_dependencies, unidep_dependencies, handling_mode, # type: ignore[arg-type] ) assert unidep_dependencies == valid_unidep_dependencies + expected @pytest.mark.parametrize( "project_dependency_handling", ["same-name", "pip-only", "ignore"], ) def 
test_project_dependency_handling_in_pyproject_toml( tmp_path: Path, project_dependency_handling: Literal["same-name", "pip-only", "ignore"], ) -> None: p = tmp_path / "pyproject.toml" p.write_text( textwrap.dedent( f"""\ [build-system] requires = ["hatchling", "unidep"] build-backend = "hatchling.build" [project] name = "my-project" version = "0.1.0" dependencies = [ "requests", "pandas", ] [tool.unidep] project_dependency_handling = "{project_dependency_handling}" dependencies = [ {{ conda = "python-graphviz", pip = "graphviz" }}, {{ conda = "graphviz" }}, ] """, ), ) requirements = parse_requirements(p) expected = { "python-graphviz": [ Spec(name="python-graphviz", which="conda", identifier="17e5d607"), ], "graphviz": [ Spec(name="graphviz", which="pip", identifier="17e5d607"), Spec(name="graphviz", which="conda", identifier="5eb93b8c"), ], } if project_dependency_handling == "pip-only": expected.update( { "requests": [Spec(name="requests", which="pip", identifier="08fd8713")], "pandas": [Spec(name="pandas", which="pip", identifier="9e467fa1")], }, ) elif project_dependency_handling == "same-name": expected.update( { "requests": [ Spec(name="requests", which="conda", identifier="08fd8713"), Spec(name="requests", which="pip", identifier="08fd8713"), ], "pandas": [ Spec(name="pandas", which="conda", identifier="9e467fa1"), Spec(name="pandas", which="pip", identifier="9e467fa1"), ], }, ) else: assert project_dependency_handling == "ignore" assert requirements.requirements == expected ================================================ FILE: tests/test_pypi_alternatives/main_app/main_app/__init__.py ================================================ """Main application module.""" def main() -> str: """Run the main application logic.""" from shared_lib import greet return f"Main app says: {greet()}" ================================================ FILE: tests/test_pypi_alternatives/main_app/pyproject.toml ================================================ [build-system] requires = ["hatchling", "unidep @ file:///Users/bas.nijholt/Work/unidep"] build-backend = "hatchling.build" [project] name = "test-main-app" version = "0.1.0" description = "Main app testing PyPI alternatives" dynamic = ["dependencies"] [tool.hatch.metadata] allow-direct-references = true [tool.hatch.metadata.hooks.unidep] [tool.hatch.build.targets.wheel] packages = ["main_app"] [tool.unidep] dependencies = ["numpy"] local_dependencies = [ {local = "../shared_lib", pypi = "pipefunc"} ] ================================================ FILE: tests/test_pypi_alternatives/shared_lib/pyproject.toml ================================================ [build-system] requires = ["hatchling"] build-backend = "hatchling.build" [project] name = "test-shared-lib" version = "1.0.0" description = "Shared library for testing PyPI alternatives" [tool.hatch.build.targets.wheel] packages = ["shared_lib"] ================================================ FILE: tests/test_pypi_alternatives/shared_lib/shared_lib/__init__.py ================================================ """Shared library module.""" def greet() -> str: """Return a greeting message.""" return "Hello from LOCAL shared library!" 
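# Illustrative sketch (hypothetical helper, not part of this fixture package):
# the same local-vs-PyPI fallback that test_all_scenarios.sh below exercises
# end-to-end with `uv build`, expressed in-process via
# unidep._setuptools_integration.get_python_dependencies, the helper used
# throughout tests/test_pypi_alternatives.py. Directory names mirror this
# fixture; the assertions follow the behaviour those tests pin down.
from pathlib import Path


def _demo_pypi_fallback_sketch(tmp_path: Path) -> None:
    import textwrap

    from unidep._setuptools_integration import get_python_dependencies

    lib = tmp_path / "shared_lib"
    lib.mkdir()
    (lib / "setup.py").write_text(
        'from setuptools import setup; setup(name="test-shared-lib")',
    )
    (lib / "shared_lib").mkdir()
    (lib / "shared_lib" / "__init__.py").write_text("")
    app = tmp_path / "main_app"
    app.mkdir()
    (app / "requirements.yaml").write_text(
        textwrap.dedent(
            """\
            dependencies:
              - numpy
            local_dependencies:
              - local: ../shared_lib
                pypi: pipefunc
            """,
        ),
    )
    # Local checkout exists: the requirement resolves to a file:// URL.
    deps = get_python_dependencies(
        app / "requirements.yaml",
        include_local_dependencies=True,
    )
    assert any("test-shared-lib @ file://" in d for d in deps.dependencies)
    # Local deps skipped (the UNIDEP_SKIP_LOCAL_DEPS=1 path): the PyPI
    # alternative "pipefunc" is injected instead.
    deps = get_python_dependencies(
        app / "requirements.yaml",
        include_local_dependencies=False,
    )
    assert "pipefunc" in deps.dependencies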
================================================ FILE: tests/test_pypi_alternatives/test_all_scenarios.sh ================================================ #!/bin/bash # Test PyPI alternatives feature in different scenarios set -e # Exit on error echo "=== Testing PyPI Alternatives Feature ===" echo export UV_NO_CACHE=1 # Colors for output GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[1;33m' NC='\033[0m' # No Color # Function to extract and show dependencies show_dependencies() { local wheel_file="$1" local scenario="$2" echo -e "${YELLOW}${scenario}${NC}" unzip -p "$wheel_file" '*/METADATA' | grep "Requires-Dist:" || echo "No dependencies found" echo } # Clean up function cleanup() { rm -rf main_app/dist rm -rf test_main_app-0.1.0.dist-info } # Start fresh echo "Cleaning up previous builds..." cleanup # Scenario 1: Normal build (local path exists) echo -e "${GREEN}=== Scenario 1: Normal build (local path exists) ===${NC}" echo "Expected: Should use file:// URL" echo cd main_app uv build > /dev/null 2>&1 show_dependencies "dist/test_main_app-0.1.0-py2.py3-none-any.whl" "Dependencies in wheel:" cd .. cleanup # Scenario 2: Build with local path missing (simulating CI) echo -e "${GREEN}=== Scenario 2: Build with local path missing (CI simulation) ===${NC}" echo "Expected: Should use PyPI alternative (pipefunc)" echo mv shared_lib shared_lib.tmp cd main_app uv build > /dev/null 2>&1 show_dependencies "dist/test_main_app-0.1.0-py2.py3-none-any.whl" "Dependencies in wheel:" cd .. mv shared_lib.tmp shared_lib cleanup # Scenario 3: Build with UNIDEP_SKIP_LOCAL_DEPS=1 (local path exists) echo -e "${GREEN}=== Scenario 3: Build with UNIDEP_SKIP_LOCAL_DEPS=1 (local path exists) ===${NC}" echo "Expected: Should use PyPI alternative (pipefunc) even though local exists" echo cd main_app UNIDEP_SKIP_LOCAL_DEPS=1 uv build > /dev/null 2>&1 show_dependencies "dist/test_main_app-0.1.0-py2.py3-none-any.whl" "Dependencies in wheel:" cd .. cleanup # Scenario 4: Build with UNIDEP_SKIP_LOCAL_DEPS=1 and local path missing echo -e "${GREEN}=== Scenario 4: Build with UNIDEP_SKIP_LOCAL_DEPS=1 (local path missing) ===${NC}" echo "Expected: Should use PyPI alternative (pipefunc)" echo mv shared_lib shared_lib.tmp cd main_app UNIDEP_SKIP_LOCAL_DEPS=1 uv build > /dev/null 2>&1 show_dependencies "dist/test_main_app-0.1.0-py2.py3-none-any.whl" "Dependencies in wheel:" cd .. 
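# Scenario 4 wraps up below by restoring the shared library and cleaning the
# build artifacts. Across all four scenarios the check is identical: the
# Requires-Dist entries in the built wheel's METADATA contain either a
# file:// URL (the local checkout was used) or the PyPI alternative
# "pipefunc" (the fallback was used).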
mv shared_lib.tmp shared_lib cleanup ================================================ FILE: tests/test_pypi_alternatives.py ================================================ """Test PyPI alternatives for local dependencies.""" from __future__ import annotations import sys import textwrap from typing import TYPE_CHECKING import pytest from ruamel.yaml import YAML, YAMLError from unidep import parse_local_dependencies, parse_requirements from unidep._dependencies_parsing import ( LocalDependency, _parse_local_dependency_item, get_local_dependencies, yaml_to_toml, ) from unidep._setuptools_integration import get_python_dependencies from .helpers import maybe_as_toml if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover import tomli as tomllib if TYPE_CHECKING: from pathlib import Path if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal def test_parse_local_dependency_item_string() -> None: """Test parsing string format local dependency.""" item = "../foo" result = _parse_local_dependency_item(item) assert result == LocalDependency(local="../foo", pypi=None) def test_parse_local_dependency_item_dict() -> None: """Test parsing dict format local dependency.""" item = {"local": "../foo", "pypi": "company-foo"} result = _parse_local_dependency_item(item) assert result == LocalDependency(local="../foo", pypi="company-foo") def test_parse_local_dependency_item_dict_with_use() -> None: """Test parsing dict format with explicit `use`.""" item = {"local": "../foo", "pypi": "company-foo", "use": "pypi"} result = _parse_local_dependency_item(item) assert result == LocalDependency( local="../foo", pypi="company-foo", use="pypi", ) def test_parse_local_dependency_item_dict_no_pypi() -> None: """Test parsing dict format without pypi key.""" item = {"local": "../foo"} result = _parse_local_dependency_item(item) assert result == LocalDependency(local="../foo", pypi=None) def test_parse_local_dependency_item_invalid_dict() -> None: """Test parsing dict without local key raises error.""" item = {"pypi": "company-foo"} with pytest.raises( ValueError, match="Dictionary-style local dependency must have a 'local' key", ): _parse_local_dependency_item(item) def test_parse_local_dependency_item_invalid_type() -> None: """Test parsing invalid type raises error.""" item = 123 with pytest.raises(TypeError, match="Invalid local dependency format"): _parse_local_dependency_item(item) # type: ignore[arg-type] def test_parse_local_dependency_item_invalid_use() -> None: """Invalid `use` value raises an error.""" item = {"local": "../foo", "use": "invalid"} with pytest.raises(ValueError, match="Invalid `use` value"): _parse_local_dependency_item(item) def test_parse_local_dependency_item_use_pypi_requires_pypi() -> None: """`use: pypi` must provide a PyPI alternative.""" item = {"local": "../foo", "use": "pypi"} with pytest.raises(ValueError, match="must specify a `pypi` alternative"): _parse_local_dependency_item(item) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_get_local_dependencies_mixed_format( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: """Test parsing mixed string and dict format local dependencies.""" project = tmp_path / "project" project.mkdir(exist_ok=True, parents=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../foo - local: ../bar pypi: company-bar - local: ../baz pypi: company-baz - ../qux """, 
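# Mixed formats in one list: bare strings ("../foo", "../qux") parse to
# LocalDependency(pypi=None), while dict entries keep their PyPI
# alternative (asserted below).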
), ) req_file = maybe_as_toml(toml_or_yaml, req_file) # Load the file to get the data dict yaml = YAML(typ="rt") with req_file.open() as f: if req_file.suffix == ".toml": with req_file.open("rb") as fb: pyproject = tomllib.load(fb) data = pyproject["tool"]["unidep"] else: data = yaml.load(f) local_deps = get_local_dependencies(data) assert len(local_deps) == 4 assert local_deps[0] == LocalDependency(local="../foo", pypi=None) assert local_deps[1] == LocalDependency(local="../bar", pypi="company-bar") assert local_deps[2] == LocalDependency(local="../baz", pypi="company-baz") assert local_deps[3] == LocalDependency(local="../qux", pypi=None) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_setuptools_integration_with_pypi_alternatives( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, monkeypatch: pytest.MonkeyPatch, # noqa: ARG001 ) -> None: """Test setuptools integration uses local paths when they exist.""" project = tmp_path / "project" project.mkdir(exist_ok=True, parents=True) # Create local dependency projects foo = tmp_path / "foo" foo.mkdir(exist_ok=True) (foo / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] name = "foo-pkg" version = "0.1.0" """, ), ) # Create a Python module to make it a valid package (foo / "foo_pkg").mkdir(exist_ok=True) (foo / "foo_pkg" / "__init__.py").write_text("") bar = tmp_path / "bar" bar.mkdir(exist_ok=True) (bar / "setup.py").write_text( textwrap.dedent( """\ from setuptools import setup setup(name="bar-pkg", version="0.1.0") """, ), ) # Create a Python module to make it a valid package (bar / "bar_pkg").mkdir(exist_ok=True) (bar / "bar_pkg" / "__init__.py").write_text("") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../foo - local: ../bar pypi: company-bar """, ), ) req_file = maybe_as_toml(toml_or_yaml, req_file) # Test with local paths existing (development mode) - should use file:// URLs deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies # Both should use file:// URLs since local paths exist assert any("foo-pkg @ file://" in dep for dep in deps.dependencies) assert any("bar-pkg @ file://" in dep for dep in deps.dependencies) # Should NOT use PyPI alternative when local exists assert not any("company-bar" in dep for dep in deps.dependencies) def test_local_dependency_use_pypi_injects_dependency(tmp_path: Path) -> None: """`use: pypi` should add the PyPI requirement as a normal dependency.""" project = tmp_path / "project" project.mkdir() (project / "requirements.yaml").write_text( textwrap.dedent( """ dependencies: [] local_dependencies: - local: ./dep pypi: company-dep>=1.0 use: pypi """, ), ) (tmp_path / "project" / "dep").mkdir() reqs = parse_requirements(project / "requirements.yaml") assert "company-dep" in reqs.requirements specs = reqs.requirements["company-dep"] assert specs[0].which == "pip" @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_standard_string_format( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: """Test that standard string format for local dependencies works.""" project = tmp_path / "project" project.mkdir(exist_ok=True, parents=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../foo - ../bar - ../baz """, ), ) req_file = 
maybe_as_toml(toml_or_yaml, req_file) # This should work without errors requirements = parse_requirements(req_file) assert "numpy" in requirements.requirements @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_yaml_to_toml_with_pypi_alternatives( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: """Test that yaml_to_toml preserves PyPI alternatives.""" if toml_or_yaml == "toml": # Skip for TOML as yaml_to_toml only works on YAML files return project = tmp_path / "project" project.mkdir(exist_ok=True, parents=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ name: test-project dependencies: - numpy local_dependencies: - ../foo - local: ../bar pypi: company-bar """, ), ) # Convert to TOML toml_content = yaml_to_toml(req_file) # Check that the structure is preserved assert "[tool.unidep]" in toml_content assert '"../foo"' in toml_content assert '{ local = "../bar", pypi = "company-bar" }' in toml_content def test_edge_cases(tmp_path: Path) -> None: # noqa: ARG001 """Test edge cases and error conditions.""" # Test empty dict with pytest.raises( ValueError, match="Dictionary-style local dependency must have a 'local' key", ): _parse_local_dependency_item({}) # Test dict with only pypi key with pytest.raises( ValueError, match="Dictionary-style local dependency must have a 'local' key", ): _parse_local_dependency_item({"pypi": "some-package"}) # Test None value with pytest.raises(TypeError, match="Invalid local dependency format"): _parse_local_dependency_item(None) # type: ignore[arg-type] # Test list value with pytest.raises(TypeError, match="Invalid local dependency format"): _parse_local_dependency_item(["foo", "bar"]) # type: ignore[arg-type] def test_local_dependency_with_extras(tmp_path: Path) -> None: """Test that local dependencies with extras work with PyPI alternatives.""" project = tmp_path / "project" project.mkdir(exist_ok=True, parents=True) # Create a local dependency with optional dependencies dep = tmp_path / "dep" dep.mkdir(exist_ok=True) (dep / "pyproject.toml").write_text( textwrap.dedent( """\ [project] name = "my-dep" version = "0.1.0" [tool.unidep] dependencies = ["requests"] optional_dependencies = {test = ["pytest"]} """, ), ) # Main project references the local dependency with extras req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep[test] pypi: company-dep[test] """, ), ) # Parse to ensure no errors requirements = parse_requirements(req_file) assert "numpy" in requirements.requirements def test_recursive_local_dependencies_with_pypi_alternatives(tmp_path: Path) -> None: """Test that PyPI alternatives work with nested local dependencies.""" # Create project structure: main -> dep1 -> dep2 main = tmp_path / "main" main.mkdir(exist_ok=True) dep1 = tmp_path / "dep1" dep1.mkdir(exist_ok=True) dep2 = tmp_path / "dep2" dep2.mkdir(exist_ok=True) # dep2 has no dependencies (dep2 / "requirements.yaml").write_text("dependencies: [pandas]") # dep1 depends on dep2 with PyPI alternative (dep1 / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep2 pypi: company-dep2 """, ), ) # main depends on dep1 with PyPI alternative (main / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - scipy local_dependencies: - local: ../dep1 pypi: company-dep1 """, ), ) # Parse and check requirements = parse_requirements(main / "requirements.yaml") assert "scipy" 
in requirements.requirements assert "numpy" in requirements.requirements # From dep1 assert "pandas" in requirements.requirements # From dep2 def test_empty_local_dependencies_list(tmp_path: Path) -> None: """Test handling of empty local_dependencies list.""" project = tmp_path / "project" project.mkdir(exist_ok=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: [] """, ), ) # Test setuptools integration deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert len([d for d in deps.dependencies if "file://" in d]) == 0 def test_local_dependencies_with_extras(tmp_path: Path) -> None: """Test local dependencies with extras notation work with PyPI alternatives.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a local dependency with optional dependencies dep = tmp_path / "dep" dep.mkdir(exist_ok=True) (dep / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools", "unidep"] build-backend = "setuptools.build_meta" [project] name = "my-dep" version = "0.1.0" dynamic = ["dependencies"] [tool.unidep] dependencies = ["requests"] optional_dependencies = {test = ["pytest"], dev = ["black"]} """, ), ) # Make it a valid package (dep / "my_dep").mkdir(exist_ok=True) (dep / "my_dep" / "__init__.py").write_text("") # Main project references the local dependency with extras req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep[test,dev] pypi: company-dep[test,dev] """, ), ) # Test setuptools integration deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies # Should use file:// URL since local path exists assert any("my-dep[test,dev] @ file://" in dep for dep in deps.dependencies) assert not any("company-dep" in dep for dep in deps.dependencies) def test_complex_path_structures(tmp_path: Path) -> None: """Test complex path structures including nested dirs and parent refs.""" # Create complex directory structure root = tmp_path / "workspace" root.mkdir(exist_ok=True) project = root / "apps" / "main" project.mkdir(exist_ok=True, parents=True) shared = root / "libs" / "shared" shared.mkdir(exist_ok=True, parents=True) utils = root / "libs" / "utils" utils.mkdir(exist_ok=True, parents=True) # Create valid packages for pkg_dir, name in [(shared, "shared"), (utils, "utils")]: (pkg_dir / "setup.py").write_text( f'from setuptools import setup; setup(name="{name}", version="1.0")', ) (pkg_dir / name).mkdir(exist_ok=True) (pkg_dir / name / "__init__.py").write_text("") # Project with complex relative paths req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - pandas local_dependencies: - local: ../../libs/shared pypi: company-shared>=1.0 - local: ../../libs/utils pypi: company-utils~=2.0 """, ), ) # Test setuptools integration deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "pandas" in deps.dependencies # Should use file:// URLs since local paths exist assert any("shared @ file://" in dep for dep in deps.dependencies) assert any("utils @ file://" in dep for dep in deps.dependencies) assert not any("company-shared" in dep for dep in deps.dependencies) assert not any("company-utils" in dep for dep in deps.dependencies) def test_invalid_yaml_handling(tmp_path: Path) -> None: """Test handling of 
invalid YAML in requirements file.""" project = tmp_path / "project" project.mkdir(exist_ok=True) req_file = project / "requirements.yaml" req_file.write_text( """\ dependencies: - numpy local_dependencies: - local: ../foo pypi: company-foo this is invalid yaml - more invalid """, ) # Should raise an error when parsing with pytest.raises((YAMLError, ValueError)): parse_requirements(req_file) def test_pypi_alternatives_with_absolute_paths(tmp_path: Path) -> None: """Test that absolute paths in local dependencies are handled correctly.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a dependency with absolute path dep = tmp_path / "absolute_dep" dep.mkdir(exist_ok=True) (dep / "setup.py").write_text( 'from setuptools import setup; setup(name="abs-dep", version="1.0")', ) (dep / "abs_dep").mkdir(exist_ok=True) (dep / "abs_dep" / "__init__.py").write_text("") req_file = project / "requirements.yaml" # Note: Using absolute path to trigger the assertion abs_path = str(dep.resolve()) req_file.write_text( textwrap.dedent( f"""\ dependencies: - numpy local_dependencies: - local: {abs_path} pypi: company-abs-dep """, ), ) # This should fail because absolute paths are not allowed with pytest.raises(AssertionError): parse_local_dependencies(req_file) def test_pypi_alternatives_when_local_missing(tmp_path: Path) -> None: """Test that PyPI alternatives are used when local paths don't exist.""" project = tmp_path / "project" project.mkdir(exist_ok=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../missing1 - local: ../missing2 pypi: company-missing2 """, ), ) # Test with missing local paths - should use PyPI alternatives when available deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies # missing1 has no PyPI alternative and doesn't exist - should be skipped assert not any("missing1" in dep for dep in deps.dependencies) # missing2 should use PyPI alternative since local doesn't exist assert any("company-missing2" in dep for dep in deps.dependencies) # Should NOT have file:// URLs for missing paths assert not any("file://" in dep for dep in deps.dependencies) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_mixed_string_and_dict_in_toml( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: """Test that mixed string and dict formats work in TOML.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create dependencies for name in ["dep1", "dep2", "dep3"]: dep = tmp_path / name dep.mkdir(exist_ok=True) (dep / "setup.py").write_text( f'from setuptools import setup; setup(name="{name}", version="1.0")', ) (dep / name).mkdir(exist_ok=True) (dep / name / "__init__.py").write_text("") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../dep1 - local: ../dep2 pypi: company-dep2 - local: ../dep3 """, ), ) req_file = maybe_as_toml(toml_or_yaml, req_file) # Test parsing requirements = parse_requirements(req_file) assert "numpy" in requirements.requirements def test_wheel_file_with_pypi_alternatives(tmp_path: Path) -> None: """Test handling of .whl files with PyPI alternatives.""" project = tmp_path / "project" project.mkdir() # Test 1: Wheel exists - should use it wheel_path = tmp_path / "dep.whl" wheel_path.touch() # Create dummy wheel file (project / "requirements.yaml").write_text( textwrap.dedent( f"""\ dependencies: - numpy 
local_dependencies: - local: {wheel_path} pypi: company-dep>=1.0 """, ), ) deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any("dep.whl @ file://" in dep for dep in deps.dependencies) assert not any("company-dep" in dep for dep in deps.dependencies) # Test 2: Wheel doesn't exist - should use PyPI alternative wheel_path.unlink() # Remove wheel file deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert "company-dep>=1.0" in deps.dependencies assert not any("file://" in dep for dep in deps.dependencies) # Test 3: Wheel with UNIDEP_SKIP_LOCAL_DEPS - should use PyPI wheel_path.touch() # Recreate wheel deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=False, # UNIDEP_SKIP_LOCAL_DEPS=1 ) assert "numpy" in deps.dependencies assert "company-dep>=1.0" in deps.dependencies assert not any("file://" in dep for dep in deps.dependencies) def test_skip_local_deps_with_pypi_alternatives(tmp_path: Path) -> None: """Test that UNIDEP_SKIP_LOCAL_DEPS uses PyPI alternatives when available.""" project = tmp_path / "project" project.mkdir() # Create local dependencies dep1 = tmp_path / "dep1" dep1.mkdir() (dep1 / "setup.py").write_text( 'from setuptools import setup; setup(name="dep1-local", version="0.1.0")', ) dep2 = tmp_path / "dep2" dep2.mkdir() (dep2 / "setup.py").write_text( 'from setuptools import setup; setup(name="dep2-local", version="0.1.0")', ) # Create project with mixed local dependencies (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../dep1 # String format - no PyPI alternative - local: ../dep2 pypi: company-dep2>=1.0 # Has PyPI alternative """, ), ) # Test with include_local_dependencies=False (UNIDEP_SKIP_LOCAL_DEPS=1) deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=False, ) # Check results assert "numpy" in deps.dependencies # dep1 should be completely skipped (no PyPI alternative) assert not any("dep1" in dep for dep in deps.dependencies) # dep2 should use PyPI alternative assert "company-dep2>=1.0" in deps.dependencies # No file:// URLs should be present assert not any("file://" in dep for dep in deps.dependencies) def test_regular_local_deps_with_existing_paths(tmp_path: Path) -> None: """Test regular (non-wheel) local dependencies that exist and are pip-installable.""" project = tmp_path / "project" project.mkdir() # Create local dependency with different package structures # Test 1: pyproject.toml dep1 = tmp_path / "dep1" dep1.mkdir() (dep1 / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] name = "my-dep1" version = "0.1.0" """, ), ) (dep1 / "my_dep1").mkdir() (dep1 / "my_dep1" / "__init__.py").write_text("") # Test 2: setup.cfg dep2 = tmp_path / "dep2" dep2.mkdir() (dep2 / "setup.cfg").write_text( textwrap.dedent( """\ [metadata] name = my-dep2 version = 0.1.0 """, ), ) (dep2 / "setup.py").write_text("from setuptools import setup; setup()") (dep2 / "my_dep2").mkdir() (dep2 / "my_dep2" / "__init__.py").write_text("") # Test 3: setup.py dep3 = tmp_path / "dep3" dep3.mkdir() (dep3 / "setup.py").write_text( 'from setuptools import setup; setup(name="my-dep3", version="0.1.0")', ) (dep3 / "my_dep3").mkdir() (dep3 / "my_dep3" / "__init__.py").write_text("") # Project with PyPI 
alternatives (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep1 pypi: company-dep1>=1.0 - local: ../dep2 pypi: company-dep2>=2.0 - local: ../dep3 pypi: company-dep3>=3.0 """, ), ) # Test with local paths existing deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) # All should use file:// URLs since local paths exist assert "numpy" in deps.dependencies assert any("my-dep1 @ file://" in dep for dep in deps.dependencies) assert any("my-dep2 @ file://" in dep for dep in deps.dependencies) assert any("my-dep3 @ file://" in dep for dep in deps.dependencies) # Should NOT use PyPI alternatives assert not any("company-dep" in dep for dep in deps.dependencies) def test_local_deps_with_extras_and_pypi_alternatives(tmp_path: Path) -> None: """Test local dependencies with extras notation and PyPI alternatives.""" project = tmp_path / "project" project.mkdir() # Create dependency with extras dep = tmp_path / "dep" dep.mkdir() (dep / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] name = "my-dep-extras" version = "0.1.0" dependencies = ["requests"] [project.optional-dependencies] test = ["pytest"] dev = ["black", "ruff"] """, ), ) (dep / "my_dep_extras").mkdir() (dep / "my_dep_extras" / "__init__.py").write_text("") # Test various extras notations (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep[test] pypi: company-dep[test]>=1.0 - local: ../dep[dev] pypi: company-dep[dev]>=1.0 - local: ../dep[test,dev] pypi: company-dep[test,dev]>=1.0 """, ), ) # Test with local paths existing deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) # Should use file:// URLs with extras preserved assert "numpy" in deps.dependencies assert any("my-dep-extras[test] @ file://" in dep for dep in deps.dependencies) assert any("my-dep-extras[dev] @ file://" in dep for dep in deps.dependencies) assert any("my-dep-extras[test,dev] @ file://" in dep for dep in deps.dependencies) # Should NOT use PyPI alternatives assert not any("company-dep" in dep for dep in deps.dependencies) def test_local_deps_missing_with_pypi_fallback(tmp_path: Path) -> None: """Test regular local dependencies that don't exist fall back to PyPI alternatives.""" project = tmp_path / "project" project.mkdir() # Create project with non-existent local dependencies (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../missing-dep1 pypi: company-dep1>=1.0 - local: ../missing-dep2[extras] pypi: company-dep2[extras]>=2.0 - ../missing-dep3 # No PyPI alternative """, ), ) # Test with missing local paths deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) # Should use PyPI alternatives when available assert "numpy" in deps.dependencies assert "company-dep1>=1.0" in deps.dependencies assert "company-dep2[extras]>=2.0" in deps.dependencies # missing-dep3 should be skipped (no PyPI alternative) assert not any("missing-dep3" in dep for dep in deps.dependencies) # No file:// URLs since paths don't exist assert not any("file://" in dep for dep in deps.dependencies) def test_missing_requirements_file_handling(tmp_path: Path) -> None: """Test handling when requirements.yaml doesn't exist.""" # Test 1: raises_if_missing=True (default) - 
should raise with pytest.raises(FileNotFoundError): get_python_dependencies( tmp_path / "non_existent.yaml", raises_if_missing=True, ) # Test 2: raises_if_missing=False - should return empty deps = get_python_dependencies( tmp_path / "non_existent.yaml", raises_if_missing=False, ) assert deps.dependencies == [] assert deps.extras == {} def test_package_name_extraction_edge_cases(tmp_path: Path) -> None: """Test edge cases for package name extraction from various file formats.""" project = tmp_path / "project" project.mkdir() # Test 1: setup.cfg without name dep1 = tmp_path / "dep1" dep1.mkdir() (dep1 / "setup.cfg").write_text( textwrap.dedent( """\ [metadata] version = 0.1.0 # Missing name field """, ), ) (dep1 / "setup.py").write_text("from setuptools import setup; setup()") (dep1 / "dep1").mkdir() (dep1 / "dep1" / "__init__.py").write_text("") # Test 2: setup.py without name dep2 = tmp_path / "dep2" dep2.mkdir() (dep2 / "setup.py").write_text( textwrap.dedent( """\ from setuptools import setup setup(version="0.1.0") # Missing name """, ), ) (dep2 / "dep2").mkdir() (dep2 / "dep2" / "__init__.py").write_text("") # Test 3: pyproject.toml with Poetry format dep3 = tmp_path / "dep3" dep3.mkdir() (dep3 / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" [tool.poetry] name = "poetry-dep" version = "0.1.0" """, ), ) (dep3 / "poetry_dep").mkdir() (dep3 / "poetry_dep" / "__init__.py").write_text("") # Test 4: pyproject.toml without name anywhere dep4 = tmp_path / "dep4" dep4.mkdir() (dep4 / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" # No project section, no name anywhere [tool.setuptools] packages = ["dep4"] """, ), ) (dep4 / "dep4").mkdir() (dep4 / "dep4" / "__init__.py").write_text("") # Test 5: Minimal setup.py - fallback to folder name dep5 = tmp_path / "folder-name-dep" dep5.mkdir() # Minimal setup.py to make it pip-installable (dep5 / "setup.py").write_text("from setuptools import setup; setup()") (dep5 / "folder_name_dep").mkdir() (dep5 / "folder_name_dep" / "__init__.py").write_text("") # Create project referencing these deps (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../dep1 pypi: company-dep1 - local: ../dep2 pypi: company-dep2 - local: ../dep3 pypi: company-dep3 - local: ../dep4 pypi: company-dep4 - local: ../folder-name-dep pypi: company-dep5 """, ), ) # Test with local paths existing deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) # Check that all dependencies were processed assert "numpy" in deps.dependencies # dep1: falls back to folder name "dep1" assert any("dep1 @ file://" in dep for dep in deps.dependencies) # dep2: falls back to folder name "dep2" assert any("dep2 @ file://" in dep for dep in deps.dependencies) # dep3: uses poetry name "poetry-dep" assert any("poetry-dep @ file://" in dep for dep in deps.dependencies) # dep4: falls back to folder name "dep4" assert any("dep4 @ file://" in dep for dep in deps.dependencies) # dep5: uses folder name "folder-name-dep" assert any("folder-name-dep @ file://" in dep for dep in deps.dependencies) ================================================ FILE: tests/test_pypi_alternatives_errors.py ================================================ """Test error cases and special scenarios for PyPI alternatives.""" from __future__ import annotations 
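# Editor's note: every test in this module hand-rolls the same `local:`/`pypi:`
# requirements.yaml shape. A minimal sketch of that shared setup, using only the
# stdlib; the helper name `_write_pypi_alt_requirements` is hypothetical and not
# part of unidep:
def _write_pypi_alt_requirements(project, local, pypi):
    """Write a requirements.yaml declaring one local dep with a PyPI fallback."""
    req_file = project / "requirements.yaml"
    req_file.write_text(
        "dependencies:\n"
        "  - numpy\n"
        "local_dependencies:\n"
        f"  - local: {local}\n"
        f"    pypi: {pypi}\n",
    )
    return req_file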
import textwrap from typing import TYPE_CHECKING import pytest from unidep import parse_local_dependencies from unidep._dependencies_parsing import parse_requirements from unidep._setuptools_integration import get_python_dependencies if TYPE_CHECKING: from pathlib import Path def test_local_dependency_wheel_with_pypi_alternative(tmp_path: Path) -> None: """Test that wheel files work with PyPI alternatives.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a dummy wheel file wheel_file = tmp_path / "some_package.whl" wheel_file.write_text("dummy wheel content") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../some_package.whl pypi: company-package==1.0.0 """, ), ) # This should work without errors requirements = parse_requirements(req_file) assert "numpy" in requirements.requirements # The wheel should be handled in parse_local_dependencies deps = parse_local_dependencies(req_file, verbose=True) assert len(deps) == 1 # Get the first (and only) list of paths paths = next(iter(deps.values())) assert len(paths) == 1 # Compare resolved paths to handle Windows path differences assert paths[0].resolve() == wheel_file.resolve() def test_missing_local_dependency_with_pypi_alternative(tmp_path: Path) -> None: """Test behavior when local dependency doesn't exist but has PyPI alternative.""" project = tmp_path / "project" project.mkdir(exist_ok=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../missing_dep pypi: company-missing """, ), ) # Should not raise when raise_if_missing=False deps = parse_local_dependencies(req_file, raise_if_missing=False) assert len(deps) == 0 # Should raise when raise_if_missing=True with pytest.raises(FileNotFoundError): parse_local_dependencies(req_file, raise_if_missing=True) def test_empty_folder_with_pypi_alternative(tmp_path: Path) -> None: """Test error when local dependency is an empty folder.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create empty folder empty_dep = tmp_path / "empty_dep" empty_dep.mkdir(exist_ok=True) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../empty_dep pypi: company-empty """, ), ) # Should raise RuntimeError for empty folder with pytest.raises( RuntimeError, match="is not pip installable because it is an empty folder", ): parse_local_dependencies(req_file) def test_empty_git_submodule_with_pypi_alternative(tmp_path: Path) -> None: """Test error when local dependency is an empty git submodule.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a directory that looks like an empty git submodule git_submodule = tmp_path / "git_submodule" git_submodule.mkdir(exist_ok=True) (git_submodule / ".git").write_text("gitdir: ../.git/modules/git_submodule") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../git_submodule pypi: company-submodule """, ), ) # Should raise RuntimeError for empty git submodule with pytest.raises( RuntimeError, match="is not installable by pip because it is an empty Git submodule", ): parse_local_dependencies(req_file) def test_non_pip_installable_with_pypi_alternative(tmp_path: Path) -> None: """Test error when local dependency is not pip installable.""" project = tmp_path / "project" 
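# Editor's note: "pip installable" in the RuntimeError below effectively means
# "has build metadata". A rough standalone sketch of that notion, illustrative
# only; unidep's real check is `unidep.utils.is_pip_installable`, whose exact
# rules may differ:
def _looks_pip_installable(path: Path) -> bool:
    # A directory is treated as installable if it carries any build metadata.
    return any(
        (path / marker).exists()
        for marker in ("setup.py", "setup.cfg", "pyproject.toml")
    )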
project.mkdir(exist_ok=True) # Create a non-pip-installable directory (no setup.py, pyproject.toml, etc.) non_pip = tmp_path / "non_pip" non_pip.mkdir(exist_ok=True) (non_pip / "some_file.txt").write_text("not a python package") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../non_pip pypi: company-non-pip """, ), ) # Should raise RuntimeError with pytest.raises( RuntimeError, match="is not pip installable nor is it managed by unidep", ): parse_local_dependencies(req_file) def test_circular_dependencies_with_pypi_alternatives(tmp_path: Path) -> None: """Test circular dependencies with PyPI alternatives.""" project1 = tmp_path / "project1" project1.mkdir(exist_ok=True) project2 = tmp_path / "project2" project2.mkdir(exist_ok=True) # project1 depends on project2 (project1 / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - pandas local_dependencies: - local: ../project2 pypi: company-project2 """, ), ) # project2 depends on project1 (circular) (project2 / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../project1 pypi: company-project1 """, ), ) # Should handle circular dependencies gracefully requirements = parse_requirements( project1 / "requirements.yaml", project2 / "requirements.yaml", ) assert "pandas" in requirements.requirements assert "numpy" in requirements.requirements def test_very_long_pypi_alternative_names(tmp_path: Path) -> None: """Test handling of very long PyPI package names in alternatives.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a local dependency dep = tmp_path / "dep" dep.mkdir(exist_ok=True) (dep / "setup.py").write_text( 'from setuptools import setup; setup(name="dep", version="1.0")', ) (dep / "dep").mkdir(exist_ok=True) (dep / "dep" / "__init__.py").write_text("") # Very long PyPI alternative name long_name = "company-" + "x" * 200 + "-package>=1.0.0" req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( f"""\ dependencies: - numpy local_dependencies: - local: ../dep pypi: {long_name} """, ), ) # Should handle long names without issues # Test with local path existing - should use file:// URL deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any("dep @ file://" in d for d in deps.dependencies) # Test with local path missing - should use PyPI alternative import shutil shutil.rmtree(dep) deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert long_name in deps.dependencies def test_special_characters_in_paths(tmp_path: Path) -> None: """Test handling of special characters in local dependency paths.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create a dependency with special characters in name special_dir = tmp_path / "dep with spaces & special-chars" special_dir.mkdir(exist_ok=True) (special_dir / "setup.py").write_text( 'from setuptools import setup; setup(name="special-dep", version="1.0")', ) (special_dir / "special_dep").mkdir(exist_ok=True) (special_dir / "special_dep" / "__init__.py").write_text("") req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: "../dep with spaces & special-chars" pypi: company-special-dep """, ), ) # Should handle special characters correctly # With local path existing - should use 
file:// URL deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any("special-dep @ file://" in d for d in deps.dependencies) assert not any("company-special-dep" in d for d in deps.dependencies) def test_symlink_local_dependencies(tmp_path: Path) -> None: """Test handling of symlinked local dependencies.""" import os # Skip on Windows where symlinks require admin privileges if os.name == "nt": pytest.skip("Symlink test skipped on Windows") project = tmp_path / "project" project.mkdir(exist_ok=True) # Create actual dependency actual_dep = tmp_path / "actual_dep" actual_dep.mkdir(exist_ok=True) (actual_dep / "setup.py").write_text( 'from setuptools import setup; setup(name="actual", version="1.0")', ) (actual_dep / "actual").mkdir(exist_ok=True) (actual_dep / "actual" / "__init__.py").write_text("") # Create symlink symlink_dep = tmp_path / "symlink_dep" symlink_dep.symlink_to(actual_dep) req_file = project / "requirements.yaml" req_file.write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../symlink_dep pypi: company-symlink-dep """, ), ) # Should resolve symlinks correctly # With symlink existing - should use file:// URL deps = get_python_dependencies( req_file, include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any("actual @ file://" in d for d in deps.dependencies) assert not any("company-symlink-dep" in d for d in deps.dependencies) ================================================ FILE: tests/test_pypi_alternatives_integration.py ================================================ """Integration tests for PyPI alternatives in local dependencies.""" from __future__ import annotations import shutil import textwrap from typing import TYPE_CHECKING from unidep._setuptools_integration import get_python_dependencies if TYPE_CHECKING: from pathlib import Path import pytest def test_build_with_pypi_alternatives( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that building a wheel uses PyPI alternatives when local paths don't exist.""" # Create main project project = tmp_path / "main_project" project.mkdir(exist_ok=True) # Create local dependency local_dep = tmp_path / "local_dep" local_dep.mkdir(exist_ok=True) (local_dep / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools", "unidep"] build-backend = "setuptools.build_meta" [project] name = "local-dep" version = "0.1.0" [tool.unidep] dependencies = ["requests"] """, ), ) (local_dep / "local_dep.py").write_text("# Local dependency module") # Create main project with PyPI alternative (project / "pyproject.toml").write_text( textwrap.dedent( """\ [build-system] requires = ["setuptools", "unidep"] build-backend = "setuptools.build_meta" [project] name = "main-project" version = "0.1.0" dynamic = ["dependencies"] [tool.unidep] dependencies = ["numpy"] local_dependencies = [ {local = "../local_dep", pypi = "company-local-dep==1.0.0"} ] """, ), ) (project / "main_project.py").write_text("# Main project module") # Change to project directory monkeypatch.chdir(project) # Test 1: Normal development with local paths existing - should use file:// URLs deps = get_python_dependencies( project / "pyproject.toml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies # Should use file:// URL since local path exists assert any("local-dep @ file://" in dep for dep in deps.dependencies) assert not any("company-local-dep" in dep for dep in deps.dependencies) # Test 
2: Simulate wheel build where local paths don't exist # Move the local dependency to simulate it not being available local_dep_backup = tmp_path / "local_dep_backup" shutil.move(str(local_dep), str(local_dep_backup)) deps = get_python_dependencies( project / "pyproject.toml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies # Should use PyPI alternative since local path doesn't exist assert "company-local-dep==1.0.0" in deps.dependencies assert not any("file://" in dep for dep in deps.dependencies) def test_mixed_local_deps_with_and_without_pypi(tmp_path: Path) -> None: """Test project with some local deps having PyPI alternatives and some not.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create local dependencies for name in ["dep1", "dep2", "dep3"]: dep_dir = tmp_path / name dep_dir.mkdir(exist_ok=True) (dep_dir / "setup.py").write_text( f'from setuptools import setup; setup(name="{name}", version="0.1.0")', ) # Create requirements.yaml with mixed format (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - pandas local_dependencies: - ../dep1 # No PyPI alternative - local: ../dep2 pypi: company-dep2>=2.0 - local: ../dep3 pypi: company-dep3~=3.0 """, ), ) deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "pandas" in deps.dependencies # All should use file:// since local paths exist assert any("dep1 @ file://" in dep for dep in deps.dependencies) assert any("dep2 @ file://" in dep for dep in deps.dependencies) assert any("dep3 @ file://" in dep for dep in deps.dependencies) # Should NOT use PyPI alternatives when local exists assert not any("company-dep2" in dep for dep in deps.dependencies) assert not any("company-dep3" in dep for dep in deps.dependencies) def test_setuptools_with_skip_local_deps_env_var( tmp_path: Path, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test the behavior of the UNIDEP_SKIP_LOCAL_DEPS environment variable.""" project = tmp_path / "project" project.mkdir(exist_ok=True) # Create local dependency dep = tmp_path / "dep" dep.mkdir(exist_ok=True) (dep / "setup.py").write_text( 'from setuptools import setup; setup(name="my-dep", version="0.1.0")', ) # Create project with local dependency (no PyPI alternative) (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - ../dep # No PyPI alternative """, ), ) # Test without UNIDEP_SKIP_LOCAL_DEPS deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any("my-dep @ file://" in dep for dep in deps.dependencies) # Test with UNIDEP_SKIP_LOCAL_DEPS=1 monkeypatch.setenv("UNIDEP_SKIP_LOCAL_DEPS", "1") deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=False, # This would be set by _deps() ) assert "numpy" in deps.dependencies # Should not include local dependency assert not any("my-dep" in dep for dep in deps.dependencies) assert not any("file://" in dep for dep in deps.dependencies) def test_use_skip_entries_are_ignored(tmp_path: Path) -> None: """Entries marked `use: skip` should never contribute dependencies.""" project = tmp_path / "project" project.mkdir(exist_ok=True) skip_dep = tmp_path / "skip_dep" skip_dep.mkdir(exist_ok=True) (skip_dep / "setup.py").write_text( 'from setuptools import setup; setup(name="skip-dep", version="0.1.0")', ) (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy
local_dependencies: - local: ../skip_dep use: skip """, ), ) deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert not any("skip-dep" in dep for dep in deps.dependencies) assert not any("file://" in dep for dep in deps.dependencies) def test_use_pypi_entries_not_readded(tmp_path: Path) -> None: """Entries marked `use: pypi` rely solely on their PyPI alternative.""" project = tmp_path / "project" project.mkdir(exist_ok=True) local_dep = tmp_path / "pypi_dep" local_dep.mkdir(exist_ok=True) (local_dep / "setup.py").write_text( 'from setuptools import setup; setup(name="pypi-dep", version="0.1.0")', ) (project / "requirements.yaml").write_text( textwrap.dedent( """\ dependencies: - numpy local_dependencies: - local: ../pypi_dep use: pypi pypi: company-pypi-dep==2.0 """, ), ) deps = get_python_dependencies( project / "requirements.yaml", include_local_dependencies=True, ) assert "numpy" in deps.dependencies assert any( dep.replace(" ", "") == "company-pypi-dep==2.0" for dep in deps.dependencies ) assert not any("pypi-dep @ file://" in dep for dep in deps.dependencies) ================================================ FILE: tests/test_setuptools_integration.py ================================================ """Tests for setuptools integration.""" import textwrap from pathlib import Path from unittest.mock import patch import pytest from unidep._setuptools_integration import filter_python_dependencies from unidep.utils import ( package_name_from_path, package_name_from_pyproject_toml, package_name_from_setup_cfg, package_name_from_setup_py, ) REPO_ROOT = Path(__file__).parent.parent def test_package_name_from_path() -> None: example = REPO_ROOT / "example" # Could not find the package name, so it uses the folder name assert package_name_from_path(example) == "example" # The following should read from the setup.py or pyproject.toml file assert package_name_from_path(example / "hatch_project") == "hatch_project" assert ( package_name_from_pyproject_toml(example / "hatch_project" / "pyproject.toml") == "hatch_project" ) assert package_name_from_path(example / "hatch2_project") == "hatch2_project" assert ( package_name_from_pyproject_toml(example / "hatch2_project" / "pyproject.toml") == "hatch2_project" ) assert ( package_name_from_path(example / "pyproject_toml_project") == "pyproject_toml_project" ) assert ( package_name_from_pyproject_toml( example / "pyproject_toml_project" / "pyproject.toml", ) == "pyproject_toml_project" ) assert package_name_from_path(example / "setup_py_project") == "setup_py_project" assert ( package_name_from_setup_py(example / "setup_py_project" / "setup.py") == "setup_py_project" ) assert ( package_name_from_path(example / "setuptools_project") == "setuptools_project" ) assert ( package_name_from_pyproject_toml( example / "setuptools_project" / "pyproject.toml", ) == "setuptools_project" ) def test_package_name_from_cfg(tmp_path: Path) -> None: setup_cfg = tmp_path / "setup.cfg" setup_cfg.write_text( textwrap.dedent( """\ [metadata] name = setup_cfg_project """, ), ) assert package_name_from_path(tmp_path) == "setup_cfg_project" assert package_name_from_setup_cfg(setup_cfg) == "setup_cfg_project" missing = tmp_path / "missing" / "setup.cfg" assert not missing.exists() with pytest.raises(KeyError): package_name_from_setup_cfg(missing) setup_cfg2 = tmp_path / "setup.cfg" setup_cfg2.write_text( textwrap.dedent( """\ [metadata] yolo = missing """, ), ) with pytest.raises(KeyError): 
package_name_from_setup_cfg(setup_cfg2) def test_package_name_from_setup_py_requires_literal_name(tmp_path: Path) -> None: setup_py = tmp_path / "setup.py" setup_py.write_text( textwrap.dedent( """\ from setuptools import setup NAME = "dynamic_name" setup(name=NAME) """, ), ) with pytest.raises( KeyError, match=r"Could not find the package name in the setup\.py", ): package_name_from_setup_py(setup_py) def test_package_name_from_path_falls_back_on_invalid_pyproject(tmp_path: Path) -> None: pyproject_toml = tmp_path / "pyproject.toml" pyproject_toml.write_text("this is not valid toml = [") assert package_name_from_path(tmp_path) == tmp_path.name def test_package_name_from_path_falls_back_on_invalid_setup_py(tmp_path: Path) -> None: setup_py = tmp_path / "setup.py" setup_py.write_text("from setuptools import setup\nsetup(name='missing'") assert package_name_from_path(tmp_path) == tmp_path.name def test_package_name_from_path_does_not_suppress_unexpected_errors( tmp_path: Path, ) -> None: setup_py = tmp_path / "setup.py" setup_py.write_text("from setuptools import setup\nsetup(name='pkg')") with patch( "unidep.utils.package_name_from_setup_py", side_effect=RuntimeError("boom"), ), pytest.raises(RuntimeError, match="boom"): package_name_from_path(tmp_path) def test_filter_python_dependencies_rejects_resolved_dict_input() -> None: with pytest.raises( TypeError, match="now requires dependency entries", ): filter_python_dependencies({}) # type: ignore[arg-type] ================================================ FILE: tests/test_unidep.py ================================================ """unidep tests.""" from __future__ import annotations import textwrap from pathlib import Path, PureWindowsPath from typing import TYPE_CHECKING, Any import pytest from ruamel.yaml import YAML from unidep import ( create_conda_env_specification, filter_python_dependencies, find_requirements_files, get_python_dependencies, parse_local_dependencies, parse_requirements, write_conda_environment_file, ) from unidep._conda_env import CondaEnvironmentSpec from unidep._conflicts import ( VersionConflictError, _pop_unused_platforms_and_maybe_expand_none, resolve_conflicts, ) from unidep._setuptools_integration import _path_to_file_uri from unidep.platform_definitions import Platform, Spec from unidep.utils import is_pip_installable from .helpers import maybe_as_toml if TYPE_CHECKING: import sys from unidep.platform_definitions import CondaPip if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal REPO_ROOT = Path(__file__).parent.parent @pytest.fixture(params=["toml", "yaml"]) def setup_test_files( request: pytest.FixtureRequest, tmp_path: Path, ) -> tuple[Path, Path]: d1 = tmp_path / "dir1" d1.mkdir() f1 = d1 / "requirements.yaml" f1.write_text("dependencies:\n - numpy\n - conda: mumps") d2 = tmp_path / "dir2" d2.mkdir() f2 = d2 / "requirements.yaml" f2.write_text("dependencies:\n - pip: pandas") f1 = maybe_as_toml(request.param, f1) f2 = maybe_as_toml(request.param, f2) return (f1, f2) def test_find_requirements_files( tmp_path: Path, setup_test_files: tuple[Path, Path], ) -> None: # Make sure to pass the depth argument correctly if your function expects it. 
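# Editor's note: a minimal sketch of what the `depth` limit below means,
# illustrative only; the real `find_requirements_files` may match more
# filenames (e.g. pyproject.toml) or order results differently, and the
# helper name `_scan` is hypothetical:
def _scan(root: Path, depth: int) -> list[Path]:
    # depth=0 keeps only files directly in `root`, depth=1 adds direct
    # subdirectories, and so on.
    return sorted(
        p
        for p in root.rglob("requirements.yaml")
        if len(p.relative_to(root).parts) - 1 <= depth
    )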
found_files = find_requirements_files( tmp_path, depth=1, verbose=True, ) # Convert found_files to absolute paths for comparison absolute_results = sorted(str(p.resolve()) for p in found_files) absolute_test_files = sorted(str(p.resolve()) for p in setup_test_files) assert absolute_results == absolute_test_files def test_find_requirements_files_depth(tmp_path: Path) -> None: # Create a nested directory structure (tmp_path / "dir1").mkdir() (tmp_path / "dir1/dir2").mkdir() (tmp_path / "dir1/dir2/dir3").mkdir() # Create test files (tmp_path / "requirements.yaml").touch() (tmp_path / "dir1/requirements.yaml").touch() (tmp_path / "dir1/dir2/requirements.yaml").touch() (tmp_path / "dir1/dir2/dir3/requirements.yaml").touch() # Test depth=0 assert len(find_requirements_files(tmp_path, depth=0)) == 1 # Test depth=1 assert len(find_requirements_files(tmp_path, depth=1)) == 2 # Test depth=2 assert len(find_requirements_files(tmp_path, depth=2)) == 3 # Test depth=3 assert len(find_requirements_files(tmp_path, depth=3)) == 4 # Test depth=4 (or more) assert len(find_requirements_files(tmp_path, depth=4)) == 4 @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_requirements( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 # [linux64] - foo # [unix] - bar >1 - bar """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="pip", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="conda", selector="unix", identifier="530d9eaa", ), Spec( name="foo", which="pip", selector="unix", identifier="530d9eaa", ), ], "bar": [ Spec( name="bar", which="conda", pin=">1", identifier="08fd8713", ), Spec( name="bar", which="pip", pin=">1", identifier="08fd8713", ), Spec( name="bar", which="conda", identifier="9e467fa1", ), Spec( name="bar", which="pip", identifier="9e467fa1", ), ], } @pytest.mark.parametrize("verbose", [True, False]) def test_generate_conda_env_file( tmp_path: Path, verbose: bool, # noqa: FBT001 setup_test_files: tuple[Path, Path], ) -> None: output_file = tmp_path / "environment.yaml" requirements = parse_requirements(*setup_test_files, verbose=verbose) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) write_conda_environment_file(env_spec, str(output_file), verbose=verbose) with output_file.open() as f, YAML(typ="safe") as yaml: env_data = yaml.load(f) assert "dependencies" in env_data assert "numpy" in env_data["dependencies"] assert {"pip": ["pandas"]} in env_data["dependencies"] def test_generate_conda_env_stdout( setup_test_files: tuple[Path, Path], capsys: pytest.CaptureFixture, ) -> None: requirements = parse_requirements(*setup_test_files) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) write_conda_environment_file(env_spec, output_file=None) captured = capsys.readouterr() assert "dependencies" in captured.out assert "numpy" in captured.out assert "- pandas" in captured.out @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_create_conda_env_specification_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> 
None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - yolo # [arm64] - foo # [linux64] - conda: bar # [win] - pip: pip-package - pip: pip-package2 # [arm64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == [ {"sel(linux)": "foo"}, {"sel(osx)": "yolo"}, {"sel(win)": "bar"}, ] expected_pip = [ "pip-package", "pip-package2; sys_platform == 'darwin' and platform_machine == 'arm64'", ] assert env_spec.pip == expected_pip # Test on two platforms platforms: list[Platform] = ["osx-arm64", "win-64"] env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms, ) assert env_spec.conda == [{"sel(osx)": "yolo"}, {"sel(win)": "bar"}] assert sorted(env_spec.pip) == sorted(expected_pip) # Test with comment selector env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms, selector="comment", ) assert env_spec.conda == ["yolo", "bar"] assert env_spec.pip == ["pip-package", "pip-package2"] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: text = "".join(f.readlines()) assert "- yolo # [arm64]" in text assert "- bar # [win64]" in text with pytest.raises(ValueError, match="Invalid platform"): resolve_conflicts( requirements.requirements, ["unknown-platform"], # type: ignore[list-item] ) def test_verbose_output(tmp_path: Path, capsys: pytest.CaptureFixture) -> None: f = tmp_path / "dir3" / "requirements.yaml" f.parent.mkdir() f.write_text("dependencies:\n - scipy") find_requirements_files(tmp_path, verbose=True) captured = capsys.readouterr() assert "Scanning in" in captured.out assert str(tmp_path / "dir3") in captured.out parse_requirements(f, verbose=True) captured = capsys.readouterr() assert "Parsing" in captured.out assert str(f) in captured.out write_conda_environment_file( CondaEnvironmentSpec( channels=[], pip_indices=[], platforms=[], conda=[], pip=[], ), verbose=True, ) captured = capsys.readouterr() assert "Generating environment file at" in captured.out assert "Environment file generated successfully." 
in captured.out def test_create_conda_env_specification_rejects_resolved_dict_input() -> None: resolved: Any = {} with pytest.raises( TypeError, match="now requires dependency entries", ): create_conda_env_specification(resolved, [], []) def test_pop_unused_platforms_removes_non_requested_platform() -> None: linux_spec = Spec(name="foo", which="conda", identifier="linux") osx_spec = Spec(name="foo", which="conda", identifier="osx") platform_data: dict[Platform | None, dict[CondaPip, list[Spec]]] = { "linux-64": {"conda": [linux_spec]}, "osx-arm64": {"conda": [osx_spec]}, } _pop_unused_platforms_and_maybe_expand_none(platform_data, ["osx-arm64"]) assert platform_data == {"osx-arm64": {"conda": [osx_spec]}} def test_extract_python_requires(setup_test_files: tuple[Path, Path]) -> None: f1, f2 = setup_test_files requires1 = get_python_dependencies(str(f1)) assert requires1.dependencies == ["numpy"] requires2 = get_python_dependencies(str(f2)) assert requires2.dependencies == ["pandas"] # Test with a file that doesn't exist with pytest.raises(FileNotFoundError): get_python_dependencies("nonexistent_file.yaml", raises_if_missing=True) assert ( get_python_dependencies( "nonexistent_file.yaml", raises_if_missing=False, ).dependencies == [] ) def test_pip_install_local_dependencies(tmp_path: Path) -> None: p = tmp_path / "pkg" / "requirements.yaml" p.parent.mkdir(exist_ok=True) p.write_text( textwrap.dedent( """\ dependencies: - foo local_dependencies: - ../local_package """, ), ) deps = get_python_dependencies(p, raises_if_missing=False) assert deps.dependencies == ["foo"] deps = get_python_dependencies(p, include_local_dependencies=True) assert deps.dependencies == ["foo"] # because the local package doesn't exist local_package = tmp_path / "local_package" local_package.mkdir(exist_ok=True, parents=True) assert not is_pip_installable(local_package) (local_package / "setup.py").touch() assert is_pip_installable(local_package) deps = get_python_dependencies(p, include_local_dependencies=True) assert deps.dependencies == [ "foo", f"local_package @ {_path_to_file_uri(local_package)}", ] def test_path_to_file_uri_handles_windows_drive() -> None: uri = _path_to_file_uri(PureWindowsPath("D:/projects/Uni Dep")) assert uri == "file:///D:/projects/Uni%20Dep" @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_channels(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path) -> None: p = tmp_path / "requirements.yaml" p.write_text("channels:\n - conda-forge\n - defaults") p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.channels == ["conda-forge", "defaults"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_surrounding_comments( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: # This is a comment before - yolo # [osx] # This is a comment after # This is another comment - foo # [linux] # And this is a comment after - bar # [win] # Next is an empty comment - baz # - pip: pip-package # - pip: pip-package2 # [osx] # """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "yolo": [ Spec( name="yolo", which="conda", selector="osx", identifier="8b0c4c31", ), Spec( name="yolo", which="pip", selector="osx", identifier="8b0c4c31", ), ], "foo": [ Spec( name="foo", which="conda", selector="linux", identifier="ecd4baa6", ), Spec( name="foo", 
which="pip", selector="linux", identifier="ecd4baa6", ), ], "bar": [ Spec( name="bar", which="conda", selector="win", identifier="8528de75", ), Spec( name="bar", which="pip", selector="win", identifier="8528de75", ), ], "baz": [ Spec( name="baz", which="conda", identifier="9e467fa1", ), Spec(name="baz", which="pip", identifier="9e467fa1"), ], "pip-package": [ Spec( name="pip-package", which="pip", identifier="5813b64a", ), ], "pip-package2": [ Spec( name="pip-package2", which="pip", selector="osx", identifier="1c0fa4c4", ), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_filter_pip_and_conda( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: # Setup a sample ParsedRequirements instance with platform selectors p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - conda: package1 # [linux64] - conda: package2 # [osx64] - pip: package3 - pip: package4 # [unix] - common_package # [unix] - conda: shared_package # [linux64] pip: shared_package # [win64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "package1": [ Spec( name="package1", which="conda", selector="linux64", identifier="c292b98a", ), ], "package2": [ Spec( name="package2", which="conda", selector="osx64", identifier="b2ac468f", ), ], "package3": [ Spec( name="package3", which="pip", identifier="08fd8713", ), ], "package4": [ Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), ], "common_package": [ Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", ), ], "shared_package": [ Spec( name="shared_package", which="conda", selector="linux64", identifier="1599d575", ), Spec( name="shared_package", which="pip", selector="win64", identifier="46630b59", ), ], } resolved = resolve_conflicts( requirements.requirements, requirements.platforms, ) assert resolved == { "package1": { "linux-64": { "conda": Spec( name="package1", which="conda", selector="linux64", identifier="c292b98a", ), }, }, "package2": { "osx-64": { "conda": Spec( name="package2", which="conda", selector="osx64", identifier="b2ac468f", ), }, }, "package3": { None: { "pip": Spec( name="package3", which="pip", identifier="08fd8713", ), }, }, "package4": { "linux-64": { "pip": Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), }, "linux-aarch64": { "pip": Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), }, "linux-ppc64le": { "pip": Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), }, "osx-64": { "pip": Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), }, "osx-arm64": { "pip": Spec( name="package4", which="pip", selector="unix", identifier="1d5d7757", ), }, }, "common_package": { "linux-64": { "conda": Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", ), }, "linux-aarch64": { "conda": Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", ), }, "linux-ppc64le": { "conda": Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), "pip": Spec( name="common_package", which="pip", selector="unix", 
identifier="f78244dc", ), }, "osx-64": { "conda": Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", ), }, "osx-arm64": { "conda": Spec( name="common_package", which="conda", selector="unix", identifier="f78244dc", ), "pip": Spec( name="common_package", which="pip", selector="unix", identifier="f78244dc", ), }, }, "shared_package": { "linux-64": { "conda": Spec( name="shared_package", which="conda", selector="linux64", identifier="1599d575", ), }, "win-64": { "pip": Spec( name="shared_package", which="pip", selector="win64", identifier="46630b59", ), }, }, } # Pip python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == [ "common_package; sys_platform == 'linux' or sys_platform == 'darwin'", "package3", "package4; sys_platform == 'linux' or sys_platform == 'darwin'", "shared_package; sys_platform == 'win32' and platform_machine == 'AMD64'", ] # Conda conda_env_spec = create_conda_env_specification( requirements.dependency_entries, channels=requirements.channels, pip_indices=requirements.pip_indices, platforms=requirements.platforms, ) def sort(x: list[dict[str, str]]) -> list[dict[str, str]]: return sorted(x, key=lambda x: tuple(x.items())) assert sort(conda_env_spec.conda) == sort( # type: ignore[arg-type] [ {"sel(linux)": "package1"}, {"sel(osx)": "package2"}, {"sel(osx)": "common_package"}, {"sel(linux)": "common_package"}, {"sel(linux)": "shared_package"}, ], ) assert sorted(conda_env_spec.pip) == sorted( [ "package3", "package4; sys_platform == 'linux' or sys_platform == 'darwin'", "shared_package; sys_platform == 'win32' and platform_machine == 'AMD64'", ], ) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_duplicates_with_version( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 # [linux64] - foo # [linux64] - bar """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="pip", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="conda", selector="linux64", identifier="dd6a8aaf", ), Spec( name="foo", which="pip", selector="linux64", identifier="dd6a8aaf", ), ], "bar": [ Spec( name="bar", which="conda", identifier="08fd8713", ), Spec( name="bar", which="pip", identifier="08fd8713", ), ], } resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { "linux-64": { "conda": Spec( name="foo", which="conda", selector="linux64", pin=">1", identifier="c292b98a", ), "pip": Spec( name="foo", which="pip", selector="linux64", pin=">1", identifier="c292b98a", ), }, }, "bar": { None: { "conda": Spec( name="bar", which="conda", identifier="08fd8713", ), "pip": Spec( name="bar", which="pip", identifier="08fd8713", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == ["bar", {"sel(linux)": "foo >1"}] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == [ "bar", "foo >1; sys_platform == 
'linux' and platform_machine == 'x86_64'", ] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_duplicates_different_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 # [linux64] - foo <=2 # [linux] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="pip", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="conda", selector="linux", pin="<=2", identifier="ecd4baa6", ), Spec( name="foo", which="pip", selector="linux", pin="<=2", identifier="ecd4baa6", ), ], } resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { "linux-64": { "conda": Spec( name="foo", which="conda", pin=">1,<=2", identifier="c292b98a", ), "pip": Spec( name="foo", which="pip", pin=">1,<=2", identifier="c292b98a", ), }, "linux-aarch64": { "conda": Spec( name="foo", which="conda", selector="linux", pin="<=2", identifier="ecd4baa6", ), "pip": Spec( name="foo", which="pip", selector="linux", pin="<=2", identifier="ecd4baa6", ), }, "linux-ppc64le": { "conda": Spec( name="foo", which="conda", selector="linux", pin="<=2", identifier="ecd4baa6", ), "pip": Spec( name="foo", which="pip", selector="linux", pin="<=2", identifier="ecd4baa6", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == [{"sel(linux)": "foo <=2,>1"}] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == [ "foo <=2,>1; sys_platform == 'linux' and platform_machine == 'x86_64'", "foo <=2; sys_platform == 'linux' and platform_machine == 'aarch64' or " "sys_platform == 'linux' and platform_machine == 'ppc64le'", ] # now only use linux-64 platforms: list[Platform] = ["linux-64"] env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms, ) assert env_spec.conda == ["foo <=2,>1"] assert env_spec.pip == [] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_expand_none_with_different_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 # [linux64] - foo <3 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="pip", selector="linux64", pin=">1", identifier="c292b98a", ), Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", ), ], } resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { "linux-64": { "conda": Spec( name="foo", which="conda", pin=">1,<3", identifier="c292b98a", ), "pip": Spec( name="foo", which="pip", pin=">1,<3", identifier="c292b98a", ), }, "linux-aarch64": { "conda": Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), "pip": Spec( name="foo", which="pip", 
pin="<3", identifier="5eb93b8c", ), }, "linux-ppc64le": { "conda": Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", ), }, "osx-64": { "conda": Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", ), }, "osx-arm64": { "conda": Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", ), }, "win-64": { "conda": Spec( name="foo", which="conda", pin="<3", identifier="5eb93b8c", ), "pip": Spec( name="foo", which="pip", pin="<3", identifier="5eb93b8c", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == [ {"sel(linux)": "foo >1,<3"}, {"sel(osx)": "foo <3"}, {"sel(win)": "foo <3"}, ] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == [ "foo <3; sys_platform == 'linux' and platform_machine == 'aarch64' or " "sys_platform == 'linux' and platform_machine == 'ppc64le' or " "sys_platform == 'darwin' and platform_machine == 'x86_64' or " "sys_platform == 'darwin' and platform_machine == 'arm64' or " "sys_platform == 'win32' and platform_machine == 'AMD64'", "foo >1,<3; sys_platform == 'linux' and platform_machine == 'x86_64'", ] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_different_pins_on_conda_and_pip( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: foo >1 conda: foo <1 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", pin="<1", identifier="17e5d607", ), Spec( name="foo", which="pip", pin=">1", identifier="17e5d607", ), ], } resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { None: { "conda": Spec( name="foo", which="conda", pin="<1", identifier="17e5d607", ), "pip": Spec( name="foo", which="pip", pin=">1", identifier="17e5d607", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == ["foo <1"] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo >1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_pinned_conda_not( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: foo >1 conda: foo """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { None: { "conda": Spec( name="foo", which="conda", identifier="17e5d607", ), "pip": Spec( name="foo", which="pip", pin=">1", identifier="17e5d607", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["foo 
>1"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo >1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_conda_pinned_pip_not( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: foo conda: foo >1 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "foo": { None: { "conda": Spec( name="foo", which="conda", pin=">1", identifier="17e5d607", ), "pip": Spec( name="foo", which="pip", identifier="17e5d607", ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == ["foo >1"] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_get_python_dependencies_preserves_platform_specific_pip_with_pinned_conda( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ platforms: - linux-64 - osx-arm64 dependencies: - conda: mypackage >=1.0 variant* # [linux64] pip: mypackage # [linux64] - mypackage # [arm64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) deps = get_python_dependencies(p, verbose=False) assert deps.dependencies == [ "mypackage; sys_platform == 'linux' and platform_machine == 'x86_64' " "or sys_platform == 'darwin' and platform_machine == 'arm64'", ] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_filter_python_dependencies_with_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo # [unix] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) python_deps = filter_python_dependencies( requirements.dependency_entries, ["linux-64"], ) assert python_deps == [ "foo; sys_platform == 'linux' and platform_machine == 'x86_64'", ] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_conda_with_comments( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - adaptive # [linux64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="comment", ) assert env_spec.conda == ["adaptive"] assert env_spec.pip == [] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: lines = f.readlines() dependency_line = next(line for line in lines if "adaptive" in line) assert "- adaptive # [linux64]" in dependency_line @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_duplicate_names(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - conda: flatbuffers - pip: flatbuffers conda: python-flatbuffers """, ), ) p = 
maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == ["flatbuffers", "python-flatbuffers"] assert env_spec.pip == [] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["flatbuffers"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_conflicts_when_selector_comment( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 # [linux64] - foo <2 # [linux] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="comment", ) assert env_spec.conda == ["foo >1,<2", "foo <2", "foo <2"] assert env_spec.pip == [] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: text = "".join(f.readlines()) assert "- foo >1,<2 # [linux64]" in text assert "- foo <2 # [aarch64]" in text assert "- foo <2 # [ppc64le]" in text # With just [unix] p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 - foo <2 # [unix] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="comment", ) assert env_spec.conda == [ "foo >1,<2", "foo >1,<2", "foo >1,<2", "foo >1,<2", "foo >1,<2", "foo >1", ] assert env_spec.pip == [] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: text = "".join(f.readlines()) assert "- foo >1,<2 # [linux64]" in text assert "- foo >1,<2 # [osx64]" in text assert "- foo >1,<2 # [arm64]" in text assert "- foo >1,<2 # [aarch64]" in text assert "- foo >1,<2 # [ppc64le]" in text assert "- foo >1 # [win64]" in text @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_platforms_section_in_yaml( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ platforms: - linux-64 - osx-arm64 dependencies: - foo - bar # [win] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="sel", ) assert env_spec.conda == ["foo"] assert env_spec.pip == [] assert env_spec.platforms == ["linux-64", "osx-arm64"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_platforms_section_in_yaml_similar_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ channels: - conda-forge platforms: - linux-64 - linux-aarch64 dependencies: - foo - bar # [win] - yolo <1 # [aarch64] - yolo >1 # [linux64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) 
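# Editor's note: the ValueError asserted below arises because conda's `sel()`
# selectors only distinguish OS families (sel(linux)/sel(osx)/sel(win)), so two
# linux platforms with different pins collapse onto the same selector. A tiny
# illustration of that collapse; the mapping here is an editor's sketch, not
# unidep code:
_sel_family = {"linux-64": "linux", "linux-aarch64": "linux", "win-64": "win"}
assert _sel_family["linux-64"] == _sel_family["linux-aarch64"]  # hence the error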
requirements = parse_requirements(p, verbose=False) with pytest.raises( ValueError, match="Use selector='comment' instead", ): create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="sel", ) python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == [ "foo", "yolo <1; sys_platform == 'linux' and platform_machine == 'aarch64'", "yolo >1; sys_platform == 'linux' and platform_machine == 'x86_64'", ] # Test with comment selector env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="comment", ) assert env_spec.conda == ["foo", "yolo >1", "yolo <1"] assert env_spec.pip == [] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: text = "".join(f.readlines()) assert "- yolo >1 # [linux64]" in text assert "- yolo <1 # [aarch64]" in text assert "platforms:" in text assert "- linux-64" in text assert "- linux-aarch64" in text @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_conda_with_non_platform_comment( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ channels: - conda-forge dependencies: - pip: qsimcirq # [linux64] - pip: slurm-usage # added to avoid https://github.com/conda/conda-lock/pull/564 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, selector="comment", ) assert env_spec.conda == [] assert env_spec.pip == ["qsimcirq", "slurm-usage"] write_conda_environment_file(env_spec, str(tmp_path / "environment.yaml")) with (tmp_path / "environment.yaml").open() as f: lines = "".join(f.readlines()) assert "- qsimcirq # [linux64]" in lines assert "- slurm-usage" in lines assert " - pip:" in lines @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_and_conda_different_name_on_linux64( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" # On linux64, the conda package is called "cuquantum-python" and # the pip package is called "cuquantum". We test that the final # environment file does not contain both packages.
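    # Explanatory note (added): the `conda:`/`pip:` pair below comes from a
    # single entry in the requirements file, which unidep appears to record by
    # giving both resulting `Spec`s the same `identifier` (see "c292b98a" in
    # the expected values). That shared identifier is what allows the env spec
    # to keep only the conda package and drop the equivalent pip one.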
p.write_text( textwrap.dedent( """\ name: test channels: - conda-forge dependencies: - conda: cuquantum-python # [linux64] pip: cuquantum # [linux64] platforms: - linux-64 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=True) expected = { "cuquantum-python": [ Spec( name="cuquantum-python", which="conda", selector="linux64", identifier="c292b98a", ), ], "cuquantum": [ Spec( name="cuquantum", which="pip", selector="linux64", identifier="c292b98a", ), ], } assert requirements.requirements == expected resolved = resolve_conflicts(requirements.requirements, requirements.platforms) expected_resolved = { "cuquantum-python": { "linux-64": { "conda": Spec( name="cuquantum-python", which="conda", selector="linux64", identifier="c292b98a", ), }, }, "cuquantum": { "linux-64": { "pip": Spec( name="cuquantum", which="pip", selector="linux64", identifier="c292b98a", ), }, }, } assert resolved == expected_resolved env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == ["cuquantum-python"] assert env_spec.pip == [] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_requirements_with_ignore_pin( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, ignore_pins=["foo"], verbose=False) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", identifier="17e5d607", ), Spec( name="foo", which="pip", identifier="17e5d607", ), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_requirements_with_skip_dependency( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 - bar - baz """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements( p, skip_dependencies=["foo", "bar"], verbose=False, ) assert requirements.requirements == { "baz": [ Spec( name="baz", which="conda", identifier="08fd8713", ), Spec( name="baz", which="pip", identifier="08fd8713", ), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pin_star_cuda(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - conda: qsimcirq * cuda* # [linux64] - conda: qsimcirq * cpu* # [arm64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p) assert requirements.requirements == { "qsimcirq": [ Spec( name="qsimcirq", which="conda", selector="linux64", pin="* cuda*", identifier="c292b98a", ), Spec( name="qsimcirq", which="conda", selector="arm64", pin="* cpu*", identifier="489f33e0", ), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_parse_requirements_with_overwrite_pins( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo >1 - conda: bar * cuda* """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements( p, overwrite_pins=["foo=1", "bar * cpu*"], verbose=False, ) assert requirements.requirements == { "foo": [ Spec( name="foo", which="conda", pin="=1", identifier="17e5d607", ), Spec( name="foo", which="pip", pin="=1", 
identifier="17e5d607", ), ], "bar": [ Spec( name="bar", which="conda", pin="* cpu*", identifier="5eb93b8c", ), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_duplicate_names_different_platforms( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: ray # [arm64] - conda: ray-core # [linux64] pip: ray # [linux64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements( p, overwrite_pins=["foo=1", "bar * cpu*"], verbose=False, ) assert requirements.requirements == { "ray": [ Spec( name="ray", which="pip", selector="arm64", identifier="1b26c5b2", ), Spec( name="ray", which="pip", selector="linux64", identifier="dd6a8aaf", ), ], "ray-core": [ Spec( name="ray-core", which="conda", selector="linux64", identifier="dd6a8aaf", ), ], } platforms_arm64: list[Platform] = ["osx-arm64"] env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms_arm64, ) assert env_spec.conda == [] assert env_spec.pip == ["ray"] platforms_linux64: list[Platform] = ["linux-64"] env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms_linux64, ) assert env_spec.conda == ["ray-core"] assert env_spec.pip == [] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_with_unused_platform( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - adaptive # [linux64] - rsync-time-machine >0.1 # [osx64] - rsync-time-machine <3 - rsync-time-machine >1 # [linux64] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) platforms: list[Platform] = ["linux-64"] env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, platforms, selector="comment", ) assert env_spec.conda == ["adaptive", "rsync-time-machine >1,<3"] assert env_spec.pip == [] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_sel_selector_merges_explicit_platform_pinnings( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ platforms: - linux-64 - linux-aarch64 - linux-ppc64le dependencies: - foo >1 # [linux64] - foo <=2 # [aarch64] - foo <=2 # [ppc64le] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, selector="sel", ) assert env_spec.conda == [{"sel(linux)": "foo >1,<=2"}] assert env_spec.pip == [] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_with_pinning( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - pip: qiskit-terra ==0.25.2.1 - pip: qiskit-terra ==0.25.2.2 """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=False) with pytest.raises( VersionConflictError, match=r"Invalid version pinning '==0\.25\.2\.1' for 'qiskit-terra'", ): resolve_conflicts(requirements.requirements, requirements.platforms) with pytest.raises( VersionConflictError, match=( r"Multiple exact version pinnings found: 
==0\.25\.2\.1, ==0\.25\.2\.2 " r"for `qiskit-terra`" ), ): create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) with pytest.raises( VersionConflictError, match=( r"Multiple exact version pinnings found: ==0\.25\.2\.1, ==0\.25\.2\.2 " r"for `qiskit-terra`" ), ): filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) p2 = tmp_path / "p2" / "requirements.yaml" p2.parent.mkdir() p2.write_text( textwrap.dedent( """\ dependencies: - pip: qiskit-terra =0.25.2.1 - pip: qiskit-terra =0.25.2.1 """, ), ) p2 = maybe_as_toml(toml_or_yaml, p2) requirements = parse_requirements(p2, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.pip_indices, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["qiskit-terra ==0.25.2.1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_with_pinning_special_case_wildcard( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - pip: qsimcirq * cuda* - pip: qsimcirq * cuda* """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=False) resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "qsimcirq": { None: { "pip": Spec( name="qsimcirq", which="pip", pin="* cuda*", identifier="17e5d607", ), }, }, } p2 = tmp_path / "p2" / "requirements.yaml" p2.parent.mkdir() p2.write_text( textwrap.dedent( """\ dependencies: - pip: qsimcirq * cuda* - pip: qsimcirq * cpu* """, ), ) p2 = maybe_as_toml(toml_or_yaml, p2) requirements = parse_requirements(p2, verbose=False) with pytest.raises( VersionConflictError, match=r"Invalid version pinning '\* cuda\*'", ): resolve_conflicts(requirements.requirements, requirements.platforms) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_with_pinning_special_case_git_repo( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - pip: adaptive @ git+https://github.com/python-adaptive/adaptive.git@main - pip: adaptive @ git+https://github.com/python-adaptive/adaptive.git@main """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=False) resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "adaptive": { None: { "pip": Spec( name="adaptive", which="pip", pin="@ git+https://github.com/python-adaptive/adaptive.git@main", identifier="17e5d607", ), }, }, } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_not_equal( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - adaptive != 1.0.0 - adaptive <2 """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=False) resolved = resolve_conflicts(requirements.requirements, requirements.platforms) assert resolved == { "adaptive": { None: { "conda": Spec( name="adaptive", which="conda", pin="!=1.0.0,<2", identifier="17e5d607", ), "pip": Spec( name="adaptive", which="pip", pin="!=1.0.0,<2", identifier="17e5d607", ), }, }, } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def 
test_dot_in_package_name( toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path, ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - ruamel.yaml """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=False) assert requirements.requirements == { "ruamel.yaml": [ Spec(name="ruamel.yaml", which="conda", identifier="17e5d607"), Spec(name="ruamel.yaml", which="pip", identifier="17e5d607"), ], } @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ dependencies: - adaptive != 1.0.0 - adaptive <2 optional_dependencies: test: - pytest """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False, extras="*") assert requirements.optional_dependencies.keys() == {"test"} assert requirements.optional_dependencies["test"].keys() == {"pytest"} requirements = parse_requirements(p, verbose=False, extras=[["test"]]) with pytest.raises(ValueError, match="Cannot specify `extras` list"): parse_requirements(Path(f"{p}[test]"), verbose=False, extras=[["test"]]) with pytest.raises(ValueError, match="Length of `extras`"): parse_requirements(p, verbose=False, extras=[[], []]) requirements2 = parse_requirements(Path(f"{p}[test]"), verbose=False) assert requirements2.optional_dependencies == requirements.optional_dependencies resolved = resolve_conflicts( requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved.keys() == {"adaptive", "pytest"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_multiple_sections( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ optional_dependencies: test: - pytest lint: - flake8 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False, extras=[["test"]]) assert requirements.optional_dependencies.keys() == {"test"} requirements = parse_requirements(p, verbose=False, extras=[["lint"]]) assert requirements.optional_dependencies.keys() == {"lint"} requirements = parse_requirements(p, verbose=False, extras=[["test", "lint"]]) assert requirements.optional_dependencies.keys() == {"test", "lint"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_get_python_dependencies( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ optional_dependencies: test: - pytest lint: - flake8 """, ), ) p = maybe_as_toml(toml_or_yaml, p) deps = get_python_dependencies(f"{p}[test]", verbose=False) assert deps.dependencies == [] assert deps.extras == {"test": ["pytest"], "lint": ["flake8"]} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_dep_with_extras( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ dependencies: - conda: adaptive pip: adaptive[notebook] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False, extras="*") assert requirements.optional_dependencies == {} resolved = resolve_conflicts( 
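        # Note (added comment): extras remain part of the package key, so
        # "adaptive" (conda) and "adaptive[notebook]" (pip) are resolved as
        # two independent entries -- see the expected dict below.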
requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved == { "adaptive": { None: { "conda": Spec( name="adaptive", which="conda", pin=None, identifier="17e5d607", selector=None, ), }, }, "adaptive[notebook]": { None: { "pip": Spec( name="adaptive[notebook]", which="pip", pin=None, identifier="17e5d607", selector=None, ), }, }, } env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["adaptive[notebook]"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["adaptive[notebook]"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_explicit_conda_pip_pair_with_different_names_prefers_pinned_pip( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - conda: python-graphviz pip: graphviz >1 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["graphviz >1"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["graphviz >1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_same_source_final_collisions_merge_pip_extras( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: foo[dev] - conda: bar pip: foo[test] """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["foo[dev,test]"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo[dev,test]"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_cross_source_final_collisions_raise_for_conda_like_outputs( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - foo - conda: bar pip: foo >1 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) with pytest.raises(ValueError, match="Final Dependency Collision"): create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo >1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_same_name_cross_family_collisions_choose_deterministically( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - conda: foo - pip: foo >1 platforms: - linux-64 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) assert env_spec.conda == [] assert 
env_spec.pip == ["foo >1"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["foo >1"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_pip_pep440_constraints_fall_back_to_explicit_joined_string( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( """\ dependencies: - pip: pkg ~=1.0 - pip: pkg <2 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) env_spec = create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) assert env_spec.conda == [] assert env_spec.pip == ["pkg ~=1.0,<2"] python_deps = filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) assert python_deps == ["pkg ~=1.0,<2"] @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) @pytest.mark.parametrize( ("first_pin", "second_pin"), [ (">1", "<1"), ("~=1.0", "<1"), ("==1", "!=1"), ], ) def test_pip_contradictory_pep440_constraints_raise( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], first_pin: str, second_pin: str, ) -> None: p = tmp_path / "requirements.yaml" p.write_text( textwrap.dedent( f"""\ dependencies: - pip: pkg {first_pin} - pip: pkg {second_pin} """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False) with pytest.raises(VersionConflictError): create_conda_env_specification( requirements.dependency_entries, requirements.channels, requirements.platforms, ) with pytest.raises(VersionConflictError): filter_python_dependencies( requirements.dependency_entries, requirements.platforms, ) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_local_dependency_in_dependencies_list( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ dependencies: - ../p # self """, ), ) p = maybe_as_toml(toml_or_yaml, p) with pytest.raises(ValueError, match=r"Use the `local_dependencies` section"): parse_requirements(p, verbose=False) @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_with_local_dependencies( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - adaptive optional_dependencies: test: - pytest """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) p2 = tmp_path / "p2" / "requirements.yaml" p2.parent.mkdir() p2.write_text( textwrap.dedent( """\ dependencies: - numthreads optional_dependencies: local: - ../p1 - black """, ), ) p2 = maybe_as_toml(toml_or_yaml, p2) requirements = parse_requirements(p2, verbose=True, extras="*") assert requirements.optional_dependencies.keys() == {"local"} assert requirements.optional_dependencies["local"].keys() == {"black"} assert requirements.requirements.keys() == {"adaptive", "numthreads"} resolved = resolve_conflicts( requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved.keys() == {"adaptive", "numthreads", "black"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_with_local_dependencies_with_extras( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], capsys: pytest.CaptureFixture, ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() 
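    # Layout sketched for clarity (added comment):
    #   tmp_path/p1/requirements.yaml -> deps: adaptive; optional "test": pytest
    #   tmp_path/p2/requirements.yaml -> deps: numthreads; optional "local": ../p1[test]
    # Requesting the local dependency as `../p1[test]` should promote p1's
    # "test" extras (pytest) into p2's regular dependency set, which is what
    # the assertions below verify.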
p1.write_text( textwrap.dedent( """\ dependencies: - adaptive optional_dependencies: test: - pytest """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) p2 = tmp_path / "p2" / "requirements.yaml" p2.parent.mkdir() p2.write_text( textwrap.dedent( """\ dependencies: - numthreads optional_dependencies: local: - ../p1[test] """, ), ) p2 = maybe_as_toml(toml_or_yaml, p2) requirements = parse_requirements(p2, verbose=True, extras="*") # The deps in the 'test' section in p1 will be moved to the dependencies. assert "Removing empty" in capsys.readouterr().out assert not requirements.optional_dependencies.keys() assert requirements.requirements.keys() == {"numthreads", "adaptive", "pytest"} # The local dependency section should still exist in p2 loc = parse_local_dependencies( Path(f"{p2}[local]"), verbose=True, check_pip_installable=False, ) assert len(loc) == 1 resolved = resolve_conflicts( requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved.keys() == {"adaptive", "numthreads", "pytest"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_with_dicts( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p1 = tmp_path / "p1" / "requirements.yaml" p1.parent.mkdir() p1.write_text( textwrap.dedent( """\ dependencies: - adaptive optional_dependencies: flat: - conda: python-flatbuffers pip: flatbuffers """, ), ) p1 = maybe_as_toml(toml_or_yaml, p1) requirements = parse_requirements(p1, verbose=True, extras="*") assert requirements.optional_dependencies.keys() == {"flat"} assert requirements.optional_dependencies["flat"].keys() == { "python-flatbuffers", "flatbuffers", } resolved = resolve_conflicts( requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved.keys() == {"adaptive", "python-flatbuffers", "flatbuffers"} @pytest.mark.parametrize("toml_or_yaml", ["toml", "yaml"]) def test_optional_dependencies_with_version_specifier( tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"], ) -> None: p = tmp_path / "p" / "requirements.yaml" p.parent.mkdir() p.write_text( textwrap.dedent( """\ dependencies: - adaptive optional_dependencies: specific: - adaptive =0.13.2 """, ), ) p = maybe_as_toml(toml_or_yaml, p) requirements = parse_requirements(p, verbose=False, extras="*") assert requirements.optional_dependencies.keys() == {"specific"} assert requirements.optional_dependencies["specific"].keys() == {"adaptive"} assert ( requirements.optional_dependencies["specific"]["adaptive"][0].pin == "=0.13.2" ) requirements = parse_requirements(p, verbose=False, extras=[["specific"]]) requirements2 = parse_requirements(Path(f"{p}[specific]"), verbose=False) assert requirements2.optional_dependencies == requirements.optional_dependencies resolved = resolve_conflicts( requirements.requirements, requirements.platforms, optional_dependencies=requirements.optional_dependencies, ) assert resolved.keys() == {"adaptive"} assert resolved["adaptive"][None]["conda"].pin == "=0.13.2" ================================================ FILE: tests/test_utils.py ================================================ """Tests for the unidep.utils module.""" from __future__ import annotations import importlib.metadata import sys from pathlib import Path from unittest.mock import patch import pytest from unidep.platform_definitions import Platform, Selector, Spec from unidep.utils import ( PathWithExtras, UnsupportedPlatformError, 
build_pep508_environment_marker, collect_selector_platforms, escape_unicode, extract_matching_platforms, get_package_version, identify_current_platform, parse_package_str, resolve_platforms, split_path_and_extras, ) if sys.version_info >= (3, 8): from typing import get_args else: # pragma: no cover from typing_extensions import get_args def test_escape_unicode() -> None: assert escape_unicode("foo\\n") == "foo\n" assert escape_unicode("foo\\t") == "foo\t" def test_build_pep508_environment_marker() -> None: # Test with a single platform assert ( build_pep508_environment_marker(["linux-64"]) == "sys_platform == 'linux' and platform_machine == 'x86_64'" ) # Test with multiple platforms assert ( build_pep508_environment_marker(["linux-64", "osx-64"]) == "sys_platform == 'linux' and platform_machine == 'x86_64' or sys_platform == 'darwin' and platform_machine == 'x86_64'" ) # Test with an empty list assert not build_pep508_environment_marker([]) # Test with a platform not in PEP508_MARKERS assert not build_pep508_environment_marker(["unknown-platform"]) # type: ignore[list-item] # Test with a mix of valid and invalid platforms assert ( build_pep508_environment_marker(["linux-64", "unknown-platform"]) # type: ignore[list-item] == "sys_platform == 'linux' and platform_machine == 'x86_64'" ) def test_spec_rendering_helpers() -> None: spec = Spec( name="numpy", which="conda", pin="=1.26,>=1.20", identifier="abc", selector="linux64", ) assert spec.pprint() == "numpy =1.26,>=1.20 # [linux64]" assert spec.name_with_pin() == "numpy =1.26,>=1.20" assert spec.name_with_pin(is_pip=True) == "numpy ==1.26,>=1.20" def test_detect_platform() -> None: with patch("platform.system", return_value="Linux"), patch( "platform.machine", return_value="x86_64", ): assert identify_current_platform() == "linux-64" with patch("platform.system", return_value="Linux"), patch( "platform.machine", return_value="aarch64", ): assert identify_current_platform() == "linux-aarch64" with patch("platform.system", return_value="Darwin"), patch( "platform.machine", return_value="x86_64", ): assert identify_current_platform() == "osx-64" with patch("platform.system", return_value="Darwin"), patch( "platform.machine", return_value="arm64", ): assert identify_current_platform() == "osx-arm64" with patch("platform.system", return_value="Windows"), patch( "platform.machine", return_value="AMD64", ): assert identify_current_platform() == "win-64" with patch("platform.system", return_value="Linux"), patch( "platform.machine", return_value="unknown", ), pytest.raises(UnsupportedPlatformError, match="Unsupported Linux architecture"): identify_current_platform() with patch("platform.system", return_value="Darwin"), patch( "platform.machine", return_value="unknown", ), pytest.raises(UnsupportedPlatformError, match="Unsupported macOS architecture"): identify_current_platform() with patch("platform.system", return_value="Windows"), patch( "platform.machine", return_value="unknown", ), pytest.raises( UnsupportedPlatformError, match="Unsupported Windows architecture", ): identify_current_platform() with patch("platform.system", return_value="Linux"), patch( "platform.machine", return_value="ppc64le", ): assert identify_current_platform() == "linux-ppc64le" with patch("platform.system", return_value="Unknown"), patch( "platform.machine", return_value="x86_64", ), pytest.raises(UnsupportedPlatformError, match="Unsupported operating system"): identify_current_platform() def test_collect_selector_platforms_with_optional_dependencies() -> None: 
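    # Added comment for clarity: `collect_selector_platforms` should expand
    # every selector found on the base requirements *and* on the
    # optional-dependency specs into the sorted union of concrete platforms,
    # e.g. linux64 -> linux-64; osx -> osx-64, osx-arm64; win -> win-64.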
requirements = { "numpy": [ Spec(name="numpy", which="conda", pin=">=1.20", identifier="a1"), Spec( name="numpy", which="conda", pin=">=1.20", identifier="a2", selector="linux64", ), ], } optional_dependencies = { "dev": { "pyobjc": [ Spec(name="pyobjc", which="pip", identifier="b1", selector="osx"), ], "pytest": [Spec(name="pytest", which="pip", selector="win")], }, } assert collect_selector_platforms(requirements, optional_dependencies) == [ "linux-64", "osx-64", "osx-arm64", "win-64", ] @pytest.mark.parametrize("empty_requested_platforms", [None, []]) def test_resolve_platforms_precedence_and_fallback( empty_requested_platforms: list[Platform] | None, ) -> None: assert resolve_platforms( requested_platforms=["osx-64", "osx-64"], declared_platforms=["linux-64"], selector_platforms=["win-64"], ) == ["osx-64"] assert resolve_platforms( requested_platforms=empty_requested_platforms, declared_platforms={"linux-64", "linux-aarch64"}, selector_platforms=["win-64"], ) == ["linux-64", "linux-aarch64"] assert resolve_platforms( requested_platforms=empty_requested_platforms, declared_platforms=None, selector_platforms=["win-64", "win-64"], ) == ["win-64"] with patch("unidep.utils.identify_current_platform", return_value="linux-64"): assert resolve_platforms( requested_platforms=empty_requested_platforms, declared_platforms=None, selector_platforms=None, default_current=True, ) == ["linux-64"] assert ( resolve_platforms( requested_platforms=empty_requested_platforms, declared_platforms=None, selector_platforms=None, default_current=False, ) == [] ) def test_parse_package_str() -> None: # Test with version pin assert parse_package_str("numpy >=1.20.0") == ("numpy", ">=1.20.0", None) assert parse_package_str("pandas<2.0,>=1.1.3") == ("pandas", "<2.0,>=1.1.3", None) # Test a name that includes a dash assert parse_package_str("python-yolo>=1.20.0") == ("python-yolo", ">=1.20.0", None) # Test with multiple version conditions assert parse_package_str("scipy>=1.2.3, <1.3") == ("scipy", ">=1.2.3, <1.3", None) # Test with no version pin assert parse_package_str("matplotlib") == ("matplotlib", None, None) # Test with whitespace variations assert parse_package_str("requests >= 2.25") == ("requests", ">= 2.25", None) # Test when installing from a URL url = "https://github.com/python-adaptive/adaptive.git@main" pin = f"@ git+{url}" assert parse_package_str(f"adaptive {pin}") == ("adaptive", pin, None) # Test with invalid input with pytest.raises(ValueError, match="Invalid package string"): parse_package_str(">=1.20.0 numpy") def test_path_with_extras_eq_handles_non_matching_object() -> None: path_with_extras = PathWithExtras(Path("requirements.yaml"), ["dev"]) assert path_with_extras.__eq__(object()) is NotImplemented def test_get_package_version_missing_package(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setattr( importlib.metadata, "version", lambda _name: (_ for _ in ()).throw(importlib.metadata.PackageNotFoundError), ) assert get_package_version("definitely-not-installed") is None def test_parse_package_str_with_selector() -> None: # Test with version pin assert parse_package_str("numpy >=1.20.0:linux64") == ( "numpy", ">=1.20.0", "linux64", ) assert parse_package_str("pandas<2.0,>=1.1.3:osx") == ( "pandas", "<2.0,>=1.1.3", "osx", ) # Test with multiple version conditions assert parse_package_str("scipy>=1.2.3, <1.3:win") == ( "scipy", ">=1.2.3, <1.3", "win", ) # Test with no version pin assert parse_package_str("matplotlib:win") == ("matplotlib", None, "win") # Test with whitespace variations 
assert parse_package_str("requests >= 2.25:win") == ("requests", ">= 2.25", "win") # Test when installing from a URL url = "https://github.com/python-adaptive/adaptive.git@main" pin = f"@ git+{url}" assert parse_package_str(f"adaptive {pin}:win") == ("adaptive", pin, "win") for sel in get_args(Selector): assert parse_package_str(f"numpy:{sel}") == ("numpy", None, sel) # Test with multiple selectors assert parse_package_str("numpy:linux64 win64") == ("numpy", None, "linux64 win64") with pytest.raises(ValueError, match="Invalid platform selector: `unknown`"): assert parse_package_str("numpy:linux64 unknown") def test_parse_package_str_with_extras() -> None: assert parse_package_str("numpy[full]") == ("numpy[full]", None, None) assert parse_package_str("numpy[full]:win") == ("numpy[full]", None, "win") assert parse_package_str("numpy[full]>1.20.0:win") == ( "numpy[full]", ">1.20.0", "win", ) assert parse_package_str("../path/to/package[full]") == ( "../path/to/package[full]", None, None, ) assert parse_package_str("../path/to/package[full]:win") == ( "../path/to/package[full]", None, "win", ) assert parse_package_str("../path/to/package[full]>1.20.0:win") == ( "../path/to/package[full]", ">1.20.0", "win", ) assert parse_package_str("python-yolo[full]>1.20.0:win") == ( "python-yolo[full]", ">1.20.0", "win", ) def test_extract_matching_platforms() -> None: # Test with a line having a linux selector content_linux = "dependency1 # [linux]" assert set(extract_matching_platforms(content_linux)) == { "linux-64", "linux-aarch64", "linux-ppc64le", } # Test with a line having a win selector content_win = "dependency2 # [win]" assert set(extract_matching_platforms(content_win)) == {"win-64"} # Test with a line having an osx64 selector content_osx64 = "dependency3 # [osx64]" assert set(extract_matching_platforms(content_osx64)) == {"osx-64"} # Test with a line having no selector content_none = "dependency4" assert extract_matching_platforms(content_none) == [] # Test with a comment line content_comment = "# This is a comment" assert extract_matching_platforms(content_comment) == [] # Test with a line having a unix selector content_unix = "dependency5 # [unix]" expected_unix = { "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", } assert set(extract_matching_platforms(content_unix)) == expected_unix # Test with a line having multiple selectors content_multi = "dependency7 # [linux64 unix]" expected_multi = { "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", } assert set(extract_matching_platforms(content_multi)) == expected_multi # Test with a line having multiple [] content_multi = "dependency7 # [linux64] [win]" with pytest.raises(ValueError, match="Multiple bracketed selectors"): extract_matching_platforms(content_multi) incorrect_platform = "dependency8 # [unknown-platform]" with pytest.raises(ValueError, match="Invalid platform selector"): extract_matching_platforms(incorrect_platform) def test_split_path_and_extras() -> None: # parse_with_extras s = "any/path[something, another]" path, extras = split_path_and_extras(s) assert path == Path("any/path") assert extras == ["something", "another"] pe = PathWithExtras(path, extras) assert pe.path_with_extras == Path("any/path[something,another]") # parse_without_extras s = "any/path" path, extras = split_path_and_extras(s) assert path == Path("any/path") assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) # parse_incorrect_format # Technically this path is not correct, but we don't check 
for multiple [] s = "any/path[something][another]" path, extras = split_path_and_extras(s) assert path == Path("any/path[something]") assert extras == ["another"] assert PathWithExtras(path, extras).path_with_extras == Path(s) # parse_empty_string s = "" path, extras = split_path_and_extras(s) assert path == Path() assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) s = "any/path[something]/other" path, extras = split_path_and_extras(s) assert path == Path("any/path[something]/other") assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) s = "any/path[something]/other[foo]" path, extras = split_path_and_extras(s) assert path == Path("any/path[something]/other") assert extras == ["foo"] assert PathWithExtras(path, extras).path_with_extras == Path(s) s = "any/path]something[" path, extras = split_path_and_extras(s) assert path == Path("any/path]something[") assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) s = "any/path[something" path, extras = split_path_and_extras(s) assert path == Path("any/path[something") assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) s = "any/path]something]" path, extras = split_path_and_extras(s) assert path == Path("any/path]something]") assert extras == [] assert PathWithExtras(path, extras).path_with_extras == Path(s) ================================================ FILE: tests/test_version_conflicts.py ================================================ """Tests for the version conflict resolution logic.""" from __future__ import annotations import pytest from unidep._conflicts import ( ALL_VERSION_OPERATORS, VersionConflictError, _combine_pinning_within_platform, _is_redundant, _is_valid_pinning, _parse_pinning, combine_version_pinnings, extract_version_operator, ) from unidep.platform_definitions import Spec def test_combining_versions() -> None: data = { None: { "conda": [ Spec(name="numpy", which="conda", pin=">1"), Spec(name="numpy", which="conda", pin="<2"), ], }, } resolved = _combine_pinning_within_platform(data) # type: ignore[arg-type] assert resolved == { None: { "conda": Spec(name="numpy", which="conda", pin=">1,<2"), }, } @pytest.mark.parametrize("operator", ["<", "<=", ">", ">=", "="]) @pytest.mark.parametrize("version", ["1", "1.0", "1.0.0", "1.0.0rc1"]) def test_is_valid_pinning(operator: str, version: str) -> None: assert _is_valid_pinning(f"{operator}{version}") @pytest.mark.parametrize( ("pinnings", "expected"), [ ([" > 0.0.1", " < 2", " = 1.0.0"], "=1.0.0"), (["<2", ">1"], "<2,>1"), ([">1", "<2"], ">1,<2"), (["<3", "<=3", "<4"], "<3"), (["=1", "=1"], "=1"), (["=2", "<3", "<=3", "<4"], "=2"), (["=2", ">1", "<3"], "=2"), (["=3", ">=2", "<=4"], "=3"), (["=3", ">1", "<4"], "=3"), (["=3", ">2", "<4"], "=3"), ([">=1", "<=1"], ">=1,<=1"), ([">=1", ">=1", "=1"], "=1"), ([">=1", ">0", "<=3", "<4"], ">=1,<=3"), ([">=1", ">0", "<=3", "<4", "!=1.5"], ">=1,<=3,!=1.5"), ([">=2", "<=2"], ">=2,<=2"), ([">=2", "<3"], ">=2,<3"), ([">0.0.1", "<2", "=1.0.0"], "=1.0.0"), ([">1", "<=3", "<4"], ">1,<=3"), ([">1", "<=3"], ">1,<=3"), # TODO #67: !=5 should be removed but this is not yet implemented # noqa: TD004, FIX002 # However, this is not a problem here because !=5 is redundant # as it is outside the range of >1 and <=3 ([">1", "<=3", "!=5"], ">1,<=3,!=5"), ([">1", ">=1", "<3", "<=3", ""], ">1,<3"), ([">1"], ">1"), ([], ""), ], ) def test_combine_version_pinnings(pinnings: list[str], expected: str) -> None: assert 
combine_version_pinnings(pinnings) == expected # Try reversing the order of the pinnings if "," not in expected: assert combine_version_pinnings(pinnings[::-1]) == expected else: parts = expected.split(",") assert combine_version_pinnings(pinnings[::-1]) == ",".join(parts[::-1]) @pytest.mark.parametrize( "pinnings", [ ["abc", "def"], ["==abc", ">2"], ["<=>abc", ">2"], [">1", "abc", "<=3", ""], ["abc", ">=1", "<=2"], ["3", "6"], [">", "<"], ], ) def test_invalid_pinnings(pinnings: list[str]) -> None: with pytest.raises(VersionConflictError, match="Invalid version pinning"): assert combine_version_pinnings(pinnings) @pytest.mark.parametrize( "pinnings", [[">2", "<1"], ["<1", ">2"], [">1", "<1"], ["<=1", ">1"], [">1", "<=1"]], ) def test_contradictory_pinnings(pinnings: list[str]) -> None: p1, p2 = pinnings with pytest.raises( VersionConflictError, match=f"Contradictory version pinnings found for `None`: {p1} and {p2}", ): combine_version_pinnings(pinnings) def test_exact_pinning_with_contradictory_ranges() -> None: with pytest.raises( VersionConflictError, match="Contradictory version pinnings found for `None`: =3 and <2", ): combine_version_pinnings(["=3", "<2", ">4"]) with pytest.raises( VersionConflictError, match="Contradictory version pinnings found for `None`: =3 and <1", ): assert combine_version_pinnings(["=3", "<1", ">4"]) def test_multiple_exact_pinnings() -> None: with pytest.raises( VersionConflictError, match="Multiple exact version pinnings found: =2, =3", ): combine_version_pinnings(["=2", "=3"]) def test_general_contradictory_pinnings() -> None: # This test ensures that contradictory non-exact pinnings raise a VersionConflictError with pytest.raises( VersionConflictError, match="Contradictory version pinnings found for `None`: >=2 and <1", ): combine_version_pinnings([">=2", "<1"]) def test_is_redundant() -> None: assert _is_redundant(">2", [">5"]) assert not _is_redundant(">5", [">2"]) assert _is_redundant("<5", ["<2"]) assert _is_redundant(">=2", [">2"]) assert not _is_redundant(">2", [">=2"]) @pytest.mark.parametrize("pinning", ["<<1", ">>1", "=<1", "=>1"]) def test_invalid_parse_pinning(pinning: str) -> None: with pytest.raises( VersionConflictError, match=f"Invalid version pinning: '{pinning}'", ): _parse_pinning(pinning) @pytest.mark.parametrize("op", ALL_VERSION_OPERATORS) def test_extract_version_operator_all_operators(op: str) -> None: assert extract_version_operator(f"{op}1.0") == op @pytest.mark.parametrize( "constraint", ["1.0", "abc", "", "hello world"], ) def test_extract_version_operator_no_operator(constraint: str) -> None: assert extract_version_operator(constraint) == "" def test_extract_version_operator_strips_whitespace() -> None: assert extract_version_operator(" >=1.0 ") == ">=" assert extract_version_operator(" <2.0") == "<" assert extract_version_operator(" 1.0 ") == "" ================================================ FILE: unidep/__init__.py ================================================ """unidep - Unified Conda and Pip requirements management.""" from unidep._conda_env import ( create_conda_env_specification, write_conda_environment_file, ) from unidep._dependencies_parsing import ( find_requirements_files, parse_local_dependencies, parse_requirements, ) from unidep._setuptools_integration import ( filter_python_dependencies, get_python_dependencies, ) from unidep._version import __version__ __all__ = [ "__version__", "create_conda_env_specification", "filter_python_dependencies", "find_requirements_files", "get_python_dependencies", 
"parse_local_dependencies", "parse_requirements", "write_conda_environment_file", ] ================================================ FILE: unidep/_cli.py ================================================ #!/usr/bin/env python3 """unidep - Unified Conda and Pip requirements management. This module provides a command-line tool for managing conda environment.yaml files. """ from __future__ import annotations import argparse import functools import importlib.util import itertools import json import os import platform import shutil import subprocess import sys import time from pathlib import Path from typing import TYPE_CHECKING from ruamel.yaml import YAML from unidep._conda_env import ( create_conda_env_specification, write_conda_environment_file, ) from unidep._conda_lock import conda_lock_command from unidep._dependencies_parsing import ( DependencyEntry, _load, find_requirements_files, parse_local_dependencies, parse_requirements, ) from unidep._pixi import generate_pixi_toml from unidep._setuptools_integration import ( filter_python_dependencies, get_python_dependencies, ) from unidep._version import __version__ from unidep.platform_definitions import Platform from unidep.utils import ( add_comment_to_file, escape_unicode, get_package_version, identify_current_platform, is_pip_installable, parse_folder_or_filename, parse_package_str, resolve_platforms, warn, ) if sys.version_info >= (3, 8): from typing import Literal, get_args else: # pragma: no cover from typing_extensions import Literal, get_args if TYPE_CHECKING: from collections.abc import Sequence try: # pragma: no cover from rich_argparse import RichHelpFormatter class _HelpFormatter(RichHelpFormatter): def _get_help_string(self, action: argparse.Action) -> str | None: # escapes "[" in text, otherwise e.g., [linux] is removed if action.help is not None: return action.help.replace("[", r"\[") return None except ImportError: # pragma: no cover from argparse import HelpFormatter as _HelpFormatter # type: ignore[assignment] _DEP_FILES = "`requirements.yaml` or `pyproject.toml`" CondaExecutable = Literal["conda", "mamba", "micromamba"] def _flatten_selected_dependency_entries( dependency_entries: list[DependencyEntry], optional_dependency_entries: dict[str, list[DependencyEntry]], ) -> list[DependencyEntry]: entries = list(dependency_entries) for group_entries in optional_dependency_entries.values(): entries.extend(group_entries) return entries def _collect_available_optional_dependency_groups( found_files: list[Path], ) -> list[str]: # Inspect only the top-level files so local-only groups remain visible # without traversing local dependencies. yaml = YAML(typ="rt") groups: set[str] = set() for found_file in found_files: groups.update(_load(found_file, yaml).get("optional_dependencies", {})) return sorted(groups) def _merge_optional_dependency_extras( *, found_files: list[Path], optional_dependencies: list[str], all_optional_dependencies: bool, ) -> list[list[str]] | Literal["*"] | None: if all_optional_dependencies: return "*" if not optional_dependencies: return None available_groups = _collect_available_optional_dependency_groups( found_files, ) missing_groups = [ group_name for group_name in dict.fromkeys(optional_dependencies) if group_name not in available_groups ] if missing_groups: missing = ", ".join(f"`{group_name}`" for group_name in missing_groups) if available_groups: available = ", ".join(f"`{group_name}`" for group_name in available_groups) print( "❌ Unknown optional dependency group(s): " f"{missing}. 
Valid groups: {available}.", ) else: print( "❌ Unknown optional dependency group(s): " f"{missing}. No optional dependency groups were found.", ) sys.exit(1) selected_groups = list(dict.fromkeys(optional_dependencies)) return [selected_groups.copy() for _ in found_files] def _collect_selected_conda_like_platforms( entries: list[DependencyEntry], ) -> list[Platform]: """Collect all platforms referenced directly by dependency selectors.""" selector_platforms: set[Platform] = set() for entry in entries: for spec in (entry.conda, entry.pip): if spec is None or spec.selector is None: continue entry_platforms = spec.platforms() if entry_platforms is not None: selector_platforms.update(entry_platforms) return sorted(selector_platforms) def _add_common_args( # noqa: PLR0912, C901 sub_parser: argparse.ArgumentParser, options: set[str], ) -> None: # pragma: no cover if "directory" in options: sub_parser.add_argument( "-d", "--directory", type=Path, default=".", help=f"Base directory to scan for {_DEP_FILES} file(s), by default `.`", ) if "depth" in options: sub_parser.add_argument( "--depth", type=int, default=1, help=f"Maximum depth to scan for {_DEP_FILES} files, by default 1", ) if "file" in options or "file-alt" in options: if "file-alt" in options: help_msg = ( f"A single {_DEP_FILES} file to use, or" " folder that contains that file. This is an alternative to using" f" `--directory` which searches for all {_DEP_FILES} files in the" " directory and its subdirectories." ) else: help_msg = ( f"The {_DEP_FILES} file to parse, or folder" " that contains that file, by default `.`" ) assert "conda-lock-file" not in options # both use "-f" sub_parser.add_argument( "-f", "--file", type=Path, default=[], action="append", help=help_msg, ) if "*files" in options: sub_parser.add_argument( "files", type=Path, nargs="+", help=f"The {_DEP_FILES} file(s) to parse" " or folder(s) that contain those file(s), by default `.`", default=None, ) if "verbose" in options: sub_parser.add_argument( "-v", "--verbose", action="store_true", help="Print verbose output", ) if "platform" in options: sub_parser.add_argument( "-p", "--platform", type=str, action="append", # Allow multiple instances of -p default=[], choices=get_args(Platform), help="The platform(s) to get the requirements for. " "Multiple platforms can be specified. " "If omitted, behavior is command-specific: platforms may be inferred " "from requirements files, otherwise the current platform is used.", ) if "editable" in options: sub_parser.add_argument( "-e", "--editable", action="store_true", help="Install the project in editable mode", ) if "skip-local" in options: sub_parser.add_argument( "--skip-local", action="store_true", help="Skip installing local dependencies", ) if "skip-pip" in options: sub_parser.add_argument( "--skip-pip", action="store_true", help=f"Skip installing pip dependencies from {_DEP_FILES}", ) if "skip-conda" in options: sub_parser.add_argument( "--skip-conda", action="store_true", help=f"Skip installing conda dependencies from {_DEP_FILES}", ) if "skip-dependency" in options: sub_parser.add_argument( "--skip-dependency", type=str, action="append", default=[], help="Skip installing a specific dependency that is in one of the" f" {_DEP_FILES}" " files. This option can be used multiple times, each" " time specifying a different package to skip." 
" For example, use `--skip-dependency pandas` to skip installing pandas.", ) if "no-dependencies" in options: sub_parser.add_argument( "--no-dependencies", "--no-deps", action="store_true", help=f"Skip installing dependencies from {_DEP_FILES}" " file(s) and only install local package(s). Useful after" " installing a `conda-lock.yml` file because then all" " dependencies have already been installed.", ) if "conda-executable" in options: sub_parser.add_argument( "--conda-executable", type=str, choices=("conda", "mamba", "micromamba"), help="The conda executable to use", default=None, ) if "conda-env" in options: grp = sub_parser.add_mutually_exclusive_group() grp.add_argument( "-n", "--conda-env-name", type=str, default=None, help="Name of the conda environment, if not provided, the currently" " active environment name is used, unless `--conda-env-prefix` is" " provided", ) grp.add_argument( "-p", # Overlaps with `--platform`, but that's fine "--conda-env-prefix", type=Path, default=None, help="Path to the conda environment, if not provided, the currently" " active environment path is used, unless `--conda-env-name` is" " provided", ) if "dry-run" in options: sub_parser.add_argument( "--dry-run", "--dry", action="store_true", help="Only print the commands that would be run", ) if "ignore-pin" in options: sub_parser.add_argument( "--ignore-pin", type=str, action="append", default=[], help="Ignore the version pin for a specific package," " e.g., `--ignore-pin numpy`. This option can be repeated" " to ignore multiple packages.", ) if "overwrite-pin" in options: sub_parser.add_argument( "--overwrite-pin", type=str, action="append", default=[], help="Overwrite the version pin for a specific package," " e.g., `--overwrite-pin 'numpy=1.19.2'`. This option can be repeated" " to overwrite the pins of multiple packages.", ) if "conda-lock-file" in options: sub_parser.add_argument( "-f", "--conda-lock-file", type=Path, help="Path to the `conda-lock.yml` file to use for creating the new" " environment. Assumes that the lock file contains all dependencies." " Must be used with `--conda-env-name` or `--conda-env-prefix`.", ) if "no-uv" in options: sub_parser.add_argument( "--no-uv", action="store_true", help="Disables the use of `uv` for pip install. By default, `uv` is used" " if it is available in the PATH.", ) def _add_extra_flags( subparser: argparse.ArgumentParser, downstream_command: str, unidep_subcommand: str, example: str, ) -> None: subparser.add_argument( "extra_flags", nargs=argparse.REMAINDER, help=f"Extra flags to pass to `{downstream_command}`. These flags are passed" f" directly and should be provided in the format expected by" f" `{downstream_command}`. For example, `unidep {unidep_subcommand} -- {example}`." # noqa: E501 f" Note that the `--` is required to separate the flags for" f" `unidep {unidep_subcommand}` from the flags for `{downstream_command}`.", ) def _parse_args() -> argparse.Namespace: # noqa: PLR0915 parser = argparse.ArgumentParser( description="Unified Conda and Pip requirements management.", formatter_class=_HelpFormatter, ) subparsers = parser.add_subparsers(dest="command", help="Subcommands") # Subparser for the 'merge' command merge_help = ( f"Combine multiple (or a single) {_DEP_FILES}" " files into a" " single Conda installable `environment.yaml` file." ) merge_example = ( " Example usage: `unidep merge --directory . 
--depth 1 --output environment.yaml`" # noqa: E501 f" to search for {_DEP_FILES}" " files in the current directory and its" " subdirectories and create `environment.yaml`. These are the defaults, so you" " can also just run `unidep merge`. For Pixi support, use `unidep pixi`." ) parser_merge = subparsers.add_parser( "merge", help=merge_help, description=merge_help + merge_example, formatter_class=_HelpFormatter, ) parser_merge.add_argument( "-o", "--output", type=Path, default=None, help="Output file for the conda environment, by default `environment.yaml`", ) parser_merge.add_argument( "-n", "--name", type=str, default="myenv", help="Name of the conda environment, by default `myenv`", ) parser_merge.add_argument( "--stdout", action="store_true", help="Output to stdout instead of a file", ) parser_merge.add_argument( "--selector", type=str, choices=("sel", "comment"), default="sel", help="The selector to use for the environment markers, if `sel` then" " `- numpy # [linux]` becomes `sel(linux): numpy`, if `comment` then" " it remains `- numpy # [linux]`, by default `sel`", ) merge_optional_group = parser_merge.add_mutually_exclusive_group() merge_optional_group.add_argument( "--optional-dependencies", nargs="+", metavar="GROUP", default=[], help="Include the named optional dependency group(s) from the discovered" " requirements files.", ) merge_optional_group.add_argument( "--all-optional-dependencies", action="store_true", help="Include all optional dependency groups from the discovered" " requirements files.", ) _add_common_args( parser_merge, { "directory", "verbose", "platform", "depth", "ignore-pin", "skip-dependency", "overwrite-pin", }, ) # Subparser for the 'install' command install_help = ( f"Automatically install all dependencies from one or more {_DEP_FILES} files." " This command first installs dependencies" " with Conda, then with Pip. Finally, it installs local packages" f" (those containing the {_DEP_FILES} files)" " using `pip install [-e] ./project`." ) install_example = ( " Example usage: `unidep install .` for a single project." " For multiple projects: `unidep install ./project1 ./project2`." " The command accepts both file paths and directories containing" f" a {_DEP_FILES} file. Use `--editable` or" " `-e` to install the local packages in editable mode. See" f" `unidep install-all` to install all {_DEP_FILES} files in and below the" " current folder." ) parser_install = subparsers.add_parser( "install", help=install_help, description=install_help + install_example, formatter_class=_HelpFormatter, ) # Add positional argument for the file _add_common_args( parser_install, { "*files", "conda-executable", "conda-env", "conda-lock-file", "dry-run", "editable", "skip-local", "skip-pip", "skip-conda", "no-dependencies", "ignore-pin", "skip-dependency", "overwrite-pin", "no-uv", "verbose", }, ) install_all_help = ( f"Install dependencies from all {_DEP_FILES}" " files found in the current" " directory or specified directory. This command first installs dependencies" " using Conda, then Pip, and finally the local packages." ) install_all_example = ( " Example usage: `unidep install-all` to install dependencies from all" f" {_DEP_FILES}" " files in the current directory. Use" " `--directory ./path/to/dir` to specify a different directory. Use" " `--depth` to control the depth of directory search. Add `--editable`" " or `-e` for installing local packages in editable mode." 
) parser_install_all = subparsers.add_parser( "install-all", help=install_all_help, description=install_all_help + install_all_example, formatter_class=_HelpFormatter, ) # Add positional argument for the file _add_common_args( parser_install_all, { "conda-executable", "conda-env", "conda-lock-file", "dry-run", "editable", "depth", "directory", "skip-local", "skip-pip", "skip-conda", "no-dependencies", "ignore-pin", "skip-dependency", "overwrite-pin", "no-uv", "verbose", }, ) # Subparser for the 'conda-lock' command conda_lock_help = ( "Generate a global `conda-lock.yml` file for a collection of" f" {_DEP_FILES}" " files. Additionally, create individual" f" `conda-lock.yml` files for each {_DEP_FILES} file" " consistent with the global lock file." ) conda_lock_example = ( " Example usage: `unidep conda-lock --directory ./projects` to generate" f" conda-lock files for all {_DEP_FILES}" " files in the `./projects`" " directory. Use `--only-global` to generate only the global lock file." " The `--check-input-hash` option can be used to avoid regenerating lock" " files if the input hasn't changed." ) parser_lock = subparsers.add_parser( "conda-lock", help=conda_lock_help, description=conda_lock_help + conda_lock_example, formatter_class=_HelpFormatter, ) parser_lock.add_argument( "--only-global", action="store_true", help="Only generate the global lock file", ) parser_lock.add_argument( "--lockfile", type=Path, default="conda-lock.yml", help="Specify a path for the global lockfile (default: `conda-lock.yml`" " in current directory). Path should be relative, e.g.," " `--lockfile ./locks/example.conda-lock.yml`.", ) parser_lock.add_argument( "--check-input-hash", action="store_true", help="Check existing input hashes in lockfiles before regenerating lock files." " This flag is directly passed to `conda-lock`.", ) _add_common_args( parser_lock, { "directory", "file-alt", "verbose", "platform", "depth", "ignore-pin", "skip-dependency", "overwrite-pin", }, ) _add_extra_flags(parser_lock, "conda-lock lock", "conda-lock", "--micromamba") # Subparser for the 'pixi' command pixi_help = f"Generate a `pixi.toml` file from {_DEP_FILES} files." pixi_example = ( " Example usage: `unidep pixi` to generate a pixi.toml file. " "Use `--output` to specify a different output path. " "Use `--name` to set the project name. " "After generating, use `pixi lock` and `pixi install` directly." ) parser_pixi = subparsers.add_parser( "pixi", help=pixi_help, description=pixi_help + pixi_example, formatter_class=_HelpFormatter, ) parser_pixi.add_argument( "-o", "--output", type=Path, default=None, help="Output path for pixi.toml (default: pixi.toml in current directory)", ) parser_pixi.add_argument( "-n", "--name", type=str, default=None, help="Name of the project (default: current directory name)", ) parser_pixi.add_argument( "--stdout", action="store_true", help="Output to stdout instead of a file", ) parser_pixi.add_argument( "-c", "--channel", action="append", help="Conda channel to include. Can be repeated. Overrides channels" " declared in requirements files. If omitted, channels are read from" " the requirements files (defaulting to conda-forge).", ) _add_common_args( parser_pixi, { "directory", "file-alt", "verbose", "platform", "depth", "ignore-pin", "skip-dependency", "overwrite-pin", }, ) # Subparser for the 'pip-compile' command pip_compile_help = ( "Generate a fully pinned `requirements.txt` file from one or more" f" {_DEP_FILES}" " files using `pip-compile` from `pip-tools`. 
This" f" command consolidates all pip dependencies defined in the {_DEP_FILES}" " files and compiles them into a single `requirements.txt` file, taking" " into account the specific versions and dependencies of each package." ) pip_compile_example = ( " Example usage: `unidep pip-compile --directory ./projects` to generate" f" a `requirements.txt` file for all {_DEP_FILES}" " files in the" " `./projects` directory. Use `--output-file requirements.txt` to specify a" " different output file." ) parser_pip_compile = subparsers.add_parser( "pip-compile", help=pip_compile_help, description=pip_compile_help + pip_compile_example, formatter_class=_HelpFormatter, ) parser_pip_compile.add_argument( "-o", "--output-file", type=Path, default=None, help="Output file for the pip requirements, by default `requirements.txt`", ) _add_common_args( parser_pip_compile, { "directory", "verbose", "platform", "depth", "ignore-pin", "skip-dependency", "overwrite-pin", }, ) _add_extra_flags( parser_pip_compile, "pip-compile", "pip-compile", "--generate-hashes --allow-unsafe", ) # Subparser for the 'pip' and 'conda' command help_str = "Get the {} requirements for the current platform only." help_example = ( " Example usage: `unidep {which} --file folder1 --file" " folder2/requirements.yaml --separator ' ' --platform linux-64` to" " extract all the {which} dependencies specific to the linux-64 platform. Note" " that the `--file` argument can be used multiple times to specify multiple" f" {_DEP_FILES}" " files and that --file can also be a folder that contains" f" a {_DEP_FILES} file." ) parser_pip = subparsers.add_parser( "pip", help=help_str.format("pip"), description=help_str.format("pip") + help_example.format(which="pip"), formatter_class=_HelpFormatter, ) parser_conda = subparsers.add_parser( "conda", help=help_str.format("conda"), description=help_str.format("conda") + help_example.format(which="conda"), formatter_class=_HelpFormatter, ) for sub_parser in [parser_pip, parser_conda]: _add_common_args( sub_parser, { "verbose", "platform", "file", "ignore-pin", "skip-dependency", "overwrite-pin", }, ) sub_parser.add_argument( "--separator", type=str, default=" ", help="The separator between the dependencies, by default ` `", ) # Subparser for the 'version' command parser_merge = subparsers.add_parser( "version", help="Print version information of unidep.", formatter_class=_HelpFormatter, ) args = parser.parse_args() if args.command is None: # pragma: no cover parser.print_help() sys.exit(1) if "file" in args: _ensure_files(args.file) return args def _ensure_files(files: list[Path]) -> None: """Ensure that the files exist.""" missing = [] for i, f in enumerate(files): try: path_with_extras = parse_folder_or_filename(f) except FileNotFoundError: # noqa: PERF203 missing.append(f"`{f}`") else: files[i] = path_with_extras.path_with_extras if missing: print(f"❌ One or more files ({', '.join(missing)}) not found.") sys.exit(1) def _get_conda_executable(which: CondaExecutable) -> str | None: if shutil.which(which): return which # Found in PATH so return the name # e.g., micromamba might be a bash function, check env var in that case env_var = "CONDA_EXE" if which == "conda" else "MAMBA_EXE" exe = os.environ.get(env_var, None) if exe is None: # pragma: no cover return None if Path(exe).name != which: # pragma: no cover return None return exe def _identify_conda_executable() -> CondaExecutable: # pragma: no cover """Identify the conda executable to use. This function checks for micromamba, mamba, and conda in that order. 
""" if _get_conda_executable("micromamba") is not None: return "micromamba" if _get_conda_executable("mamba") is not None: return "mamba" if _get_conda_executable("conda") is not None: return "conda" msg = "Could not identify conda executable." raise RuntimeError(msg) def _maybe_conda_executable() -> CondaExecutable | None: try: return _identify_conda_executable() except RuntimeError: # pragma: no cover return None def _format_inline_conda_package(package: str) -> str: pkg = parse_package_str(package) if pkg.pin is None: return pkg.name return f'{pkg.name}"{pkg.pin.strip()}"' def _maybe_exe(conda_executable: CondaExecutable) -> str: """Add .exe on Windows.""" if os.name == "nt": # pragma: no cover if conda_executable in ("micromamba", "mamba") and os.environ.get("MAMBA_EXE"): return os.path.normpath(os.environ["MAMBA_EXE"]) if os.environ.get("CONDA_EXE"): return os.path.normpath(os.environ["CONDA_EXE"]) executables = [f"{conda_executable}.exe", conda_executable] for exe in executables: path = shutil.which(exe) if path is not None: return os.path.normpath(path) print( "🔍 Going to search in different common paths" f" because `{conda_executable}` was not found in PATH.", ) return os.path.normpath(_find_windows_path(conda_executable)) executable = _get_conda_executable(conda_executable) assert executable is not None return executable def _capitalize_dir(path: str, *, capitalize: bool = True, index: int = -1) -> str: """Capitalize or lowercase a directory in a path, on Windows only.""" sep = "\\" parts = path.split(sep) if capitalize: parts[index] = parts[index].capitalize() else: parts[index] = parts[index].lower() return sep.join(parts) @functools.lru_cache(1) def _find_windows_path(conda_executable: CondaExecutable) -> str: """Find the path to the conda executable on Windows.""" searched = [] conda_roots = [ r"%USERPROFILE%\Anaconda3", # https://stackoverflow.com/a/58211115 r"%USERPROFILE%\Miniconda3", # https://stackoverflow.com/a/76545804 r"C:\Anaconda3", # https://stackoverflow.com/a/44597801 r"C:\Miniconda3", # https://stackoverflow.com/a/53685910 r"C:\ProgramData\Anaconda3", # https://stackoverflow.com/a/58211115 r"C:\ProgramData\Miniconda3", # https://stackoverflow.com/a/51003321 ] if conda_executable == "mamba": conda_roots = [ r"C:\ProgramData\mambaforge", # https://github.com/mamba-org/mamba/issues/1756#issuecomment-1517284831 r"%USERPROFILE%\AppData\Local\mambaforge", # https://stackoverflow.com/a/75612393 # First try native mamba locations, then Conda locations (in # case `conda install mamba` was used) *conda_roots, ] if conda_executable == "micromamba": conda_roots = [ # Default installation directory based on the installation script # https://raw.githubusercontent.com/mamba-org/micromamba-releases/main/install.ps1 r"%LOCALAPPDATA%\micromamba", ] extensions = (".exe", "", ".bat") subs = ("condabin\\", "Scripts\\", "") # The "" is for micromamba for root, sub, ext, cap in itertools.product( conda_roots, subs, extensions, (True, False), ): # @sbalk reported that his `anaconda3` folder is lowercase path = rf"{_capitalize_dir(root, capitalize=cap)}\{sub}{conda_executable}{ext}" path = os.path.expandvars(path) searched.append(path) if os.path.exists(path): # noqa: PTH110 return path msg = f"Could not find {conda_executable}." searched_str = "\n👉 ".join(searched) msg = f"Could not find {conda_executable}. 
Searched in:\n👉 {searched_str}" raise FileNotFoundError(msg) def _conda_cli_command_json( conda_executable: CondaExecutable, *args: str, ) -> dict[str, list[str]]: """Run a conda command and return the JSON output.""" try: result = subprocess.run( [_maybe_exe(conda_executable), *args, "--json"], capture_output=True, text=True, check=True, ) return json.loads(result.stdout) except subprocess.CalledProcessError as e: # pragma: no cover print(f"Error occurred: {e}") raise except json.JSONDecodeError as e: # pragma: no cover print(f"Failed to parse JSON: {e}") raise @functools.lru_cache(maxsize=None) def _conda_env_list(conda_executable: CondaExecutable) -> list[str]: """Get a list of conda environments.""" return _conda_cli_command_json(conda_executable, "env", "list")["envs"] @functools.lru_cache(maxsize=None) def _conda_info(conda_executable: CondaExecutable) -> dict: return _conda_cli_command_json(conda_executable, "info") def _conda_root_prefix(conda_executable: CondaExecutable) -> Path: # pragma: no cover """Get the root prefix of the conda installation.""" if os.environ.get("MAMBA_ROOT_PREFIX"): return Path(os.environ["MAMBA_ROOT_PREFIX"]) if os.environ.get("CONDA_ROOT"): return Path(os.environ["CONDA_ROOT"]) info_dict = _conda_info(conda_executable) if conda_executable in ("conda", "mamba"): prefix = info_dict.get("root_prefix") or info_dict["conda_prefix"] else: assert conda_executable == "micromamba" prefix = info_dict["base environment"] return Path(prefix) def _conda_env_dirs( conda_executable: CondaExecutable, ) -> list[Path]: # pragma: no cover """Get a list of conda environment directories.""" info_dict = _conda_info(conda_executable) if conda_executable in ("conda", "mamba"): envs_dirs = info_dict["envs_dirs"] else: assert conda_executable == "micromamba" envs_dirs = info_dict["envs directories"] return [Path(d) for d in envs_dirs] def _conda_env_name_to_prefix( conda_executable: CondaExecutable, conda_env_name: str, *, raise_if_not_found: bool = True, ) -> Path | None: # pragma: no cover """Get the prefix of a conda environment.""" # Based on `conda.base.context.locate_prefix_by_name` # https://github.com/conda/conda/blob/72fe69dac8b2fef351c511c813493fef17f295e1/conda/base/context.py#L1976-L1977 root_prefix = _conda_root_prefix(conda_executable) if conda_env_name in ("base", "root"): return root_prefix for envs_dir in _conda_env_dirs(conda_executable): prefix = envs_dir / conda_env_name if prefix.exists(): return prefix if not raise_if_not_found: return None envs = _conda_env_list(conda_executable) envs_str = "\n👉 ".join(envs) msg = ( f"Could not find conda prefix with name `{conda_env_name}`." 
f" Available prefixes:\n👉 {envs_str}" ) raise ValueError(msg) def _maybe_create_conda_env_args( conda_executable: CondaExecutable, conda_env_name: str | None, conda_env_prefix: Path | None, ) -> list[str]: if not conda_env_name and not conda_env_prefix: return [] conda_env_args = [] if conda_env_name: conda_env_args = ["--name", conda_env_name] prefix = _conda_env_name_to_prefix( conda_executable, conda_env_name, raise_if_not_found=False, ) if prefix is None: _create_conda_environment(conda_executable, *conda_env_args) elif conda_env_prefix: conda_env_args = ["--prefix", str(conda_env_prefix)] if not conda_env_prefix.exists(): _create_conda_environment(conda_executable, *conda_env_args) return conda_env_args def _create_conda_environment( conda_executable: CondaExecutable, *args: str, ) -> None: # pragma: no cover """Create an empty conda environment.""" conda_command = [_maybe_exe(conda_executable), "create", "--yes", *args] print(f"📦 Creating empty conda environment with `{' '.join(conda_command)}`\n") subprocess.run(conda_command, check=True) def _python_executable( conda_executable: CondaExecutable | None, conda_env_name: str | None, conda_env_prefix: Path | None, ) -> str: """Get the Python executable to use for a conda environment.""" if conda_env_name is None and conda_env_prefix is None: return sys.executable if conda_env_name: assert conda_executable is not None conda_env_prefix = _conda_env_name_to_prefix(conda_executable, conda_env_name) assert conda_env_prefix is not None if platform.system() == "Windows": # pragma: no cover python_executable = conda_env_prefix / "python.exe" else: python_executable = conda_env_prefix / "bin" / "python" assert python_executable.exists() return str(python_executable) def _use_uv(no_uv: bool) -> bool: # noqa: FBT001 """Check if the user wants to use the `uv` package.""" if no_uv: return False return shutil.which("uv") is not None def _build_pip_index_arguments(pip_indices: Sequence[str]) -> list[str]: """Build pip/uv index arguments from pip_indices list. First index becomes --index-url (primary), remaining indices become --extra-index-url (supplementary). 
""" args = [] if pip_indices: # Expand environment variables in URLs expanded_indices = [] for index in pip_indices: expanded = os.path.expandvars(index) expanded_indices.append(expanded) # First index is primary args.extend(["--index-url", expanded_indices[0]]) # Additional indices are extra for index in expanded_indices[1:]: args.extend(["--extra-index-url", index]) return args def _pip_install_local( *folders: str | Path, editable: bool, dry_run: bool, python_executable: str, conda_run: list[str], no_uv: bool, pip_indices: Sequence[str] | None = None, flags: list[str] | None = None, ) -> None: # pragma: no cover index_args = _build_pip_index_arguments(pip_indices or []) if _use_uv(no_uv): pip_command = [ *conda_run, "uv", "pip", "install", "--python", python_executable, *index_args, ] else: pip_command = [ *conda_run, python_executable, "-m", "pip", "install", *index_args, ] if flags: pip_command.extend(flags) for folder in sorted(folders): if not os.path.isabs(folder): # noqa: PTH117 relative_prefix = ".\\" if os.name == "nt" else "./" folder = f"{relative_prefix}{folder}" # noqa: PLW2901 if ( editable and not str(folder).endswith(".whl") and not str(folder).endswith(".zip") ): pip_command.extend(["-e", str(folder)]) else: pip_command.append(str(folder)) print(f"📦 Installing project with `{' '.join(pip_command)}`\n") if not dry_run: subprocess.run(pip_command, check=True) def _install_command( # noqa: C901, PLR0912, PLR0915 *files: Path, conda_executable: CondaExecutable | None, conda_env_name: str | None, conda_env_prefix: Path | None, conda_lock_file: Path | None, dry_run: bool, editable: bool, skip_local: bool = False, skip_pip: bool = False, skip_conda: bool = False, no_dependencies: bool = False, ignore_pins: list[str] | None = None, overwrite_pins: list[str] | None = None, skip_dependencies: list[str] | None = None, no_uv: bool = True, verbose: bool = False, ) -> None: """Install the dependencies of a single `requirements.yaml` or `pyproject.toml` file.""" # noqa: E501 start_time = time.time() paths_with_extras = [parse_folder_or_filename(f) for f in files] requirements = parse_requirements( *[f.path for f in paths_with_extras], ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, verbose=verbose, extras=[f.extras for f in paths_with_extras], ) platforms = [identify_current_platform()] env_entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) env_spec = create_conda_env_specification( env_entries, requirements.channels, requirements.pip_indices, platforms=platforms, ) if not conda_executable: # None or empty string conda_executable = _maybe_conda_executable() if conda_lock_file: # As late as possible to error out early in previous steps assert conda_executable is not None _create_env_from_lock( conda_lock_file, conda_executable, conda_env_name=conda_env_name, conda_env_prefix=conda_env_prefix, dry_run=dry_run, verbose=verbose, ) no_dependencies = True # Assume the lock file has all dependencies if no_dependencies: skip_pip = True skip_conda = True if env_spec.conda and not skip_conda: assert conda_executable is not None channel_args = ["--override-channels"] if env_spec.channels else [] for channel in env_spec.channels: channel_args.extend(["--channel", channel]) conda_env_args = _maybe_create_conda_env_args( conda_executable, conda_env_name, conda_env_prefix, ) conda_command = [ _maybe_exe(conda_executable), "install", "--yes", *channel_args, *conda_env_args, ] # When 
running the command in terminal, we need to wrap the pin in quotes # so what we print is what the user would type (copy-paste). to_print = [_format_inline_conda_package(pkg) for pkg in env_spec.conda] # type: ignore[arg-type] conda_command_str = " ".join((*conda_command, *to_print)) print(f"📦 Installing conda dependencies with `{conda_command_str}`\n") # type: ignore[arg-type] if not dry_run: # pragma: no cover subprocess.run((*conda_command, *env_spec.conda), check=True) # type: ignore[arg-type] python_executable = _python_executable( conda_executable, conda_env_name, conda_env_prefix, ) if env_spec.pip and not skip_pip: conda_run = _maybe_conda_run(conda_executable, conda_env_name, conda_env_prefix) index_args = _build_pip_index_arguments(env_spec.pip_indices) if _use_uv(no_uv): pip_command = [ *conda_run, "uv", "pip", "install", "--python", python_executable, *index_args, *env_spec.pip, ] else: pip_command = [ *conda_run, python_executable, "-m", "pip", "install", *index_args, *env_spec.pip, ] print(f"📦 Installing pip dependencies with `{' '.join(pip_command)}`\n") if not dry_run: # pragma: no cover subprocess.run(pip_command, check=True) installable = [] if not skip_local: for file in paths_with_extras: if is_pip_installable(file.path.parent): installable.append(file.path.parent) else: # pragma: no cover print( f"⚠️ Project {file.path.parent} is not pip installable. " "Could not find setup.py or [build-system] in pyproject.toml.", ) # Install local dependencies (if any) included via `local_dependencies:` local_dependencies = parse_local_dependencies( *[p.path_with_extras for p in paths_with_extras], check_pip_installable=True, verbose=verbose, ) names = {k.name: [dep.name for dep in v] for k, v in local_dependencies.items()} print(f"📝 Found local dependencies: {names}\n") installable_set = {p.resolve() for p in installable} for deps in local_dependencies.values(): for dep in deps: resolved_dep = dep.resolve() if resolved_dep in installable_set: continue installable_set.add(resolved_dep) installable.append(dep) if installable: pip_flags = ["--no-deps"] # we just ran pip/conda install, so skip if verbose: pip_flags.append("--verbose") conda_run = _maybe_conda_run( conda_executable, conda_env_name, conda_env_prefix, ) _pip_install_local( *sorted(installable), editable=editable, dry_run=dry_run, python_executable=python_executable, flags=pip_flags, no_uv=no_uv, pip_indices=env_spec.pip_indices, conda_run=conda_run, ) if not dry_run: # pragma: no cover total_time = time.time() - start_time msg = f"✅ All dependencies installed successfully in {total_time:.2f} seconds." 
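        # The timer above spans the conda step, the pip step, and the local
        # `pip install --no-deps` step; with `--dry-run` this branch is skipped.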
print(msg) def _install_all_command( *, conda_executable: CondaExecutable | None, conda_env_name: str | None, conda_env_prefix: Path | None, conda_lock_file: Path | None, dry_run: bool, editable: bool, depth: int, directory: Path, skip_local: bool = False, skip_pip: bool = False, skip_conda: bool = False, no_dependencies: bool = False, ignore_pins: list[str] | None = None, overwrite_pins: list[str] | None = None, skip_dependencies: list[str] | None = None, no_uv: bool = True, verbose: bool = False, ) -> None: # pragma: no cover found_files = find_requirements_files( directory, depth, verbose=verbose, ) if not found_files: print(f"❌ No {_DEP_FILES} files found in {directory}") sys.exit(1) _install_command( *found_files, conda_executable=conda_executable, conda_env_name=conda_env_name, conda_env_prefix=conda_env_prefix, conda_lock_file=conda_lock_file, dry_run=dry_run, editable=editable, skip_local=skip_local, skip_pip=skip_pip, skip_conda=skip_conda, no_dependencies=no_dependencies, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, no_uv=no_uv, verbose=verbose, ) def _maybe_conda_run( conda_executable: CondaExecutable | None, conda_env_name: str | None, conda_env_prefix: Path | None, ) -> list[str]: if not conda_executable: # None or empty string return [] if conda_env_name is None and conda_env_prefix is None: if not os.getenv("CONDA_PREFIX") and not os.getenv("MAMBA_ROOT_PREFIX"): # Conda/mamba/micromamba might be installed but not in PATH return [] exe = Path(sys.executable) conda_prefix = exe.parent if os.name == "nt" else exe.parent.parent env_args = ["--prefix", str(conda_prefix)] elif conda_env_name: env_args = ["--name", conda_env_name] elif conda_env_prefix: env_args = ["--prefix", str(conda_env_prefix)] return [_maybe_exe(conda_executable), "run", *env_args] def _create_env_from_lock( # noqa: PLR0912 conda_lock_file: Path, conda_executable: CondaExecutable, conda_env_name: str | None, conda_env_prefix: Path | None, *, dry_run: bool, verbose: bool, ) -> None: if conda_env_name is None and conda_env_prefix is None: print( "❌ Please provide either `--conda-env-name` or" " `--conda-env-prefix` when using `--conda-lock-file`.", ) sys.exit(1) elif conda_env_name: env_args = ["--name", conda_env_name] elif conda_env_prefix: env_args = ["--prefix", str(conda_env_prefix)] if conda_executable == "micromamba": create_cmd = [ _maybe_exe(conda_executable), "create", "-f", str(conda_lock_file), "--yes", *env_args, ] if verbose: create_cmd.append("--verbose") else: # conda or mamba if not dry_run: _verify_conda_lock_installed() create_cmd = ["conda-lock", "install", *env_args] if conda_executable == "mamba": create_cmd.append("--mamba") elif conda_executable == "conda": create_cmd.extend(["--conda", "conda"]) create_cmd.append(str(conda_lock_file)) if verbose: create_cmd.append("--log-level=DEBUG") create_cmd_str = " ".join(map(str, create_cmd)) env_identifier = ( f"'{conda_env_name}'" if conda_env_name else f"at '{conda_env_prefix}'" ) print(f"📦 Creating conda environment {env_identifier} with `{create_cmd_str}`") if not dry_run: # pragma: no cover try: subprocess.run(create_cmd, check=True) if verbose: print(f"✅ Environment {env_identifier} created successfully.") except subprocess.CalledProcessError as e: print(f"❌ Failed to create environment: {e}") sys.exit(1) else: print("🏁 Dry run completed. 
No environment was created.") def _verify_conda_lock_installed() -> None: """Verify that conda-lock is installed and accessible.""" if shutil.which("conda-lock") is None: print( "❌ conda-lock is not installed or not found in PATH.\n" "Please install it with one of the following commands:\n" " pip install conda-lock\n" " conda install conda-lock -c conda-forge\n" " mamba install conda-lock -c conda-forge", ) sys.exit(1) try: # Check if conda-lock is working correctly subprocess.run( ["conda-lock", "--version"], # noqa: S607 check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) except subprocess.CalledProcessError: print( "❌ conda-lock is installed but not working correctly.\n" "Please try reinstalling it with one of the following commands:\n" " `pip install --force-reinstall conda-lock`\n" " `conda install --force-reinstall conda-lock -c conda-forge`\n" " `mamba install --force-reinstall conda-lock -c conda-forge`\n" " `pipx install --force-reinstall conda-lock`", ) sys.exit(1) # If we get here, conda-lock is installed and working def _merge_command( *, depth: int, directory: Path, files: list[Path] | None, name: str, output: Path | None, stdout: bool, selector: Literal["sel", "comment"], platforms: list[Platform], ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], verbose: bool, optional_dependencies: list[str] | None = None, all_optional_dependencies: bool = False, ) -> None: # pragma: no cover # When using stdout, suppress verbose output verbose = verbose and not stdout if output is None: output = Path("environment.yaml") if files: # ignores depth and directory! found_files = files else: found_files = find_requirements_files( directory, depth, verbose=verbose, ) if not found_files: print(f"❌ No {_DEP_FILES} files found in {directory}") sys.exit(1) extras = _merge_optional_dependency_extras( found_files=found_files, optional_dependencies=optional_dependencies or [], all_optional_dependencies=all_optional_dependencies, ) requirements = parse_requirements( *found_files, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, verbose=verbose, extras=extras, ) env_entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) platforms = resolve_platforms( requested_platforms=platforms, declared_platforms=requirements.platforms, selector_platforms=_collect_selected_conda_like_platforms(env_entries), ) env_spec = create_conda_env_specification( env_entries, requirements.channels, requirements.pip_indices, platforms, selector=selector, ) output_file = None if stdout else output write_conda_environment_file(env_spec, output_file, name, verbose=verbose) if output_file: found_files_str = ", ".join(f"`{f}`" for f in found_files) print( f"✅ Generated environment file at `{output_file}` from {found_files_str}", ) def _pixi_command( *, depth: int, directory: Path, files: list[Path] | None, name: str | None, output: Path | None, stdout: bool, channels: list[str] | None, platforms: list[Platform] | None, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], verbose: bool, ) -> None: # pragma: no cover """Generate a pixi.toml file from requirements files.""" # When using stdout, suppress verbose output verbose = verbose and not stdout if output is None: output = Path("pixi.toml") if files: # ignores depth and directory! 
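        # Explicitly passed files take precedence; `--depth` and `--directory`
        # only drive file discovery in the `else` branch below.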
found_files = files else: found_files = find_requirements_files( directory, depth, verbose=verbose, ) if not found_files: print(f"❌ No {_DEP_FILES} files found in {directory}") sys.exit(1) output_file = None if stdout else output generate_pixi_toml( *found_files, project_name=name, channels=channels, platforms=platforms, output_file=output_file, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, ) if output_file: found_files_str = ", ".join(f"`{f}`" for f in found_files) print(f"✅ Generated `{output_file}` from {found_files_str}") print(" Run `pixi install` to install dependencies.") def _pip_compile_command( *, depth: int, directory: Path, platform: Platform, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], verbose: bool, extra_flags: list[str], output_file: Path | None = None, ) -> None: if importlib.util.find_spec("piptools") is None: # pragma: no cover print( "❌ Could not import `pip-tools` module." " Please install it with `pip install pip-tools`.", ) sys.exit(1) found_files = find_requirements_files( directory, depth, verbose=verbose, ) requirements = parse_requirements( *found_files, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, verbose=verbose, ) pip_entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) python_deps = filter_python_dependencies(pip_entries, [platform]) requirements_in = directory / "requirements.in" with requirements_in.open("w") as f: f.write("\n".join(python_deps)) print("✅ Generated `requirements.in` file.") if extra_flags: assert extra_flags[0] == "--" extra_flags = extra_flags[1:] if verbose: print(f"📝 Extra flags for `pip-compile`: {extra_flags}") if output_file is None: output_file = directory / "requirements.txt" cmd = [ "pip-compile", "--output-file", str(output_file), *extra_flags, str(requirements_in), ] print(f"🔒 Locking dependencies with `{' '.join(cmd)}`\n") subprocess.run(cmd, check=True) if output_file.exists(): # pragma: no cover # might not exist in tests add_comment_to_file(output_file) print(f"✅ Generated `{output_file}`.") def _check_conda_prefix() -> None: # pragma: no cover """Check if sys.executable is in the $CONDA_PREFIX.""" if "CONDA_PREFIX" not in os.environ: return conda_prefix = os.environ["CONDA_PREFIX"] if sys.executable.startswith(str(conda_prefix)): return msg = ( "UniDep should be run from the current Conda environment for correct" " operation. However, it's currently running with the Python interpreter" f" at `{sys.executable}`, which is not in the active Conda environment" f" (`{conda_prefix}`). Please install and run UniDep in the current" " Conda environment to avoid any issues, or provide the `--conda-env-name`" " or `--conda-env-prefix` option to specify the Conda environment to use." 
) warn(msg, stacklevel=2) sys.exit(1) def _print_versions() -> None: # pragma: no cover """Print version information.""" path = Path(__file__).parent txt = [ f"unidep version: {__version__}", f"unidep location: {path}", f"Python version: {sys.version}", f"Python executable: {sys.executable}", ] extra_packages = [ "rich_argparse", "rich", "conda_lock", "pydantic", "pip_tools", "conda_package_handling", "ruamel.yaml", "packaging", "tomli", ] for package in extra_packages: version = get_package_version(package) if version is not None: txt.append(f"{package} version: {version}") if importlib.util.find_spec("rich") is not None: _print_with_rich(txt) else: print("\n".join(txt)) def _print_with_rich(data: list) -> None: """Print data as a table using rich, if it's installed.""" from rich.console import Console from rich.table import Table console = Console() table = Table(show_header=False) table.add_column("Property", style="cyan") table.add_column("Value", style="magenta") for line in data: prop, value = line.split(":", 1) table.add_row(prop, value.strip()) console.print(table) def _pip_subcommand( *, file: list[Path], platforms: list[Platform], verbose: bool, ignore_pins: list[str] | None, skip_dependencies: list[str] | None, overwrite_pins: list[str] | None, separator: str, ) -> str: # pragma: no cover platforms = platforms or [identify_current_platform()] assert len(file) <= 1 path = file[0] if file else Path() deps = get_python_dependencies( path, platforms=platforms, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, include_local_dependencies=True, ) pip_dependencies = deps.dependencies for extra in parse_folder_or_filename(path).extras: pip_dependencies.extend(deps.extras[extra]) return escape_unicode(separator).join(pip_dependencies) def main() -> None: # noqa: PLR0912 """Main entry point for the command-line tool.""" args = _parse_args() if args.command == "merge": # pragma: no cover _merge_command( depth=args.depth, directory=args.directory, files=None, name=args.name, output=args.output, stdout=args.stdout, selector=args.selector, platforms=args.platform, optional_dependencies=args.optional_dependencies, all_optional_dependencies=args.all_optional_dependencies, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, verbose=args.verbose, ) elif args.command == "pip": # pragma: no cover print( _pip_subcommand( file=args.file, platforms=args.platform, verbose=args.verbose, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, separator=args.separator, ), ) elif args.command == "conda": # pragma: no cover platforms = args.platform or [identify_current_platform()] files = args.file or [Path()] requirements = parse_requirements( *files, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, verbose=args.verbose, ) env_entries = _flatten_selected_dependency_entries( requirements.dependency_entries, requirements.optional_dependency_entries, ) env_spec = create_conda_env_specification( env_entries, requirements.channels, requirements.pip_indices, platforms=platforms, ) if any(parse_folder_or_filename(f).extras for f in files): msg = "🚧 The `conda` command currently does not support extras." 
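            # "Extras" are the bracketed groups parsed from the path
            # arguments (e.g. a path like `./project[dev]`, illustrative).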
print(msg) sys.exit(1) print(escape_unicode(args.separator).join(env_spec.conda)) # type: ignore[arg-type] elif args.command == "install": if args.conda_env_name is None and args.conda_env_prefix is None: _check_conda_prefix() _install_command( *(args.files or [Path()]), conda_executable=args.conda_executable, conda_env_name=args.conda_env_name, conda_env_prefix=args.conda_env_prefix, conda_lock_file=args.conda_lock_file, dry_run=args.dry_run, editable=args.editable, skip_local=args.skip_local, skip_pip=args.skip_pip, skip_conda=args.skip_conda, no_dependencies=args.no_dependencies, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, no_uv=args.no_uv, verbose=args.verbose, ) elif args.command == "install-all": if args.conda_env_name is None and args.conda_env_prefix is None: _check_conda_prefix() _install_all_command( conda_executable=args.conda_executable, conda_env_name=args.conda_env_name, conda_env_prefix=args.conda_env_prefix, conda_lock_file=args.conda_lock_file, dry_run=args.dry_run, editable=args.editable, depth=args.depth, directory=args.directory, skip_local=args.skip_local, skip_pip=args.skip_pip, skip_conda=args.skip_conda, no_dependencies=args.no_dependencies, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, no_uv=args.no_uv, verbose=args.verbose, ) elif args.command == "conda-lock": # pragma: no cover conda_lock_command( depth=args.depth, directory=args.directory, files=args.file or None, platforms=args.platform, verbose=args.verbose, only_global=args.only_global, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, check_input_hash=args.check_input_hash, extra_flags=args.extra_flags, lockfile=args.lockfile, ) elif args.command == "pixi": # pragma: no cover _pixi_command( depth=args.depth, directory=args.directory, files=args.file or None, name=args.name, output=args.output, stdout=args.stdout, channels=args.channel or None, platforms=args.platform or None, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, verbose=args.verbose, ) elif args.command == "pip-compile": # pragma: no cover if args.platform and len(args.platform) > 1: print( "❌ The `pip-compile` command does not support multiple platforms.", ) sys.exit(1) platform = args.platform[0] if args.platform else identify_current_platform() _pip_compile_command( depth=args.depth, directory=args.directory, platform=platform, verbose=args.verbose, ignore_pins=args.ignore_pin, skip_dependencies=args.skip_dependency, overwrite_pins=args.overwrite_pin, extra_flags=args.extra_flags, output_file=args.output_file, ) elif args.command == "version": # pragma: no cover _print_versions() ================================================ FILE: unidep/_conda_env.py ================================================ """unidep - Unified Conda and Pip requirements management. Conda environment file generation functions. 
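
A minimal sketch of the intended flow (the inputs are assumed to come from
`unidep.parse_requirements`; the file name is illustrative):

    reqs = parse_requirements(Path("requirements.yaml"))
    spec = create_conda_env_specification(
        reqs.dependency_entries,
        reqs.channels,
        platforms=["linux-64"],
    )
    write_conda_environment_file(spec, "environment.yaml", name="myenv")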
""" from __future__ import annotations import sys from collections import defaultdict from copy import deepcopy from typing import TYPE_CHECKING, NamedTuple, cast from ruamel.yaml import YAML from ruamel.yaml.comments import CommentedMap, CommentedSeq from unidep._conflicts import ( VersionConflictError, _maybe_new_spec_with_combined_pinnings, ) from unidep._dependency_selection import ( collapse_selected_universals, select_conda_like_requirements, ) from unidep.platform_definitions import ( PLATFORM_SELECTOR_MAP, CondaPlatform, Platform, Spec, ) from unidep.utils import ( add_comment_to_file, build_pep508_environment_marker, ) if TYPE_CHECKING: from collections.abc import Sequence from pathlib import Path from unidep._dependencies_parsing import DependencyEntry if sys.version_info >= (3, 8): from typing import Literal, get_args else: # pragma: no cover from typing_extensions import Literal, get_args class CondaEnvironmentSpec(NamedTuple): """A conda environment.""" channels: list[str] platforms: list[Platform] conda: list[str | dict[str, str]] # actually a CommentedSeq[str | dict[str, str]] pip: list[str] pip_indices: Sequence[str] = () def _conda_sel(sel: str) -> CondaPlatform: """Return the allowed `sel(platform)` string.""" _platform = sel.split("-", 1)[0] assert _platform in get_args(CondaPlatform), f"Invalid platform: {_platform}" return cast("CondaPlatform", _platform) def _as_dependency_entries( entries: Sequence[DependencyEntry], ) -> list[DependencyEntry]: if isinstance(entries, dict): msg = ( "`create_conda_env_specification()` now requires dependency entries from " "`parse_requirements(...).dependency_entries`, not the output of " "`resolve_conflicts()`." ) raise TypeError(msg) return list(entries) def _normalize_pip_indices( pip_indices: Sequence[str] | None, ) -> tuple[str, ...]: if pip_indices is None: return () if isinstance(pip_indices, str): return (pip_indices,) return tuple(pip_indices) def _extract_conda_pip_dependencies( entries: list[DependencyEntry], platforms: list[Platform], ) -> tuple[ dict[str, dict[Platform | None, Spec]], dict[str, dict[Platform | None, Spec]], ]: """Extract dependencies using the shared conda-like selector.""" conda: dict[str, dict[Platform | None, Spec]] = {} pip: dict[str, dict[Platform | None, Spec]] = {} selected = collapse_selected_universals( select_conda_like_requirements(entries, platforms), platforms, ) for _platform, candidates in selected.items(): for candidate in candidates: if candidate.source == "conda": conda.setdefault(candidate.spec.name, {})[_platform] = candidate.spec else: pip.setdefault(candidate.spec.name, {})[_platform] = candidate.spec return conda, pip def _ensure_sel_representable( platform_to_spec: dict[Platform | None, Spec], ) -> None: """Ensure selected specs can be represented with `sel(...)` selectors.""" grouped: dict[CondaPlatform, list[tuple[Platform, Spec]]] = defaultdict(list) for _platform, spec in sorted(platform_to_spec.items()): assert _platform is not None grouped[_conda_sel(_platform)].append((_platform, spec)) for conda_platform, platform_specs in grouped.items(): keep_platform = platform_specs[0][0] unique_specs = list(dict.fromkeys(spec for _, spec in platform_specs)) if len(unique_specs) > 1: try: merged_spec = _maybe_new_spec_with_combined_pinnings(unique_specs) except VersionConflictError: msg = ( "Selected dependencies cannot be represented with `sel(...)` " f"for '{conda_platform}'. Use selector='comment' instead." 
) raise ValueError(msg) from None else: merged_spec = unique_specs[0] for _platform, _spec in platform_specs: if _platform != keep_platform: platform_to_spec.pop(_platform, None) platform_to_spec[keep_platform] = merged_spec def _add_comment(comment_seq: CommentedSeq, platform: Platform) -> None: comment = f"# [{PLATFORM_SELECTOR_MAP[platform][0]}]" comment_seq.yaml_add_eol_comment(comment, len(comment_seq) - 1) _LEGACY_SELECTOR_ARG_COUNT = 2 _LEGACY_FULL_ARG_COUNT = 3 def create_conda_env_specification( # noqa: C901, PLR0912, PLR0915 entries: Sequence[DependencyEntry], channels: list[str], *args: object, platforms: Sequence[Platform] | None = None, selector: Literal["sel", "comment"] = "sel", pip_indices: Sequence[str] | None = None, ) -> CondaEnvironmentSpec: """Create a conda environment specification from dependency entries. Preferred calling convention: `create_conda_env_specification(entries, channels, platforms, pip_indices=...)` For compatibility, the older positional style used during the original `pip_indices` branch development is also accepted: `create_conda_env_specification(entries, channels, pip_indices, platforms)` """ if platforms is not None: if len(args) > 1: msg = ( "Too many positional arguments for `create_conda_env_specification()`." ) raise TypeError(msg) if args: if pip_indices is not None: msg = "`pip_indices` was provided both positionally and by keyword." raise TypeError(msg) pip_indices = cast("Sequence[str]", args[0]) resolved_platforms = list(platforms) else: if not args: msg = "Missing required `platforms` argument." raise TypeError(msg) if len(args) == 1: resolved_platforms = list(cast("Sequence[Platform]", args[0])) elif len(args) == _LEGACY_SELECTOR_ARG_COUNT: if args[1] in ("sel", "comment"): resolved_platforms = list(cast("Sequence[Platform]", args[0])) selector = cast("Literal['sel', 'comment']", args[1]) else: if pip_indices is not None: msg = "`pip_indices` was provided both positionally and by keyword." raise TypeError(msg) pip_indices = cast("Sequence[str]", args[0]) resolved_platforms = list(cast("Sequence[Platform]", args[1])) elif len(args) == _LEGACY_FULL_ARG_COUNT: if pip_indices is not None: msg = "`pip_indices` was provided both positionally and by keyword." raise TypeError(msg) pip_indices = cast("Sequence[str]", args[0]) resolved_platforms = list(cast("Sequence[Platform]", args[1])) selector = cast("Literal['sel', 'comment']", args[2]) else: msg = ( "Too many positional arguments for `create_conda_env_specification()`."
) raise TypeError(msg) if selector not in ("sel", "comment"): # pragma: no cover msg = f"Invalid selector: {selector}, must be one of ['sel', 'comment']" raise ValueError(msg) entries = _as_dependency_entries(entries) conda, pip = _extract_conda_pip_dependencies(entries, resolved_platforms) normalized_pip_indices = _normalize_pip_indices(pip_indices) conda_deps: list[str | dict[str, str]] = CommentedSeq() pip_deps: list[str] = CommentedSeq() for platform_to_spec in conda.values(): if len(platform_to_spec) > 1 and selector == "sel": _ensure_sel_representable(platform_to_spec) for _platform, spec in sorted(platform_to_spec.items()): dep_str = spec.name_with_pin() if len(resolved_platforms) != 1 and _platform is not None: if selector == "sel": sel = _conda_sel(_platform) dep_str = {f"sel({sel})": dep_str} # type: ignore[assignment] conda_deps.append(dep_str) if selector == "comment": _add_comment(conda_deps, _platform) else: conda_deps.append(dep_str) for platform_to_spec in pip.values(): spec_to_platforms: dict[Spec, list[Platform | None]] = {} for _platform, spec in platform_to_spec.items(): spec_to_platforms.setdefault(spec, []).append(_platform) for spec, _platforms in spec_to_platforms.items(): dep_str = spec.name_with_pin(is_pip=True) if _platforms != [None] and len(resolved_platforms) != 1: if selector == "sel": marker = build_pep508_environment_marker(_platforms) # type: ignore[arg-type] dep_str = f"{dep_str}; {marker}" pip_deps.append(dep_str) else: assert selector == "comment" # We can only add comments with a single platform because # `conda-lock` doesn't implement logic, e.g., [linux or win] # should be spread into two lines, one with [linux] and the # other with [win]. for _platform in _platforms: pip_deps.append(dep_str) _add_comment(pip_deps, cast("Platform", _platform)) else: pip_deps.append(dep_str) return CondaEnvironmentSpec( channels, resolved_platforms, conda_deps, pip_deps, normalized_pip_indices, ) def write_conda_environment_file( env_spec: CondaEnvironmentSpec, output_file: str | Path | None = "environment.yaml", name: str = "myenv", *, verbose: bool = False, ) -> None: """Generate a conda environment.yaml file or print to stdout.""" resolved_dependencies = deepcopy(env_spec.conda) if env_spec.pip: resolved_dependencies.append({"pip": env_spec.pip}) # type: ignore[arg-type, dict-item] env_data = CommentedMap({"name": name}) if env_spec.channels: env_data["channels"] = env_spec.channels if env_spec.pip_indices: env_data["pip-repositories"] = list(env_spec.pip_indices) if resolved_dependencies: env_data["dependencies"] = resolved_dependencies if env_spec.platforms: env_data["platforms"] = env_spec.platforms yaml = YAML(typ="rt") yaml.default_flow_style = False yaml.width = 4096 yaml.indent(mapping=2, sequence=2, offset=2) if output_file: if verbose: print(f"📝 Generating environment file at `{output_file}`") with open(output_file, "w") as f: # noqa: PTH123 yaml.dump(env_data, f) if verbose: print("📝 Environment file generated successfully.") add_comment_to_file(output_file) else: yaml.dump(env_data, sys.stdout) ================================================ FILE: unidep/_conda_lock.py ================================================ """unidep - Unified Conda and Pip requirements management. This module provides the `unidep conda-lock` CLI command, used in `unidep._cli`. 
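
A rough sketch of the entry point this module exposes (all argument values
below are illustrative):

    conda_lock_command(
        depth=1,
        directory=Path("."),
        files=None,
        platforms=["linux-64"],
        verbose=False,
        only_global=False,
        check_input_hash=False,
        ignore_pins=[],
        skip_dependencies=[],
        overwrite_pins=[],
        extra_flags=[],
    )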
""" from __future__ import annotations import shutil import subprocess import sys import tempfile import urllib.request from collections import defaultdict from functools import partial from pathlib import Path from typing import TYPE_CHECKING, Any, NamedTuple from packaging.utils import canonicalize_name from ruamel.yaml import YAML from unidep._dependencies_parsing import find_requirements_files, parse_requirements from unidep._dependency_selection import ( collapse_selected_universals, select_conda_like_requirements, ) from unidep.utils import ( add_comment_to_file, remove_top_comments, warn, ) if TYPE_CHECKING: from unidep.platform_definitions import CondaPip, Platform if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal def _run_conda_lock( tmp_env: Path, conda_lock_output: Path, *, check_input_hash: bool = False, extra_flags: list[str], ) -> None: # pragma: no cover if shutil.which("conda-lock") is None: msg = ( "Cannot find `conda-lock`." " Please install it with `pip install conda-lock`, or" " `pipx install conda-lock`, or" " `conda install -c conda-forge conda-lock`." ) raise RuntimeError(msg) if not check_input_hash and conda_lock_output.exists(): print(f"🗑️ Removing existing `{conda_lock_output}`") conda_lock_output.unlink() cmd = [ "conda-lock", "lock", "--file", str(tmp_env), "--lockfile", str(conda_lock_output), *extra_flags, ] if check_input_hash: cmd.append("--check-input-hash") print(f"🔒 Locking dependencies with `{' '.join(cmd)}`\n") try: subprocess.run(cmd, check=True, text=True, capture_output=True) remove_top_comments(conda_lock_output) add_comment_to_file( conda_lock_output, extra_lines=[ "#", "# This environment can be installed with", "# `micromamba create -f conda-lock.yml -n myenv`", "# This file is a `conda-lock` file generated via `unidep`.", "# For details see https://conda.github.io/conda-lock/", ], ) except subprocess.CalledProcessError as e: print("❌ Error occurred:\n", e) print("Return code:", e.returncode) print("Output:", e.output) print("Error Output:", e.stderr) sys.exit(1) def _conda_lock_global( *, depth: int, directory: Path, files: list[Path] | None, platforms: list[Platform], verbose: bool, check_input_hash: bool, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], extra_flags: list[str], lockfile: str, ) -> Path: """Generate a conda-lock file for the global dependencies.""" from unidep._cli import _merge_command if files: directory = files[0].parent tmp_env = directory / "tmp.environment.yaml" conda_lock_output = directory / lockfile _merge_command( depth=depth, directory=directory, files=files, name="myenv", output=tmp_env, stdout=False, selector="comment", platforms=platforms, optional_dependencies=[], all_optional_dependencies=False, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, verbose=verbose, ) _run_conda_lock( tmp_env, conda_lock_output, check_input_hash=check_input_hash, extra_flags=extra_flags, ) print(f"✅ Global dependencies locked successfully in `{conda_lock_output}`.") return conda_lock_output class LockSpec(NamedTuple): """A specification of the lock file.""" packages: dict[tuple[CondaPip, Platform, str], dict[str, Any]] dependencies: dict[tuple[CondaPip, Platform, str], set[str]] def _parse_conda_lock_packages( conda_lock_packages: list[dict[str, Any]], ) -> LockSpec: deps: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = defaultdict( lambda: defaultdict(lambda: defaultdict(set)), ) def 
_recurse( package_name: str, resolved: dict[str, set[str]], dependencies: dict[str, set[str]], seen: set[str], ) -> set[str]: if package_name in resolved: return resolved[package_name] if package_name in seen: # Circular dependency detected return set() seen.add(package_name) all_deps = set(dependencies[package_name]) for dep in dependencies[package_name]: all_deps.update(_recurse(dep, resolved, dependencies, seen)) resolved[package_name] = all_deps seen.remove(package_name) return all_deps for p in conda_lock_packages: deps[p["manager"]][p["platform"]][p["name"]].update(p["dependencies"]) resolved: dict[CondaPip, dict[Platform, dict[str, set[str]]]] = {} for manager, platforms in deps.items(): resolved_manager = resolved.setdefault(manager, {}) for _platform, pkgs in platforms.items(): _resolved: dict[str, set[str]] = {} for package in list(pkgs): _recurse(package, _resolved, pkgs, set()) resolved_manager[_platform] = _resolved packages: dict[tuple[CondaPip, Platform, str], dict[str, Any]] = {} for p in conda_lock_packages: key = (p["manager"], p["platform"], p["name"]) assert key not in packages packages[key] = p # Flatten the `dependencies` dict to same format as `packages` dependencies = { (which, platform, name): deps for which, platforms in resolved.items() for platform, pkgs in platforms.items() for name, deps in pkgs.items() } return LockSpec(packages, dependencies) def _add_package_to_lock( *, name: str, which: CondaPip, platform: Platform, packages: dict[tuple[CondaPip, Platform, str], dict[str, Any]], locked: list[dict[str, Any]], locked_keys: set[tuple[CondaPip, Platform, str]], ) -> tuple[CondaPip, Platform, str] | None: key = _find_lock_key( name=name, which=which, platform=platform, packages=packages, ) if key is None: return None if key not in locked_keys: locked.append(packages[key]) locked_keys.add(key) # Add identifier to the set return key def _strip_pip_extras(name: str) -> str: if not name.endswith("]") or "[" not in name: return name return name.split("[", 1)[0] def _find_lock_key( *, name: str, which: CondaPip, platform: Platform, packages: dict[tuple[CondaPip, Platform, str], dict[str, Any]], ) -> tuple[CondaPip, Platform, str] | None: key = (which, platform, name) if key in packages: return key if which != "pip": return None normalized_name = canonicalize_name(_strip_pip_extras(name)) for _which, _platform, _name in packages: if _which != which or _platform != platform: continue if canonicalize_name(_strip_pip_extras(_name)) == normalized_name: return (_which, _platform, _name) return None def _add_package_with_dependencies_to_lock( *, name: str, which: CondaPip, platform: Platform, lock_spec: LockSpec, locked: list[dict[str, Any]], locked_keys: set[tuple[CondaPip, Platform, str]], missing_keys: set[tuple[CondaPip, Platform, str]], ) -> None: found_key = _find_lock_key( name=name, which=which, platform=platform, packages=lock_spec.packages, ) if found_key is None: missing_keys.add((which, platform, name)) return _add_package_to_lock( name=found_key[2], which=found_key[0], platform=found_key[1], packages=lock_spec.packages, locked=locked, locked_keys=locked_keys, ) for dep in lock_spec.dependencies.get(found_key, set()): if dep.startswith("__"): # pragma: no cover continue # Skip meta packages dep_key = _add_package_to_lock( name=dep, which=which, platform=platform, packages=lock_spec.packages, locked=locked, locked_keys=locked_keys, ) if dep_key is None: missing_keys.add((which, platform, dep)) def _handle_missing_keys( lock_spec: LockSpec, locked_keys: 
set[tuple[CondaPip, Platform, str]], missing_keys: set[tuple[CondaPip, Platform, str]], locked: list[dict[str, Any]], ) -> None: add_pkg = partial( _add_package_with_dependencies_to_lock, lock_spec=lock_spec, locked=locked, locked_keys=locked_keys, missing_keys=missing_keys, ) # Do not re-add packages with pip that are # already added with conda for which, _platform, name in locked_keys: if which == "conda": key = ("pip", _platform, name) missing_keys.discard(key) # type: ignore[arg-type] # Add missing pip packages using conda (if possible) for which, _platform, name in list(missing_keys): if which == "pip": missing_keys.discard((which, _platform, name)) add_pkg(name=name, which="conda", platform=_platform) if ("conda", _platform, name) in missing_keys: # If the package wasn't added, restore the missing key missing_keys.discard(("conda", _platform, name)) missing_keys.add(("pip", _platform, name)) if not missing_keys: return # Finally there might be some pip packages that are missing # because in the lock file they are installed with conda, however, # on Conda the name might be different than on PyPI. For example, # `msgpack` (pip) and `msgpack-python` (conda). options = { (which, platform, name): pkg for which, platform, name in missing_keys for (_which, _platform, _name), pkg in lock_spec.packages.items() if which == "pip" and _which == "conda" and platform == _platform and name in _name } for (which, _platform, name), pkg in options.items(): names = _download_and_get_package_names(pkg) if names is None: continue if name in names: add_pkg(name=pkg["name"], which=pkg["manager"], platform=pkg["platform"]) missing_keys.discard((which, _platform, name)) if missing_keys: print(f"❌ Missing keys {missing_keys}") def _conda_lock_subpackage( *, file: Path, lock_spec: LockSpec, channels: list[str], platforms: list[Platform], yaml: YAML | None, # Passing this to preserve order! ) -> Path: requirements = parse_requirements(file) locked: list[dict[str, Any]] = [] locked_keys: set[tuple[CondaPip, Platform, str]] = set() missing_keys: set[tuple[CondaPip, Platform, str]] = set() add_pkg = partial( _add_package_with_dependencies_to_lock, lock_spec=lock_spec, locked=locked, locked_keys=locked_keys, missing_keys=missing_keys, ) selected = collapse_selected_universals( select_conda_like_requirements(requirements.dependency_entries, platforms), platforms, ) for target_platform, candidates in selected.items(): candidate_platforms = ( platforms if target_platform is None else [target_platform] ) for candidate in candidates: if candidate.spec.name.startswith("__"): # pragma: no cover continue for candidate_platform in candidate_platforms: add_pkg( name=candidate.spec.name, which=candidate.source, platform=candidate_platform, ) _handle_missing_keys( lock_spec=lock_spec, locked_keys=locked_keys, missing_keys=missing_keys, locked=locked, ) # Sort locked packages by manager, name, platform locked = sorted(locked, key=lambda p: (p["manager"], p["name"], p["platform"])) # Sort dependencies within each package for package in locked: deps = package["dependencies"] if deps: package["dependencies"] = dict(sorted(deps.items())) if yaml is None: # pragma: no cover # When passing the same YAML instance that is used to load the file, # we preserve the order of the keys.
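        # Fallback when no shared YAML instance was passed in; round-trip
        # ("rt") mode still produces stable, human-readable output.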
yaml = YAML(typ="rt") yaml.default_flow_style = False yaml.width = 4096 yaml.representer.ignore_aliases = lambda *_: True # Disable anchors conda_lock_output = file.parent / "conda-lock.yml" metadata = { "content_hash": {p: "unidep-is-awesome" for p in platforms}, "channels": [{"url": c, "used_env_vars": []} for c in channels], "platforms": platforms, "sources": [str(file)], } with conda_lock_output.open("w") as fp: yaml.dump({"version": 1, "metadata": metadata, "package": locked}, fp) add_comment_to_file( conda_lock_output, extra_lines=[ "#", "# This environment can be installed with", "# `micromamba create -f conda-lock.yml -n myenv`", "# This file is a `conda-lock` file generated via `unidep`.", "# For details see https://conda.github.io/conda-lock/", ], ) return conda_lock_output def _download_and_get_package_names( package: dict[str, Any], component: Literal["info", "pkg"] | None = None, ) -> list[str] | None: try: import conda_package_handling.api except ImportError: # pragma: no cover print( "❌ Could not import `conda-package-handling` module." " Please install it with `pip install conda-package-handling`.", ) sys.exit(1) url = package["url"] with tempfile.TemporaryDirectory() as temp_dir: temp_path = Path(temp_dir) file_path = temp_path / Path(url).name urllib.request.urlretrieve(url, str(file_path)) # noqa: S310 conda_package_handling.api.extract( str(file_path), dest_dir=str(temp_path), components=component, ) if (temp_path / "site-packages").exists(): site_packages_path = temp_path / "site-packages" elif (temp_path / "lib").exists(): lib_path = temp_path / "lib" python_dirs = [ d for d in lib_path.iterdir() if d.is_dir() and d.name.startswith("python") ] if not python_dirs: return None site_packages_path = python_dirs[0] / "site-packages" else: return None if not site_packages_path.exists(): return None return [ d.name for d in site_packages_path.iterdir() if d.is_dir() and not d.name.endswith((".dist-info", ".egg-info")) ] def _conda_lock_subpackages( directory: Path, depth: int, conda_lock_file: str | Path, ) -> list[Path]: conda_lock_file = Path(conda_lock_file) with YAML(typ="rt") as yaml, conda_lock_file.open() as fp: data = yaml.load(fp) channels = [c["url"] for c in data["metadata"]["channels"]] platforms = data["metadata"]["platforms"] lock_spec = _parse_conda_lock_packages(data["package"]) lock_files: list[Path] = [] # Assumes that different platforms have the same versions found_files = find_requirements_files(directory, depth) for file in found_files: if file.parent == directory: # This is a `requirements.yaml` file in the root directory # for e.g., common packages, so skip it. 
continue sublock_file = _conda_lock_subpackage( file=file, lock_spec=lock_spec, channels=channels, platforms=platforms, yaml=yaml, ) print(f"📝 Generated lock file for `{file}`: `{sublock_file}`") lock_files.append(sublock_file) return lock_files def conda_lock_command( *, depth: int, directory: Path, files: list[Path] | None, platforms: list[Platform], verbose: bool, only_global: bool, check_input_hash: bool, ignore_pins: list[str], skip_dependencies: list[str], overwrite_pins: list[str], extra_flags: list[str], lockfile: str = "conda-lock.yml", ) -> None: """Generate a conda-lock file from a collection of `requirements.yaml` and/or `pyproject.toml` files.""" # noqa: E501 if extra_flags: assert extra_flags[0] == "--" extra_flags = extra_flags[1:] if verbose: print(f"📝 Extra flags for `conda-lock lock`: {extra_flags}") conda_lock_output = _conda_lock_global( depth=depth, directory=directory, files=files, platforms=platforms, verbose=verbose, check_input_hash=check_input_hash, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, extra_flags=extra_flags, lockfile=lockfile, ) if only_global or files: return sub_lock_files = _conda_lock_subpackages( directory=directory, depth=depth, conda_lock_file=conda_lock_output, ) mismatches = _check_consistent_lock_files( global_lock_file=conda_lock_output, sub_lock_files=sub_lock_files, ) if not mismatches: print("✅ Analyzed all lock files and found no inconsistencies.") elif len(mismatches) > 1: # pragma: no cover print("❌ Complete table of package version mismatches:") _mismatch_report(mismatches, raises=False) class Mismatch(NamedTuple): """A mismatch between a global and subpackage lock file.""" name: str version: str version_global: str platform: Platform lock_file: Path which: CondaPip def _check_consistent_lock_files( global_lock_file: Path, sub_lock_files: list[Path], ) -> list[Mismatch]: yaml = YAML(typ="safe") with global_lock_file.open() as fp: global_data = yaml.load(fp) global_packages: dict[str, dict[Platform, dict[CondaPip, str]]] = defaultdict( lambda: defaultdict(dict), ) for p in global_data["package"]: global_packages[p["name"]][p["platform"]][p["manager"]] = p["version"] mismatched_packages = [] for lock_file in sub_lock_files: with lock_file.open() as fp: data = yaml.load(fp) for p in data["package"]: name = p["name"] platform = p["platform"] version = p["version"] which = p["manager"] if global_packages.get(name, {}).get(platform, {}).get(which) == version: continue global_version = global_packages[name][platform][which] if global_version != version: mismatched_packages.append( Mismatch( name=name, version=version, version_global=global_version, platform=platform, lock_file=lock_file, which=which, ), ) return mismatched_packages def _format_table_row( row: list[str], widths: list[int], separator: str = " | ", ) -> str: # pragma: no cover """Format a row of the table with specified column widths.""" return separator.join(f"{cell:<{widths[i]}}" for i, cell in enumerate(row)) def _mismatch_report( mismatched_packages: list[Mismatch], *, raises: bool = False, ) -> None: # pragma: no cover if not mismatched_packages: return headers = [ "Subpackage", "Manager", "Package", "Version (Sub)", "Version (Global)", "Platform", ] def _to_seq(m: Mismatch) -> list[str]: return [ m.lock_file.parent.name, m.which, m.name, m.version, m.version_global, str(m.platform), ] column_widths = [len(header) for header in headers] for m in mismatched_packages: attrs = _to_seq(m) for i, attr in enumerate(attrs):
column_widths[i] = max(column_widths[i], len(attr)) # Create the table rows separator_line = [w * "-" for w in column_widths] table_rows = [ _format_table_row(separator_line, column_widths, separator="-+-"), _format_table_row(headers, column_widths), _format_table_row(["-" * width for width in column_widths], column_widths), ] for m in mismatched_packages: row = _to_seq(m) table_rows.append(_format_table_row(row, column_widths)) table_rows.append(_format_table_row(separator_line, column_widths, separator="-+-")) table = "\n".join(table_rows) full_error_message = ( "Version mismatches found between global and subpackage lock files:\n" + table + "\n\n‼️ You might want to pin some versions more strictly" " in your `requirements.yaml` and/or `pyproject.toml` files." ) if raises: raise RuntimeError(full_error_message) warn(full_error_message, stacklevel=2) ================================================ FILE: unidep/_conflicts.py ================================================ """unidep - Unified Conda and Pip requirements management. Version conflict detection and resolution. """ from __future__ import annotations import sys from collections import defaultdict from typing import TYPE_CHECKING from packaging import version from unidep.platform_definitions import Platform, Spec from unidep.utils import defaultdict_to_dict if sys.version_info >= (3, 8): from typing import get_args else: # pragma: no cover from typing_extensions import get_args if TYPE_CHECKING: from unidep.platform_definitions import CondaPip # Full PEP 440 + conda operator set, ordered longest-prefix-first for matching ALL_VERSION_OPERATORS: tuple[str, ...] = ( "===", "==", "~=", ">=", "<=", "!=", ">", "<", "=", ) # Subset used for conflict resolution (excludes PEP 440-only operators) VALID_OPERATORS = [op for op in ALL_VERSION_OPERATORS if op not in ("===", "==", "~=")] _REPO_URL = "https://github.com/basnijholt/unidep" def extract_version_operator(constraint: str) -> str: """Extract the version operator prefix from a constraint string. Returns the matched operator or "" if none matches. This is a pure extraction helper; it does not validate. """ constraint = constraint.strip() return next( (op for op in ALL_VERSION_OPERATORS if constraint.startswith(op)), "", ) def _prepare_specs_for_conflict_resolution( requirements: dict[str, list[Spec]], ) -> dict[str, dict[Platform | None, dict[CondaPip, list[Spec]]]]: """Prepare and group metadata for conflict resolution. This function groups metadata by platform and source for each package. :param requirements: Dictionary mapping package names to a list of Spec objects. :return: Dictionary mapping package names to grouped metadata.
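Illustrative sketch (hypothetical `Spec` values, not doctested):

    requirements = {"numpy": [Spec("numpy", "conda", ">=1.21", "abc12345")]}
    _prepare_specs_for_conflict_resolution(requirements)
    # -> {"numpy": {None: {"conda": [Spec("numpy", "conda", ">=1.21", ...)]}}}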
""" prepared_data = {} for package, spec_list in requirements.items(): grouped_specs: dict[Platform | None, dict[CondaPip, list[Spec]]] = defaultdict( lambda: defaultdict(list), ) for spec in spec_list: _platforms = spec.platforms() if _platforms is None: _platforms = [None] # type: ignore[list-item] for _platform in _platforms: grouped_specs[_platform][spec.which].append(spec) prepared_data[package] = grouped_specs return defaultdict_to_dict(prepared_data) def _pop_unused_platforms_and_maybe_expand_none( platform_data: dict[Platform | None, dict[CondaPip, list[Spec]]], platforms: list[Platform] | None, ) -> None: """Expand `None` to all platforms if there is a platform besides None.""" allowed_platforms = get_args(Platform) if platforms: allowed_platforms = platforms # type: ignore[assignment] # If there is a platform besides None, expand None to all platforms if len(platform_data) > 1 and None in platform_data: sources = platform_data.pop(None) for _platform in allowed_platforms: for which, specs in sources.items(): platform_data.setdefault(_platform, {}).setdefault(which, []).extend( specs, ) # Remove platforms that are not allowed to_pop = platform_data.keys() - allowed_platforms to_pop.discard(None) for _platform in to_pop: platform_data.pop(_platform) def _maybe_new_spec_with_combined_pinnings( specs: list[Spec], ) -> Spec: pinned_specs = [m for m in specs if m.pin is not None] if len(pinned_specs) == 1: return pinned_specs[0] if len(pinned_specs) > 1: first = pinned_specs[0] pins = [m.pin for m in pinned_specs] pin = combine_version_pinnings(pins, name=first.name) # type: ignore[arg-type] return Spec( name=first.name, which=first.which, pin=pin, identifier=first.identifier, # should I create a new one? ) # Flatten the list return specs[0] def _combine_pinning_within_platform( data: dict[Platform | None, dict[CondaPip, list[Spec]]], ) -> dict[Platform | None, dict[CondaPip, Spec]]: reduced_data: dict[Platform | None, dict[CondaPip, Spec]] = {} for _platform, packages in data.items(): reduced_data[_platform] = {} for which, specs in packages.items(): spec = _maybe_new_spec_with_combined_pinnings(specs) reduced_data[_platform][which] = spec return reduced_data class VersionConflictError(ValueError): """Raised when a version conflict is detected.""" def _add_optional_dependencies( requirements: dict[str, list[Spec]], optional_dependencies: dict[str, dict[str, list[Spec]]] | None, ) -> None: """Add optional dependencies to the requirements dictionary.""" if optional_dependencies is None: return for dependencies in optional_dependencies.values(): for pkg, specs in dependencies.items(): requirements.setdefault(pkg, []).extend(specs) def resolve_conflicts( requirements: dict[str, list[Spec]], platforms: list[Platform] | None = None, optional_dependencies: dict[str, dict[str, list[Spec]]] | None = None, ) -> dict[str, dict[Platform | None, dict[CondaPip, Spec]]]: """Resolve conflicts in a dict-based requirements model. This helper consolidates within-source duplicates on ``ParsedRequirements.requirements`` and preserves conda/pip alternatives in the returned metadata. CLI-facing renderers instead consume ``parse_requirements(...).dependency_entries`` and apply source selection later. Parameters ---------- requirements Dictionary mapping package names to a list of Spec objects. Typically ``ParsedRequirements.requirements`` is passed here, which is returned by `parse_requirements`. platforms List of platforms to resolve conflicts for. 
Typically ``ParsedRequirements.platforms`` is passed here, which is returned by `parse_requirements`. optional_dependencies Dictionary mapping package names to a dictionary of optional dependencies. Typically ``ParsedRequirements.optional_dependencies`` is passed here, which is returned by `parse_requirements`. If passing this argument, all optional dependencies will be added to the requirements dictionary. Pass `None` to ignore optional dependencies. Returns ------- Dictionary mapping package names to a dictionary of resolved metadata. The resolved metadata is a dictionary mapping platforms to a dictionary mapping sources to a single `Spec` object. """ if platforms and not set(platforms).issubset(get_args(Platform)): msg = f"Invalid platform: {platforms}, must contain only {get_args(Platform)}" raise VersionConflictError(msg) _add_optional_dependencies(requirements, optional_dependencies) prepared = _prepare_specs_for_conflict_resolution(requirements) for data in prepared.values(): _pop_unused_platforms_and_maybe_expand_none(data, platforms) return { pkg: _combine_pinning_within_platform(data) for pkg, data in prepared.items() } def _parse_pinning(pinning: str) -> tuple[str, version.Version]: """Separates the operator and the version number.""" pinning = pinning.strip() operator = extract_version_operator(pinning) if operator and operator in VALID_OPERATORS: version_part = pinning[len(operator) :].strip() if version_part: try: return operator, version.parse(version_part) except version.InvalidVersion: pass msg = f"Invalid version pinning: '{pinning}', must start with one of {VALID_OPERATORS}" # noqa: E501 raise VersionConflictError(msg) def _is_redundant(pinning: str, other_pinnings: list[str]) -> bool: """Determines if a version pinning is redundant given a list of other pinnings.""" op, version = _parse_pinning(pinning) for other in other_pinnings: other_op, other_version = _parse_pinning(other) if other == pinning: continue if op == "<" and ( (other_op == "<" and version >= other_version) or (other_op == "<=" and version > other_version) ): return True if op == "<=" and other_op in ["<", "<="] and version >= other_version: return True if op == ">" and ( (other_op == ">" and version <= other_version) or (other_op == ">=" and version < other_version) ): return True if op == ">=" and other_op in [">", ">="] and version <= other_version: return True return False def _is_valid_pinning(pinning: str) -> bool: """Checks if a version pinning string is valid.""" if any(op in pinning for op in VALID_OPERATORS): try: # Attempt to parse the version part of the pinning _parse_pinning(pinning) return True # noqa: TRY300 except VersionConflictError: # If parsing fails, the pinning is not valid return False # If the pinning doesn't contain any recognized operator, it's not valid return False def _deduplicate(pinnings: list[str]) -> list[str]: """Removes duplicate strings.""" return list(dict.fromkeys(pinnings)) # preserve order def _split_pinnings(pinnings: list[str]) -> list[str]: """Splits comma-separated pinning strings into individual pinnings.""" return [_pin.strip() for pin in pinnings for _pin in pin.split(",")] def combine_version_pinnings(pinnings: list[str], *, name: str | None = None) -> str: """Combines a list of version pinnings into a single string."""
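# Illustrative behavior (a sketch, not doctested; exact error text may differ):
#
#     combine_version_pinnings([">=1.2", "<3", ">=1.0"])  # -> ">=1.2,<3"
#     combine_version_pinnings(["=2.0", "<1"])  # raises VersionConflictError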
".join(VALID_OPERATORS) url = f"{_REPO_URL}/blob/main/README.md#supported-version-pinnings" msg = ( f"Invalid version pinning '{pin}' for '{name}'. " "UniDep supports only the following operators for combining pinnings: " f"{ops}. For complex pinnings (like VCS URLs, local paths, or build" " strings), ensure all pinnings are identical. Divergent complex" f" pinnings cannot be combined. See {url} for more information." ) raise VersionConflictError(msg) valid_pinnings = [p.replace(" ", "") for p in pinnings] exact_pinnings = [p for p in valid_pinnings if p.startswith("=")] if len(exact_pinnings) > 1: pinnings_str = ", ".join(exact_pinnings) msg = f"Multiple exact version pinnings found: {pinnings_str} for `{name}`" raise VersionConflictError(msg) err_msg = f"Contradictory version pinnings found for `{name}`" if exact_pinnings: exact_pin = exact_pinnings[0] exact_version = version.parse(exact_pin[1:]) for other_pin in valid_pinnings: if other_pin != exact_pin: op, ver = _parse_pinning(other_pin) if not ( (op == "<" and exact_version < ver) or (op == "<=" and exact_version <= ver) or (op == ">" and exact_version > ver) or (op == ">=" and exact_version >= ver) ): msg = f"{err_msg}: {exact_pin} and {other_pin}" raise VersionConflictError(msg) return exact_pin non_redundant_pinnings = [ pin for pin in valid_pinnings if not _is_redundant(pin, valid_pinnings) ] for i, pin in enumerate(non_redundant_pinnings): for other_pin in non_redundant_pinnings[i + 1 :]: op1, ver1 = _parse_pinning(pin) op2, ver2 = _parse_pinning(other_pin) msg = f"{err_msg}: {pin} and {other_pin}" # Check for direct contradictions like >2 and <1 if (op1 == ">" and op2 == "<" and ver1 >= ver2) or ( op1 == "<" and op2 == ">" and ver1 <= ver2 ): raise VersionConflictError(msg) # Check for contradictions involving inclusive bounds like >=2 and <1 if ( (op1 == ">=" and op2 == "<" and ver1 >= ver2) or (op1 == ">" and op2 == "<=" and ver1 >= ver2) or (op1 == "<=" and op2 == ">" and ver1 <= ver2) or (op1 == ">" and op2 == "<=" and ver1 >= ver2) ): raise VersionConflictError(msg) return ",".join(non_redundant_pinnings) ================================================ FILE: unidep/_dependencies_parsing.py ================================================ """unidep - Unified Conda and Pip requirements management. This module provides parsing of `requirements.yaml` and `pyproject.toml` files. 
""" from __future__ import annotations import functools import hashlib import os import sys from collections import defaultdict from pathlib import Path from typing import TYPE_CHECKING, Any, NamedTuple, cast from ruamel.yaml import YAML from ruamel.yaml.comments import CommentedMap, CommentedSeq from unidep.platform_definitions import Platform, Spec, platforms_from_selector from unidep.utils import ( LocalDependency, LocalDependencyUse, PathWithExtras, defaultdict_to_dict, is_pip_installable, parse_folder_or_filename, parse_package_str, selector_from_comment, split_path_and_extras, unidep_configured_in_toml, warn, ) if TYPE_CHECKING: if sys.version_info >= (3, 8): from typing import Literal else: # pragma: no cover from typing_extensions import Literal if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover import tomli as tomllib def find_requirements_files( base_dir: str | Path = ".", depth: int = 1, *, verbose: bool = False, ) -> list[Path]: """Scan a directory for `requirements.yaml` and `pyproject.toml` files.""" base_path = Path(base_dir) found_files = [] # Define a helper function to recursively scan directories def _scan_dir(path: Path, current_depth: int) -> None: if verbose: print(f"🔍 Scanning in `{path}` at depth {current_depth}") if current_depth > depth: return for child in sorted(path.iterdir()): if child.is_dir(): _scan_dir(child, current_depth + 1) elif child.name == "requirements.yaml": found_files.append(child) if verbose: print(f'🔍 Found `"requirements.yaml"` at `{child}`') elif child.name == "pyproject.toml" and unidep_configured_in_toml(child): if verbose: print(f'🔍 Found `"pyproject.toml"` with dependencies at `{child}`') found_files.append(child) _scan_dir(base_path, 0) return sorted(found_files) def _extract_first_comment( commented_map: CommentedMap, index_or_key: int | str, ) -> str | None: """Extract the first comment from a CommentedMap.""" comments = commented_map.ca.items.get(index_or_key, None) if comments is None: return None comment_strings = next( c.value.split("\n")[0].rstrip().lstrip() for c in comments if c is not None ) if not comment_strings: # empty string return None return "".join(comment_strings) def _identifier(identifier: int, selector: str | None) -> str: """Return a unique identifier based on the comment.""" platforms = None if selector is None else tuple(platforms_from_selector(selector)) data_str = f"{identifier}-{platforms}" # Hash using SHA256 and take the first 8 characters for a shorter hash return hashlib.sha256(data_str.encode()).hexdigest()[:8] def _parse_dependency( dependency: str, dependencies: CommentedMap, index_or_key: int | str, which: Literal["conda", "pip", "both"], identifier: int, ignore_pins: list[str], overwrite_pins: dict[str, str | None], skip_dependencies: list[str], ) -> list[Spec]: name, pin, selector = parse_package_str(dependency) if name in ignore_pins: pin = None if name in skip_dependencies: return [] if name in overwrite_pins: pin = overwrite_pins[name] comment = ( _extract_first_comment(dependencies, index_or_key) if isinstance(dependencies, (CommentedMap, CommentedSeq)) else None ) if comment and selector is None: selector = selector_from_comment(comment) identifier_hash = _identifier(identifier, selector) if which == "both": return [ Spec(name, "conda", pin, identifier_hash, selector), Spec(name, "pip", pin, identifier_hash, selector), ] return [Spec(name, which, pin, identifier_hash, selector)] class DependencyOrigin(NamedTuple): """Origin information for a parsed dependency entry.""" 
source_file: Path dependency_index: int optional_group: str | None = None local_dependency_chain: tuple[Path, ...] = () class DependencyEntry(NamedTuple): """One original dependency declaration with optional conda/pip alternatives.""" identifier: str selector: str | None conda: Spec | None pip: Spec | None origin: DependencyOrigin class ParsedRequirements(NamedTuple): """Requirements with comments.""" channels: list[str] platforms: list[Platform] requirements: dict[str, list[Spec]] optional_dependencies: dict[str, dict[str, list[Spec]]] dependency_entries: list[DependencyEntry] optional_dependency_entries: dict[str, list[DependencyEntry]] pip_indices: tuple[str, ...] = () class Requirements(NamedTuple): """Requirements as CommentedSeq.""" # mypy doesn't support CommentedSeq[str], so we use list[str] instead. channels: list[str] # actually a CommentedSeq[str] conda: list[str] # actually a CommentedSeq[str] pip: list[str] # actually a CommentedSeq[str] class _LoadedRequirementData(NamedTuple): data: dict[str, Any] path_with_extras: PathWithExtras local_dependency_chain: tuple[Path, ...] def _parse_overwrite_pins(overwrite_pins: list[str]) -> dict[str, str | None]: """Parse overwrite pins.""" result = {} for overwrite_pin in overwrite_pins: pkg = parse_package_str(overwrite_pin) result[pkg.name] = pkg.pin return result def _collect_pip_indices(data: dict[str, Any]) -> list[str]: """Collect pip index URLs from the unidep config.""" indices: list[str] = [] if "pip_indices" not in data: return indices value = data["pip_indices"] if isinstance(value, str): values = [value] elif isinstance(value, list): values = value else: msg = "`pip_indices` must be a string or a list of strings." raise TypeError(msg) for index in values: if not isinstance(index, str): msg = "`pip_indices` entries must be strings." raise TypeError(msg) if index and index not in indices: indices.append(index) return indices @functools.lru_cache def _load(p: Path, yaml: YAML) -> dict[str, Any]: if p.suffix == ".toml": with p.open("rb") as f: pyproject = tomllib.load(f) project_dependencies = pyproject.get("project", {}).get("dependencies", []) unidep_cfg = pyproject["tool"]["unidep"] if not project_dependencies: return unidep_cfg unidep_dependencies = unidep_cfg.setdefault("dependencies", []) project_dependency_handling = unidep_cfg.get( "project_dependency_handling", "ignore", ) _add_project_dependencies( project_dependencies, unidep_dependencies, project_dependency_handling, ) return unidep_cfg with p.open() as f: return yaml.load(f) def _add_project_dependencies( project_dependencies: list[str], unidep_dependencies: list[dict[str, str] | str], project_dependency_handling: Literal["same-name", "pip-only", "ignore"], ) -> None: """Add project dependencies to unidep dependencies based on the chosen handling.""" if project_dependency_handling == "same-name": unidep_dependencies.extend(project_dependencies) elif project_dependency_handling == "pip-only": unidep_dependencies.extend([{"pip": dep} for dep in project_dependencies]) elif project_dependency_handling != "ignore": msg = ( f"Invalid `project_dependency_handling` value: {project_dependency_handling}." # noqa: E501 " Must be one of 'same-name', 'pip-only', 'ignore'." 
) raise ValueError(msg) def _parse_local_dependency_item(item: str | dict[str, str]) -> LocalDependency: """Parse a single local dependency item into a LocalDependency object.""" if isinstance(item, str): return LocalDependency(local=item, pypi=None) if isinstance(item, dict): if "local" not in item: msg = "Dictionary-style local dependency must have a 'local' key" raise ValueError(msg) use = _normalize_local_dependency_use(item.get("use")) pypi_value = item.get("pypi") if use == "pypi" and not pypi_value: msg = "Local dependency with `use: pypi` must specify a `pypi` alternative." raise ValueError(msg) return LocalDependency(local=item["local"], pypi=pypi_value, use=use) msg = f"Invalid local dependency format: {item}" raise TypeError(msg) def _normalize_local_dependency_use(use_value: str | None) -> LocalDependencyUse: if use_value is None: return "local" normalized = use_value.strip().lower() valid = {"local", "pypi", "skip"} if normalized not in valid: options = ", ".join(sorted(valid)) msg = f"Invalid `use` value `{use_value}`. Supported values: {options}." raise ValueError(msg) return cast("LocalDependencyUse", normalized) def get_local_dependencies(data: dict[str, Any]) -> list[LocalDependency]: """Get `local_dependencies` from a `requirements.yaml` or `pyproject.toml` file.""" raw_deps = [] if "local_dependencies" in data: raw_deps = data["local_dependencies"] elif "includes" in data: warn( "⚠️ You are using `includes` in `requirements.yaml` or `pyproject.toml`" " `[tool.unidep]`, which is deprecated since 0.42.0 and has been renamed to" " `local_dependencies`.", category=DeprecationWarning, stacklevel=2, ) raw_deps = data["includes"] return [_parse_local_dependency_item(item) for item in raw_deps] def _to_path_with_extras( paths: list[Path], extras: list[list[str]] | Literal["*"] | None, ) -> list[PathWithExtras]: if isinstance(extras, (list, tuple)) and len(extras) != len(paths): msg = ( f"Length of `extras` ({len(extras)}) does not match length" f" of `paths` ({len(paths)})." ) raise ValueError(msg) paths_with_extras = [parse_folder_or_filename(p) for p in paths] if extras is None: return paths_with_extras assert extras is not None if any(p.extras for p in paths_with_extras): msg = ( "Cannot specify an `extras` list when paths already include extras" " like `path/to/project[extra1,extra2]`. Either pass pure paths such as" " `path/to/project` and set `extras`, or embed the extras in the paths" " and leave `extras` as `None`." ) raise ValueError(msg) if extras == "*": extras = [["*"]] * len(paths) # type: ignore[list-item] return [PathWithExtras(p.path, e) for p, e in zip(paths_with_extras, extras)] def _update_data_structures( *, path_with_extras: PathWithExtras, loaded_data: list[_LoadedRequirementData], # modified in place all_extras: list[list[str]], # modified in place seen: set[PathWithExtras], # modified in place yaml: YAML, is_nested: bool, local_dependency_overrides: dict[Path, LocalDependency], local_dependency_chain: tuple[Path, ...]
= (), include_local_dependencies: bool = True, verbose: bool = False, ) -> None: if verbose: print(f"📄 Parsing `{path_with_extras.path_with_extras}`") data = _load(path_with_extras.path, yaml) loaded_data.append( _LoadedRequirementData( data=data, path_with_extras=path_with_extras, local_dependency_chain=local_dependency_chain, ), ) _move_local_optional_dependencies_to_local_dependencies( data=data, # modified in place path_with_extras=path_with_extras, verbose=verbose, ) if not is_nested: all_extras.append(path_with_extras.extras) else: # When nested, the extras that are specified in the # local_dependencies section should be moved to the main dependencies # because they are not optional if specified in the file. Only # the top-level extras are optional. all_extras.append([]) _move_optional_dependencies_to_dependencies( data=data, # modified in place path_with_extras=path_with_extras, verbose=verbose, ) seen.add(path_with_extras.resolved()) # Handle "local_dependencies" (or old name "includes", changed in 0.42.0) for effective_local_dep in _effective_local_dependencies( data=data, base_dir=path_with_extras.path.parent, overrides=local_dependency_overrides, ): if effective_local_dep.use == "skip": continue if effective_local_dep.use == "pypi": _append_pip_dependency_from_local( data=data, local_dependency=effective_local_dep, ) continue if not include_local_dependencies: continue # NOTE: The current function calls _add_local_dependencies, # which calls the current function recursively _add_local_dependencies( local_dependency=effective_local_dep.local, path_with_extras=path_with_extras, loaded_data=loaded_data, # modified in place all_extras=all_extras, # modified in place seen=seen, # modified in place yaml=yaml, local_dependency_overrides=local_dependency_overrides, local_dependency_chain=( *local_dependency_chain, path_with_extras.path.resolve(), ), include_local_dependencies=include_local_dependencies, verbose=verbose, ) def _move_optional_dependencies_to_dependencies( data: dict[str, Any], path_with_extras: PathWithExtras, *, verbose: bool = False, ) -> None: optional_dependencies = data.pop("optional_dependencies", {}) for extra in path_with_extras.extras: if extra == "*": # If "*" is specified, include all optional dependencies for opt_deps in optional_dependencies.values(): data.setdefault("dependencies", []).extend(opt_deps) if verbose: print( "📄 Moving all optional dependencies to main dependencies" f" for `{path_with_extras.path_with_extras}`", ) elif extra in optional_dependencies: data.setdefault("dependencies", []).extend(optional_dependencies[extra]) if verbose: print( f"📄 Moving `{extra}` optional dependencies to main dependencies" f" for `{path_with_extras.path_with_extras}`", ) def _move_local_optional_dependencies_to_local_dependencies( *, data: dict[str, Any], # modified in place path_with_extras: PathWithExtras, verbose: bool = False, ) -> None: # Move local dependencies from `optional_dependencies` to `local_dependencies` extras = path_with_extras.extras if "*" in extras: extras = list(data.get("optional_dependencies", {}).keys()) optional_dependencies = data.get("optional_dependencies", {}) for extra in extras: moved = set() for dep in optional_dependencies.get(extra, []): if isinstance(dep, dict): # This is a {"pip": "package"} and/or {"conda": "package"} dependency continue if _str_is_path_like(dep): if verbose: print( f"📄 Moving `{dep}` from the `{extra}` section in" " `optional_dependencies` to `local_dependencies`", ) data.setdefault("local_dependencies", 
[]).append(dep) moved.add(dep) for dep in moved: extras = optional_dependencies[extra] # key must exist if moved non-empty extras.pop(extras.index(dep)) # Remove empty optional_dependencies sections to_delete = [extra for extra, deps in optional_dependencies.items() if not deps] for extra in to_delete: if verbose: print(f"📄 Removing empty `{extra}` section from `optional_dependencies`") optional_dependencies.pop(extra) def _resolve_local_dependency_path(base_dir: Path, local: str) -> Path: local_path, _ = split_path_and_extras(local) return (base_dir / local_path).resolve() def _try_parse_local_dependency_requirement_file( *, base_dir: Path, local_dependency: str, ) -> PathWithExtras | None: """Return managed requirements file for a local dependency, if present.""" try: requirements_dep_file = parse_folder_or_filename(base_dir / local_dependency) except FileNotFoundError: return None if requirements_dep_file.path.suffix in (".whl", ".zip"): return None return requirements_dep_file def _apply_local_dependency_override( *, local_dependency: LocalDependency, base_dir: Path, overrides: dict[Path, LocalDependency], ) -> LocalDependency: try: resolved_path = _resolve_local_dependency_path(base_dir, local_dependency.local) except (OSError, RuntimeError, ValueError): # pragma: no cover resolved_path = None if local_dependency.use != "local" and resolved_path is not None: overrides[resolved_path] = local_dependency return local_dependency if ( local_dependency.use == "local" and resolved_path is not None and resolved_path in overrides ): override = overrides[resolved_path] return LocalDependency( local=local_dependency.local, pypi=local_dependency.pypi or override.pypi, use=override.use, ) return local_dependency def _effective_local_dependencies( *, data: dict[str, Any], base_dir: Path, overrides: dict[Path, LocalDependency], ) -> list[LocalDependency]: """Return local dependencies after applying global ``use`` overrides.""" local_dependencies = get_local_dependencies(data) for local_dep_obj in local_dependencies: if local_dep_obj.use != "local": _apply_local_dependency_override( local_dependency=local_dep_obj, base_dir=base_dir, overrides=overrides, ) return [ _apply_local_dependency_override( local_dependency=local_dep_obj, base_dir=base_dir, overrides=overrides, ) for local_dep_obj in local_dependencies ] def _append_pip_dependency_from_local( *, data: dict[str, Any], local_dependency: LocalDependency, ) -> None: assert local_dependency.pypi is not None dependency_entry: str | dict[str, str] dependency_entry = {"pip": local_dependency.pypi} data.setdefault("dependencies", []).append(dependency_entry) def _add_local_dependencies( *, local_dependency: str, path_with_extras: PathWithExtras, loaded_data: list[_LoadedRequirementData], all_extras: list[list[str]], seen: set[PathWithExtras], yaml: YAML, local_dependency_overrides: dict[Path, LocalDependency], local_dependency_chain: tuple[Path, ...] = (), include_local_dependencies: bool = True, verbose: bool = False, ) -> None: requirements_dep_file = _try_parse_local_dependency_requirement_file( base_dir=path_with_extras.path.parent, local_dependency=local_dependency, ) if requirements_dep_file is None: local_path, _ = split_path_and_extras(local_dependency) abs_local = (path_with_extras.path.parent / local_path).resolve() if verbose and abs_local.suffix in (".whl", ".zip") and abs_local.exists(): print( f"⚠️ Local dependency `{local_dependency}` is a wheel or zip file. 
" "Skipping parsing, but it will be installed by pip if " "`--skip-local` is not set. Note that unidep will not " "detect its dependencies.", ) return if requirements_dep_file.resolved() in seen: return # Avoids circular local_dependencies if verbose: print(f"📄 Parsing `{local_dependency}` from `local_dependencies`") _update_data_structures( path_with_extras=requirements_dep_file, loaded_data=loaded_data, # modified in place all_extras=all_extras, # modified in place seen=seen, # modified in place yaml=yaml, verbose=verbose, is_nested=True, local_dependency_overrides=local_dependency_overrides, local_dependency_chain=local_dependency_chain, include_local_dependencies=include_local_dependencies, ) def parse_requirements( *paths: Path, ignore_pins: list[str] | None = None, overwrite_pins: list[str] | None = None, skip_dependencies: list[str] | None = None, verbose: bool = False, extras: list[list[str]] | Literal["*"] | None = None, include_local_dependencies: bool = True, ) -> ParsedRequirements: """Parse a list of `requirements.yaml` or `pyproject.toml` files. Parameters ---------- paths Paths to `requirements.yaml` or `pyproject.toml` files. ignore_pins List of package names to ignore pins for. overwrite_pins List of package names with pins to overwrite. skip_dependencies List of package names to skip. verbose Whether to print verbose output. extras List of lists of extras to include. The outer list corresponds to the `requirements.yaml` or `pyproject.toml` files, the inner list to the extras to include for that file. If "*", all extras are included, if None, no extras are included. include_local_dependencies Whether local dependencies should be recursively parsed and merged into the result. When False, local dependencies with `use: pypi` are still translated to pip dependencies, but `use: local` entries are not traversed. """ paths_with_extras = _to_path_with_extras(paths, extras) # type: ignore[arg-type] ignore_pins = ignore_pins or [] skip_dependencies = skip_dependencies or [] overwrite_pins_map = _parse_overwrite_pins(overwrite_pins or []) # `loaded_data` and `all_extras` are lists of the same length loaded_data: list[_LoadedRequirementData] = [] all_extras: list[list[str]] = [] seen: set[PathWithExtras] = set() local_dependency_overrides: dict[Path, LocalDependency] = {} yaml = YAML(typ="rt") # Might be unused if all are TOML files for path_with_extras in paths_with_extras: _update_data_structures( path_with_extras=path_with_extras, loaded_data=loaded_data, # modified in place all_extras=all_extras, # modified in place seen=seen, # modified in place yaml=yaml, verbose=verbose, is_nested=False, local_dependency_overrides=local_dependency_overrides, include_local_dependencies=include_local_dependencies, ) assert len(loaded_data) == len(all_extras) # Parse the requirements from loaded data requirements: dict[str, list[Spec]] = defaultdict(list) optional_dependencies: dict[str, dict[str, list[Spec]]] = defaultdict( lambda: defaultdict(list), ) dependency_entries: list[DependencyEntry] = [] optional_dependency_entries: dict[str, list[DependencyEntry]] = defaultdict(list) channels: set[str] = set() pip_indices: list[str] = [] # Preserve order, first is primary platforms: set[Platform] = set() identifier = -1 for loaded, _extras in zip(loaded_data, all_extras): data = loaded.data channels.update(data.get("channels", [])) # Collect pip_indices, maintaining order and avoiding duplicates. 
for index in _collect_pip_indices(data): if index and index not in pip_indices: pip_indices.append(index) platforms.update(data.get("platforms", [])) if "dependencies" in data: identifier = _add_dependencies( data["dependencies"], requirements, # modified in place dependency_entries, # modified in place identifier, ignore_pins, overwrite_pins_map, skip_dependencies, source_file=loaded.path_with_extras.path, local_dependency_chain=loaded.local_dependency_chain, ) for opt_name, opt_deps in data.get("optional_dependencies", {}).items(): if opt_name in _extras or "*" in _extras: identifier = _add_dependencies( opt_deps, optional_dependencies[opt_name], # modified in place optional_dependency_entries[opt_name], # modified in place identifier, ignore_pins, overwrite_pins_map, skip_dependencies, is_optional=True, optional_group=opt_name, source_file=loaded.path_with_extras.path, local_dependency_chain=loaded.local_dependency_chain, ) return ParsedRequirements( sorted(channels), sorted(platforms), dict(requirements), defaultdict_to_dict(optional_dependencies), dependency_entries, defaultdict_to_dict(optional_dependency_entries), tuple(pip_indices), ) def _str_is_path_like(s: str) -> bool: """Check if a string is path-like.""" return os.path.sep in s or "/" in s or s.startswith(".") def _check_allowed_local_dependency(name: str, is_optional: bool) -> None: # noqa: FBT001 if _str_is_path_like(name): # There should not be path-like dependencies in the optional_dependencies # section after _move_local_optional_dependencies_to_local_dependencies. assert not is_optional msg = ( f"Local dependencies (`{name}`) are not allowed in `dependencies`." " Use the `local_dependencies` section instead." ) raise ValueError(msg) def _add_dependencies( dependencies: list[str], requirements: dict[str, list[Spec]], # modified in place dependency_entries: list[DependencyEntry], # modified in place identifier: int, ignore_pins: list[str], overwrite_pins_map: dict[str, str | None], skip_dependencies: list[str], *, is_optional: bool = False, optional_group: str | None = None, source_file: Path, local_dependency_chain: tuple[Path, ...] 
= (), ) -> int: for i, dep in enumerate(dependencies): identifier += 1 origin = DependencyOrigin( source_file=source_file, dependency_index=i + 1, optional_group=optional_group, local_dependency_chain=local_dependency_chain, ) if isinstance(dep, str): specs = _parse_dependency( dep, dependencies, i, "both", identifier, ignore_pins, overwrite_pins_map, skip_dependencies, ) if not specs: continue for spec in specs: _check_allowed_local_dependency(spec.name, is_optional) requirements[spec.name].append(spec) dependency_entries.append( DependencyEntry( identifier=specs[0].identifier or _identifier(identifier, specs[0].selector), selector=specs[0].selector, conda=next((spec for spec in specs if spec.which == "conda"), None), pip=next((spec for spec in specs if spec.which == "pip"), None), origin=origin, ), ) continue assert isinstance(dep, dict) conda_spec: Spec | None = None pip_spec: Spec | None = None for which in ["conda", "pip"]: if which in dep: specs = _parse_dependency( dep[which], dep, which, which, # type: ignore[arg-type] identifier, ignore_pins, overwrite_pins_map, skip_dependencies, ) if not specs: continue for spec in specs: _check_allowed_local_dependency(spec.name, is_optional) requirements[spec.name].append(spec) if spec.which == "conda": conda_spec = spec else: pip_spec = spec if conda_spec is not None or pip_spec is not None: identifier_hash = ( conda_spec.identifier if conda_spec is not None else pip_spec.identifier # type: ignore[union-attr] ) selector = ( conda_spec.selector if conda_spec is not None and conda_spec.selector is not None else pip_spec.selector if pip_spec is not None else None ) dependency_entries.append( DependencyEntry( identifier=identifier_hash or _identifier(identifier, selector), selector=selector, conda=conda_spec, pip=pip_spec, origin=origin, ), ) return identifier # Alias for backwards compatibility parse_yaml_requirements = parse_requirements def _extract_local_dependencies( # noqa: PLR0912 path: Path, base_path: Path, processed: set[Path], dependencies: dict[str, set[str]], *, check_pip_installable: bool = True, verbose: bool = False, raise_if_missing: bool = True, warn_non_managed: bool = True, local_dependency_overrides: dict[Path, LocalDependency], ) -> None: path, extras = parse_folder_or_filename(path) if path in processed: return processed.add(path) yaml = YAML(typ="safe") data = _load(path, yaml) _move_local_optional_dependencies_to_local_dependencies( data=data, # modified in place path_with_extras=PathWithExtras(path, extras), verbose=verbose, ) for effective_local_dep in _effective_local_dependencies( data=data, base_dir=path.parent, overrides=local_dependency_overrides, ): if effective_local_dep.use != "local": continue local_dependency = effective_local_dep.local assert not os.path.isabs(local_dependency) # noqa: PTH117 local_path, extras = split_path_and_extras(local_dependency) abs_local = (path.parent / local_path).resolve() if abs_local.suffix in (".whl", ".zip"): if verbose: print(f"🔗 Adding `{local_dependency}` from `local_dependencies`") dependencies[str(base_path)].add(str(abs_local)) continue if not abs_local.exists(): if raise_if_missing: msg = f"File `{abs_local}` not found." raise FileNotFoundError(msg) continue try: requirements_path = parse_folder_or_filename(abs_local).path except FileNotFoundError: # Means that this is a local package that is not managed by unidep. 
if is_pip_installable(abs_local): dependencies[str(base_path)].add(str(abs_local)) if warn_non_managed: # We do not need to emit this warning when `pip install` is called warn( f"⚠️ Installing a local dependency (`{abs_local.name}`) which" " is not managed by unidep; this will skip all of its" " dependencies, i.e., it will call `pip install` with" " `--no-deps`. To properly manage this dependency," " add a `requirements.yaml` or `pyproject.toml` file with" " `[tool.unidep]` in its directory.", ) elif _is_empty_folder(abs_local): msg = ( f"`{local_dependency}` in `local_dependencies` is not pip" " installable because it is an empty folder. Is it perhaps" " an uninitialized Git submodule? If so, initialize it with" " `git submodule update --init --recursive`. Otherwise," " remove it from `local_dependencies`." ) raise RuntimeError(msg) from None elif _is_empty_git_submodule(abs_local): # Extra check for empty Git submodules (common problem folks run into) msg = ( f"`{local_dependency}` in `local_dependencies` is not installable" " by pip because it is an empty Git submodule. Either remove it" " from `local_dependencies` or fetch the submodule with" " `git submodule update --init --recursive`." ) raise RuntimeError(msg) from None else: msg = ( f"`{local_dependency}` in `local_dependencies` is not pip" " installable nor is it managed by unidep. Remove it" " from `local_dependencies`." ) raise RuntimeError(msg) from None continue project_path = str(requirements_path.parent) if project_path == str(base_path): continue if not check_pip_installable or is_pip_installable(requirements_path.parent): dependencies[str(base_path)].add(project_path) if verbose: print(f"🔗 Adding `{requirements_path}` from `local_dependencies`") _extract_local_dependencies( requirements_path, base_path, processed, dependencies, check_pip_installable=check_pip_installable, verbose=verbose, raise_if_missing=raise_if_missing, warn_non_managed=warn_non_managed, local_dependency_overrides=local_dependency_overrides, ) def parse_local_dependencies( *paths: Path, check_pip_installable: bool = True, verbose: bool = False, raise_if_missing: bool = True, warn_non_managed: bool = True, ) -> dict[Path, list[Path]]: """Extract local project dependencies from a list of `requirements.yaml` or `pyproject.toml` files. Works by loading the specified `local_dependencies` list. Returns a dictionary mapping each project folder to the list of `Path`s of its local dependency folders. """ # noqa: E501 dependencies: dict[str, set[str]] = defaultdict(set) local_dependency_overrides: dict[Path, LocalDependency] = {} for p in paths: if verbose: print(f"🔗 Analyzing dependencies in `{p}`") base_path = p.resolve().parent _extract_local_dependencies( path=p, base_path=base_path, processed=set(), dependencies=dependencies, check_pip_installable=check_pip_installable, verbose=verbose, raise_if_missing=raise_if_missing, warn_non_managed=warn_non_managed, local_dependency_overrides=local_dependency_overrides, ) return { Path(k): sorted({Path(v) for v in v_set}) for k, v_set in sorted(dependencies.items()) } def yaml_to_toml(yaml_path: Path) -> str: """Converts a `requirements.yaml` file to TOML format.""" try: import tomli_w except ImportError: # pragma: no cover msg = ( "❌ `tomli_w` is required to convert YAML to TOML." " Install it with `pip install tomli_w`."
) raise ImportError(msg) from None yaml = YAML(typ="rt") data = _load(yaml_path, yaml) data.pop("name", None) dependencies = data.get("dependencies", []) for i, dep in enumerate(dependencies): if isinstance(dep, str): comment = _extract_first_comment(dependencies, i) if comment is not None: selector = selector_from_comment(comment) if selector is not None: dependencies[i] = f"{dep}:{selector}" continue assert isinstance(dep, dict) for which in ["conda", "pip"]: if which in dep: comment = _extract_first_comment(dep, which) if comment is not None: selector = selector_from_comment(comment) if selector is not None: dep[which] = f"{dep[which]}:{selector}" return tomli_w.dumps({"tool": {"unidep": data}}) def _is_empty_git_submodule(path: Path) -> bool: """Checks if the given path is an empty Git submodule.""" if not path.is_dir(): return False git_file = path / ".git" if not git_file.exists() or not git_file.is_file(): return False # Check if it's empty (apart from the .git file) return len(list(path.iterdir())) == 1 # Only .git should be present def _is_empty_folder(path: Path) -> bool: """Checks if the given path is an empty folder.""" return not any(path.iterdir()) ================================================ FILE: unidep/_dependency_selection.py ================================================ """Shared conda/pip dependency selection for CLI-facing outputs.""" from __future__ import annotations import math from dataclasses import dataclass from typing import TYPE_CHECKING, Optional, Tuple, cast from packaging.specifiers import InvalidSpecifier, Specifier from packaging.utils import canonicalize_name from packaging.version import Version from unidep._conflicts import ( VersionConflictError, combine_version_pinnings, extract_version_operator, ) from unidep.platform_definitions import ( PLATFORM_SELECTOR_MAP, CondaPip, Platform, Spec, ) if TYPE_CHECKING: from collections.abc import Iterable, Sequence from unidep._dependencies_parsing import DependencyEntry, DependencyOrigin TargetPlatform = Optional[Platform] FamilyKey = Tuple[Optional[str], Optional[str]] @dataclass(frozen=True) class SourceRequirement: source: CondaPip spec: Spec family_key: FamilyKey base_name: str normalized_name: str extras: tuple[str, ...] declared_platforms: tuple[Platform, ...] | None origin: DependencyOrigin @dataclass(frozen=True) class MergedSourceCandidate: source: CondaPip spec: Spec normalized_name: str family_keys: tuple[FamilyKey, ...] requirements: tuple[SourceRequirement, ...] declared_scopes: tuple[tuple[Platform, ...] | None, ...] 
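# Illustrative pairing (hypothetical values): a family such as conda
# `msgpack-python` / pip `msgpack` yields at most one MergedSourceCandidate per
# source, which `_build_platform_candidates` below groups per target platform:
#
#     PlatformCandidates(
#         family_key=("msgpack-python", "msgpack"),
#         platform="linux-64",
#         conda=<MergedSourceCandidate source="conda">,
#         pip=<MergedSourceCandidate source="pip">,
#     )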
@dataclass(frozen=True) class PlatformCandidates: family_key: FamilyKey platform: TargetPlatform conda: MergedSourceCandidate | None pip: MergedSourceCandidate | None def _operator_order_key(constraint: str) -> tuple[int, str]: op = extract_version_operator(constraint) order = { "===": 0, "==": 1, "~=": 2, ">=": 3, "<=": 4, "!=": 5, ">": 6, "<": 7, "=": 8, } return (order.get(op, len(order)), constraint) def _canonicalize_joined_pinnings(pinnings: list[str]) -> str: seen: set[str] = set() for pinning in pinnings: for stripped in filter(None, (token.strip() for token in pinning.split(","))): seen.add(stripped) return ",".join(sorted(seen, key=_operator_order_key)) def _parse_pip_name(name: str) -> tuple[str, tuple[str, ...]]: if not name.endswith("]") or "[" not in name: return name, () base_name, extras = name[:-1].split("[", 1) parsed = tuple(sorted(e.strip() for e in extras.split(",") if e.strip())) return base_name, parsed def _build_pip_name(base_name: str, extras: tuple[str, ...]) -> str: if not extras: return base_name return f"{base_name}[{','.join(extras)}]" def _spec_is_pinned(spec: Spec) -> bool: return spec.pin is not None def _candidate_scope_rank(candidate: MergedSourceCandidate) -> float: ranks = [len(scope) for scope in candidate.declared_scopes if scope is not None] if not ranks: return math.inf return min(ranks) def _candidate_has_universal_origin(candidate: MergedSourceCandidate) -> bool: return any(scope is None for scope in candidate.declared_scopes) def _candidate_has_pip_extras(candidate: MergedSourceCandidate) -> bool: return candidate.source == "pip" and bool(_parse_pip_name(candidate.spec.name)[1]) def _candidate_display_key( candidate: MergedSourceCandidate, ) -> tuple[int, str, str]: return ( 0 if candidate.source == "conda" else 1, candidate.normalized_name, candidate.spec.name_with_pin(is_pip=candidate.source == "pip"), ) def _origin_to_text(origin: DependencyOrigin) -> str: parts = [origin.source_file.as_posix(), f"item {origin.dependency_index}"] if origin.optional_group is not None: parts.append(f"group {origin.optional_group}") if origin.local_dependency_chain: chain = " -> ".join(path.as_posix() for path in origin.local_dependency_chain) parts.append(f"via {chain}") return ", ".join(parts) def _candidate_to_text(candidate: MergedSourceCandidate) -> str: rendered = candidate.spec.name_with_pin(is_pip=candidate.source == "pip") origins = "; ".join(_origin_to_text(req.origin) for req in candidate.requirements) return f"{candidate.source}: {rendered} ({origins})" def _merge_pin_strings( requirements: list[SourceRequirement], *, allow_unsatisfiable_fallback: bool, ) -> str | None: pinned = [req.spec.pin for req in requirements if req.spec.pin is not None] if not pinned: return None unique = list(dict.fromkeys(pinned)) if len(unique) == 1: return unique[0] if allow_unsatisfiable_fallback: exact_pinnings = [ pin for pin in unique if _exact_pinning_version_text(pin) is not None ] distinct_exact_versions = { cast("str", _exact_pinning_version_text(pin)) for pin in exact_pinnings } if len(distinct_exact_versions) > 1: pinnings_str = ", ".join(exact_pinnings) msg = ( "Multiple exact version pinnings found: " f"{pinnings_str} for `{requirements[0].base_name}`" ) raise VersionConflictError(msg) try: merged = combine_version_pinnings(unique, name=requirements[0].base_name) return _canonicalize_joined_pinnings([merged]) except VersionConflictError: if allow_unsatisfiable_fallback and _joined_pinnings_are_safely_satisfiable( unique, ): return 
_canonicalize_joined_pinnings(unique) raise def _bump_release_prefix(release: tuple[int, ...], prefix_len: int) -> str: assert 0 < prefix_len <= len(release) bumped = list(release[:prefix_len]) bumped[-1] += 1 return ".".join(str(part) for part in bumped) def _normalize_pinning_token_for_satisfiability( # noqa: PLR0911 pinning: str, ) -> list[str] | None: try: specifier = Specifier(pinning) except InvalidSpecifier: return None operator = specifier.operator version_text = specifier.version if operator in {">", ">=", "<", "<="}: return [f"{operator}{version_text}"] if operator == "!=": if "*" in version_text: return None return [f"!={version_text}"] if operator == "==": if version_text.endswith(".*"): prefix = version_text[:-2] parsed = Version(prefix) upper = _bump_release_prefix(parsed.release, len(parsed.release)) return [f">={prefix}", f"<{upper}"] Version(version_text) return [f"={version_text}"] if operator == "~=": parsed = Version(version_text) upper = _bump_release_prefix(parsed.release, len(parsed.release) - 1) return [f">={version_text}", f"<{upper}"] return None def _parse_supported_pinning(pinning: str) -> tuple[str, Version]: operator = extract_version_operator(pinning) assert operator version_text = pinning[len(operator) :].strip() return operator, Version(version_text) def _exact_pinning_version_text(pinning: str) -> str | None: operator = extract_version_operator(pinning) if operator not in {"==", "===", "="}: return None return pinning[len(operator) :].strip() def _stricter_lower_bound( current: tuple[Version, bool] | None, candidate: tuple[Version, bool], ) -> tuple[Version, bool]: if current is None: return candidate if candidate[0] > current[0]: return candidate if candidate[0] < current[0]: return current return (current[0], current[1] and candidate[1]) def _stricter_upper_bound( current: tuple[Version, bool] | None, candidate: tuple[Version, bool], ) -> tuple[Version, bool]: if current is None: return candidate if candidate[0] < current[0]: return candidate if candidate[0] > current[0]: return current return (current[0], current[1] and candidate[1]) def _normalized_pinnings_are_satisfiable( # noqa: PLR0911, PLR0912 pinnings: list[str], ) -> bool: exact: Version | None = None excluded: set[Version] = set() lower: tuple[Version, bool] | None = None upper: tuple[Version, bool] | None = None for pinning in pinnings: operator, parsed_version = _parse_supported_pinning(pinning) if operator == "=": assert exact is None or exact == parsed_version exact = parsed_version elif operator == "!=": excluded.add(parsed_version) elif operator == ">": lower = _stricter_lower_bound(lower, (parsed_version, False)) elif operator == ">=": lower = _stricter_lower_bound(lower, (parsed_version, True)) elif operator == "<": upper = _stricter_upper_bound(upper, (parsed_version, False)) elif operator == "<=": upper = _stricter_upper_bound(upper, (parsed_version, True)) if exact is not None: if exact in excluded: return False if lower is not None and ( exact < lower[0] or (exact == lower[0] and not lower[1]) ): return False return not ( upper is not None and (exact > upper[0] or (exact == upper[0] and not upper[1])) ) if lower is not None and upper is not None: if lower[0] > upper[0]: return False if lower[0] == upper[0]: if not (lower[1] and upper[1]): return False if lower[0] in excluded: return False return True def _joined_pinnings_are_safely_satisfiable(pinnings: list[str]) -> bool: normalized: list[str] = [] for pinning in pinnings: for stripped in filter(None, (token.strip() for token in 
pinning.split(","))): normalized_tokens = _normalize_pinning_token_for_satisfiability(stripped) if normalized_tokens is None: return False normalized.extend(normalized_tokens) return _normalized_pinnings_are_satisfiable(normalized) def _merge_source_requirements( source: CondaPip, requirements: list[SourceRequirement], ) -> MergedSourceCandidate: requirements = list(requirements) if source == "pip": extras = tuple( sorted({extra for req in requirements for extra in req.extras}), ) pin = _merge_pin_strings( requirements, allow_unsatisfiable_fallback=True, ) name = _build_pip_name(requirements[0].base_name, extras) spec = Spec(name=name, which="pip", pin=pin) normalized_name = requirements[0].normalized_name else: pin = _merge_pin_strings( requirements, allow_unsatisfiable_fallback=False, ) spec = Spec(name=requirements[0].spec.name, which="conda", pin=pin) normalized_name = requirements[0].normalized_name return MergedSourceCandidate( source=source, spec=spec, normalized_name=normalized_name, family_keys=tuple(dict.fromkeys(req.family_key for req in requirements)), requirements=tuple(requirements), declared_scopes=tuple(req.declared_platforms for req in requirements), ) def _entry_family_key(entry: DependencyEntry) -> FamilyKey: conda_name = entry.conda.name if entry.conda is not None else None pip_name = None if entry.pip is not None: base_name, _extras = _parse_pip_name(entry.pip.name) pip_name = canonicalize_name(base_name) return (conda_name, pip_name) def _source_requirement_from_spec( spec: Spec, *, family_key: FamilyKey, origin: DependencyOrigin, declared_platforms: tuple[Platform, ...] | None, ) -> SourceRequirement: if spec.which == "pip": base_name, extras = _parse_pip_name(spec.name) normalized_name = canonicalize_name(base_name) else: base_name = spec.name extras = () normalized_name = spec.name return SourceRequirement( source=spec.which, spec=spec, family_key=family_key, base_name=base_name, normalized_name=normalized_name, extras=extras, declared_platforms=declared_platforms, origin=origin, ) def _collect_target_platforms( _entries: Sequence[DependencyEntry], platforms: Sequence[Platform] | None, ) -> list[TargetPlatform]: if platforms: return cast("list[TargetPlatform]", list(platforms)) return cast("list[TargetPlatform]", sorted(PLATFORM_SELECTOR_MAP)) def _entry_targets( spec: Spec, *, target_platforms: Sequence[TargetPlatform], ) -> tuple[tuple[Platform, ...] 
| None, list[TargetPlatform]]: declared = spec.platforms() if declared is None: return None, list(target_platforms) targets: list[TargetPlatform] = [ platform for platform in declared if platform in target_platforms ] return tuple(declared), targets def _build_platform_candidates( entries: Sequence[DependencyEntry], platforms: Sequence[Platform] | None = None, ) -> list[PlatformCandidates]: target_platforms = _collect_target_platforms(entries, platforms) grouped: dict[ FamilyKey, dict[TargetPlatform, dict[CondaPip, list[SourceRequirement]]], ] = {} for entry in entries: family_key = _entry_family_key(entry) for spec in (entry.conda, entry.pip): if spec is None: continue declared_platforms, targets = _entry_targets( spec, target_platforms=target_platforms, ) source_requirement = _source_requirement_from_spec( spec, family_key=family_key, origin=entry.origin, declared_platforms=declared_platforms, ) for platform in targets: grouped.setdefault(family_key, {}).setdefault(platform, {}).setdefault( spec.which, [], ).append(source_requirement) result: list[PlatformCandidates] = [] for family_key, platform_data in grouped.items(): for platform, source_lists in sorted(platform_data.items()): conda = None pip = None if source_lists.get("conda"): conda = _merge_source_requirements("conda", source_lists["conda"]) if source_lists.get("pip"): pip = _merge_source_requirements("pip", source_lists["pip"]) result.append( PlatformCandidates( family_key=family_key, platform=platform, conda=conda, pip=pip, ), ) return result def _choose_by_precedence( conda: MergedSourceCandidate | None, pip: MergedSourceCandidate | None, ) -> MergedSourceCandidate | None: if conda is None: return pip if pip is None: return conda if _candidate_has_pip_extras(pip): return pip conda_pinned = _spec_is_pinned(conda.spec) pip_pinned = _spec_is_pinned(pip.spec) if conda_pinned != pip_pinned: return conda if conda_pinned else pip if conda_pinned and pip_pinned: conda_scope = _candidate_scope_rank(conda) pip_scope = _candidate_scope_rank(pip) if conda_scope != pip_scope: return conda if conda_scope < pip_scope else pip return conda def _select_conda_like_candidate( platform_candidates: PlatformCandidates, ) -> MergedSourceCandidate | None: return _choose_by_precedence( platform_candidates.conda, platform_candidates.pip, ) def _select_pip_candidate( platform_candidates: PlatformCandidates, ) -> MergedSourceCandidate | None: if platform_candidates.pip is None: return None return platform_candidates.pip def _final_identity(candidate: MergedSourceCandidate) -> str: if candidate.source == "conda": return candidate.spec.name return candidate.normalized_name def _merge_candidate_group( candidates: Iterable[MergedSourceCandidate], ) -> MergedSourceCandidate: ordered = sorted(candidates, key=_candidate_display_key) source = ordered[0].source requirements = [ requirement for candidate in ordered for requirement in candidate.requirements ] return _merge_source_requirements(source, requirements) def _can_reconcile_cross_source_collision( candidates: Iterable[MergedSourceCandidate], ) -> bool: conda_names = { conda_name for candidate in candidates for conda_name, _pip_name in candidate.family_keys if conda_name is not None } pip_names = { pip_name for candidate in candidates for _conda_name, pip_name in candidate.family_keys if pip_name is not None } return len(conda_names) <= 1 and len(pip_names) <= 1 def _raise_final_collision( *, platform: TargetPlatform, identity: str, candidates: Iterable[MergedSourceCandidate], ) -> None: platform_text = 
platform or "universal" rendered = "\n".join( f" - {_candidate_to_text(candidate)}" for candidate in sorted(candidates, key=_candidate_display_key) ) msg = ( "Final Dependency Collision:\n" f"Multiple selected dependency families map to final install identity " f"'{identity}' on platform '{platform_text}':\n" f"{rendered}\n" "Resolve the ambiguity by removing one alternative or making the target " "package names distinct." ) raise ValueError(msg) def _resolve_final_collisions( selected: dict[TargetPlatform, list[MergedSourceCandidate]], ) -> dict[TargetPlatform, list[MergedSourceCandidate]]: resolved: dict[TargetPlatform, list[MergedSourceCandidate]] = {} for platform, candidates in selected.items(): by_identity: dict[str, list[MergedSourceCandidate]] = {} for candidate in candidates: by_identity.setdefault(_final_identity(candidate), []).append(candidate) resolved_candidates: list[MergedSourceCandidate] = [] for identity, group in sorted(by_identity.items()): if len(group) == 1: resolved_candidates.append(group[0]) continue by_source: dict[CondaPip, list[MergedSourceCandidate]] = {} for candidate in group: by_source.setdefault(candidate.source, []).append(candidate) merged_group = [ _merge_candidate_group(source_candidates) for _source, source_candidates in sorted(by_source.items()) ] sources = {candidate.source for candidate in merged_group} if len(sources) > 1 and not _can_reconcile_cross_source_collision( merged_group, ): _raise_final_collision( platform=platform, identity=identity, candidates=merged_group, ) if len(sources) > 1: conda = next( ( candidate for candidate in merged_group if candidate.source == "conda" ), None, ) pip = next( ( candidate for candidate in merged_group if candidate.source == "pip" ), None, ) winner = _choose_by_precedence(conda, pip) assert winner is not None resolved_candidates.append(winner) continue resolved_candidates.append(merged_group[0]) resolved[platform] = resolved_candidates return resolved def select_conda_like_requirements( entries: Sequence[DependencyEntry], platforms: Sequence[Platform] | None = None, ) -> dict[TargetPlatform, list[MergedSourceCandidate]]: selected: dict[TargetPlatform, list[MergedSourceCandidate]] = {} for platform_candidates in _build_platform_candidates(entries, platforms): candidate = _select_conda_like_candidate(platform_candidates) assert candidate is not None selected.setdefault(platform_candidates.platform, []).append(candidate) return _resolve_final_collisions(selected) def select_pip_requirements( entries: Sequence[DependencyEntry], platforms: Sequence[Platform] | None = None, ) -> dict[TargetPlatform, list[MergedSourceCandidate]]: selected: dict[TargetPlatform, list[MergedSourceCandidate]] = {} for platform_candidates in _build_platform_candidates(entries, platforms): candidate = _select_pip_candidate(platform_candidates) if candidate is None: continue selected.setdefault(platform_candidates.platform, []).append(candidate) return _resolve_final_collisions(selected) def collapse_selected_universals( selected: dict[TargetPlatform, list[MergedSourceCandidate]], platforms: Sequence[Platform] | None = None, ) -> dict[TargetPlatform, list[MergedSourceCandidate]]: """Compress identical universal-origin candidates back to the universal bucket.""" result: dict[TargetPlatform, list[MergedSourceCandidate]] = {} active_platforms = ( list(platforms) if platforms else sorted(platform for platform in selected if platform is not None) ) if not active_platforms: return result grouped: dict[ tuple[CondaPip, Spec], dict[Platform, 
MergedSourceCandidate], ] = {} for platform in active_platforms: for candidate in selected.get(platform, []): grouped.setdefault( (candidate.source, candidate.spec), {}, )[platform] = candidate for candidates_by_platform in grouped.values(): if len(candidates_by_platform) == len(active_platforms) and all( _candidate_has_universal_origin(candidate) for candidate in candidates_by_platform.values() ): result.setdefault(None, []).append( next(iter(candidates_by_platform.values())), ) continue for platform, candidate in candidates_by_platform.items(): result.setdefault(platform, []).append(candidate) return result ================================================ FILE: unidep/_hatch_integration.py ================================================ """unidep - Unified Conda and Pip requirements management. This module contains the Hatchling integration. """ from __future__ import annotations from pathlib import Path from hatchling.metadata.plugin.interface import MetadataHookInterface from hatchling.plugin import hookimpl from unidep._setuptools_integration import _deps from unidep.utils import ( parse_folder_or_filename, ) __all__ = ["UnidepRequirementsMetadataHook"] class UnidepRequirementsMetadataHook(MetadataHookInterface): """Hatch hook to populate ``'project.dependencies'`` from ``requirements.yaml`` or ``pyproject.toml``.""" # noqa: E501 PLUGIN_NAME = "unidep" def update(self, metadata: dict) -> None: """Update the project table's metadata.""" if "dependencies" not in metadata.get("dynamic", []): return project_root = Path.cwd() try: requirements_file = parse_folder_or_filename(project_root).path except FileNotFoundError: return if "dependencies" in metadata: error_msg = ( "You have a `requirements.yaml` file in your project root or" " configured unidep in `pyproject.toml` with `[tool.unidep]`," " but you are also using `[project.dependencies]`." " Please remove `[project.dependencies]`, you cannot use both."
) raise RuntimeError(error_msg) deps = _deps(requirements_file) metadata["dependencies"] = deps.dependencies if "optional-dependencies" not in metadata.get("dynamic", []): return metadata["optional-dependencies"] = deps.extras @hookimpl def hatch_register_metadata_hook() -> type[UnidepRequirementsMetadataHook]: return UnidepRequirementsMetadataHook ================================================ FILE: unidep/_pixi.py ================================================ """Pixi.toml generation with version constraint merging.""" from __future__ import annotations import copy import os import re import sys from collections import Counter, deque from collections.abc import Mapping from pathlib import Path from typing import ( TYPE_CHECKING, Any, Literal, NamedTuple, Sequence, cast, ) from ruamel.yaml import YAML from unidep._dependencies_parsing import ( DependencyEntry, _apply_local_dependency_override, _effective_local_dependencies, _load, _move_local_optional_dependencies_to_local_dependencies, _str_is_path_like, _try_parse_local_dependency_requirement_file, parse_requirements, ) from unidep._dependency_selection import select_conda_like_requirements from unidep.platform_definitions import Platform from unidep.utils import ( LocalDependency, PathWithExtras, is_pip_installable, package_name_from_path, parse_folder_or_filename, resolve_platforms, split_path_and_extras, ) if TYPE_CHECKING: from typing import Dict, Optional, Tuple, Union from unidep._dependencies_parsing import ParsedRequirements from unidep.platform_definitions import Spec if sys.version_info >= (3, 10): from typing import TypeAlias else: from typing_extensions import TypeAlias from unidep.platform_definitions import Platform # Version spec can be a string or dict with version/build/extras VersionSpec: TypeAlias = Union[str, Dict[str, Any]] # Type alias for the extracted dependencies structure # Maps platform (or None for universal) to (conda_deps, pip_deps) PlatformDeps: TypeAlias = Dict[ Optional[str], Tuple[Dict[str, VersionSpec], Dict[str, VersionSpec]], ] def _parse_version_build(pin: str | None) -> str | dict[str, str]: """Parse a version pin that may contain a build string. Conda matchspecs can have format: ">=1.0 build_string*" where the build string comes after a space following the version. Returns: str: Simple version string like ">=1.0" or "*" dict: {"version": ">=1.0", "build": "build_string*"} when build present """ if not pin: return "*" pin = pin.strip() if not pin: return "*" # Build strings come after the full version constraint, separated by whitespace. # We split on the last whitespace and only treat the last token as build when # the version part looks complete (has digits or a wildcard) and the last token # doesn't look like another constraint. if " " in pin: version_candidate, build_candidate = pin.rsplit(None, 1) if ( re.search(r"\d", version_candidate) or "*" in version_candidate ) and not re.match(r"^[><=!~]", build_candidate): version = version_candidate.replace(" ", "") return {"version": version, "build": build_candidate} # No build string, just return the version without spaces return pin.replace(" ", "") def _parse_package_extras(pkg_name: str) -> tuple[str, list[str]]: """Parse a package name that may contain extras. 
Pip packages can have format: "package[extra1,extra2]" Returns: tuple: (base_name, extras_list) where extras_list is empty if no extras """ match = re.match(r"^([a-zA-Z0-9_.\-]+)\[([^\]]+)\]$", pkg_name) if match: base_name = match.group(1) extras = [e.strip() for e in match.group(2).split(",")] return base_name, extras return pkg_name, [] def _make_pip_version_spec( version: str | dict[str, str], extras: list[str], ) -> str | dict[str, Any]: """Create a pip version spec, handling extras if present. Pixi requires extras in table format: package = { version = "*", extras = ["extra1", "extra2"] } Returns: str: Simple version string if no extras dict: Table with version and extras if extras present """ if not extras: return version # When we have extras, we need table format if isinstance(version, str): return {"version": version, "extras": extras} # version is already a dict (has build string), add extras return {**version, "extras": extras} def _get_package_name(project_dir: Path) -> str: """Get a pixi dependency key for an editable local package.""" name = package_name_from_path(project_dir) return name.replace("-", "_").replace(".", "_") def _normalize_feature_name(name: str) -> str: """Normalize a feature name to a deterministic pixi-friendly key.""" return re.sub(r"[^A-Za-z0-9_-]+", "-", name.strip()).strip("-_") def _project_dir_from_requirement_file(req_file: Path) -> Path: """Get the installable project directory for a requirements path.""" resolved = req_file.resolve() return resolved.parent if resolved.is_file() else resolved def _derive_feature_names(requirements_files: Sequence[Path]) -> list[str]: """Derive unique, non-empty feature names for requirements files.""" project_dirs = [ _project_dir_from_requirement_file(req_file) for req_file in requirements_files ] resolved_paths = [req_file.resolve() for req_file in requirements_files] base_names = [] for req_file, req_path, req_dir in zip( requirements_files, resolved_paths, project_dirs, ): # Prefer the file stem for non-standard requirement filenames # (e.g. dev-requirements.yaml) so shared files get meaningful feature names. 
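# Illustrative (hypothetical paths): ``pkgs/app/requirements.yaml`` maps to
# the directory-based name "app", while ``pkgs/app/dev-requirements.yaml``
# maps to the stem-based name "dev-requirements".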
if req_path.name not in {"requirements.yaml", "pyproject.toml"}: default_name = req_path.stem else: default_name = req_dir.name or req_path.stem or req_file.stem or "feature" normalized = _normalize_feature_name(default_name) base_names.append(normalized or "feature") try: common_dir = Path(os.path.commonpath([str(path) for path in project_dirs])) except ValueError: common_dir = Path.cwd().resolve() base_counts = Counter(base_names) used_names: set[str] = set() feature_names: list[str] = [] for base_name, req_path, req_dir in zip(base_names, resolved_paths, project_dirs): if base_counts[base_name] == 1: candidate = base_name else: try: rel_parts = req_dir.relative_to(common_dir).parts except ValueError: rel_parts = req_dir.parts rel_name = _normalize_feature_name( "-".join(part for part in rel_parts if part), ) candidate = rel_name or base_name or "feature" if candidate in used_names: stem_name = _normalize_feature_name(req_path.stem) if stem_name: candidate = _normalize_feature_name(f"{candidate}-{stem_name}") unique_name = candidate suffix = 2 while unique_name in used_names: unique_name = f"{candidate}-{suffix}" suffix += 1 used_names.add(unique_name) feature_names.append(unique_name) return feature_names def _editable_dependency_path(req_dir: Path, output_file: str | Path | None) -> str: """Build editable path relative to the generated pixi.toml location.""" output_dir = ( Path.cwd().resolve() if output_file is None else Path(output_file).resolve().parent ) try: rel_path = Path(os.path.relpath(req_dir.resolve(), output_dir)).as_posix() except ValueError: # On Windows, os.path.relpath raises ValueError when paths are on # different drives (e.g. C:\ vs D:\). Fall back to an absolute path. return req_dir.resolve().as_posix() if rel_path == ".": return "." if rel_path.startswith("."): return rel_path return f"./{rel_path}" def _with_unique_order_paths(items: Sequence[Path]) -> list[Path]: """Return unique paths while preserving order.""" unique_items: list[Path] = [] seen: set[Path] = set() for item in items: resolved = item.resolve() if resolved in seen: continue seen.add(resolved) unique_items.append(item) return unique_items def _add_editable_local_dependencies( section: dict[str, Any], local_projects: Sequence[Path], *, output_file: str | Path | None, exclude: set[Path] | None = None, ) -> None: """Add local projects to a pixi section as editable pip dependencies. Parameters ---------- section The pixi data dict to add ``pypi-dependencies`` entries to. local_projects Directories of installable Python projects. output_file Path to the generated pixi.toml (used to compute relative paths). exclude Resolved paths to skip (used to avoid duplicating editables that already appear in a parent/base section). 
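Examples
--------
A minimal sketch (project name and path are hypothetical, following the
rules in ``_get_package_name`` / ``_editable_dependency_path``)::

    section["pypi-dependencies"]["my_lib"] == {
        "path": "./libs/my-lib",
        "editable": True,
    }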
""" unique_projects = _with_unique_order_paths(list(local_projects)) if not unique_projects: return for project_dir in unique_projects: if exclude and project_dir.resolve() in exclude: continue package_name = _get_package_name(project_dir) section.setdefault("pypi-dependencies", {})[package_name] = { "path": _editable_dependency_path(project_dir, output_file), "editable": True, } def _unmanaged_installable_local_project_dir( *, base_dir: Path, local_dependency: str, ) -> Path | None: """Resolve an unmanaged local dependency to an installable project directory.""" local_path, _extras = split_path_and_extras(local_dependency) abs_local = (base_dir / local_path).resolve() if abs_local.suffix in (".whl", ".zip"): return None if is_pip_installable(abs_local): return abs_local return None class LocalDependencyGraph(NamedTuple): """Result of discovering local dependency relationships.""" roots: list[PathWithExtras] discovered: list[PathWithExtras] graph: dict[PathWithExtras, list[PathWithExtras]] optional_group_graph: dict[PathWithExtras, dict[str, list[PathWithExtras]]] unmanaged_local_graph: dict[PathWithExtras, list[Path]] optional_group_unmanaged_graph: dict[PathWithExtras, dict[str, list[Path]]] def _discover_local_dependency_graph( # noqa: PLR0912, C901, PLR0915 requirements_files: Sequence[Path], ) -> LocalDependencyGraph: """Discover requirement files reachable via local_dependencies. Returns: - Root requirement files (the user-provided inputs). - All discovered requirement files (roots + reachable local deps). - A direct dependency graph between discovered requirement files. - Optional-group local dependency edges for root files. - Direct unmanaged installable local dependencies for each node. - Optional-group unmanaged installable local dependencies for root files. 
""" yaml = YAML(typ="rt") local_dependency_overrides: dict[Path, LocalDependency] = {} roots = [ parse_folder_or_filename(req_file).canonicalized() for req_file in requirements_files ] discovered: list[PathWithExtras] = [] graph: dict[PathWithExtras, list[PathWithExtras]] = {} optional_group_graph: dict[PathWithExtras, dict[str, list[PathWithExtras]]] = {} unmanaged_local_graph: dict[PathWithExtras, list[Path]] = {} optional_group_unmanaged_graph: dict[PathWithExtras, dict[str, list[Path]]] = {} seen: set[PathWithExtras] = set() roots_set = set(roots) queue = deque(roots) while queue: node = queue.popleft() if node in seen: continue seen.add(node) discovered.append(node) data = copy.deepcopy(_load(node.path, yaml)) _move_local_optional_dependencies_to_local_dependencies( data=data, path_with_extras=node, verbose=False, ) effective_local_dependencies = _effective_local_dependencies( data=data, base_dir=node.path.parent, overrides=local_dependency_overrides, ) if node in roots_set: optional_groups = data.get("optional_dependencies", {}) if isinstance(optional_groups, Mapping): for group_name, group_deps in optional_groups.items(): if not isinstance(group_deps, list): continue for dep in group_deps: if isinstance(dep, Mapping) or not _str_is_path_like(dep): continue effective_local_dep = _apply_local_dependency_override( local_dependency=LocalDependency(local=dep), base_dir=node.path.parent, overrides=local_dependency_overrides, ) if effective_local_dep.use != "local": continue requirements_dep_file = ( _try_parse_local_dependency_requirement_file( base_dir=node.path.parent, local_dependency=effective_local_dep.local, ) ) if requirements_dep_file is None: unmanaged_local_dir = ( _unmanaged_installable_local_project_dir( base_dir=node.path.parent, local_dependency=effective_local_dep.local, ) ) if unmanaged_local_dir is None: continue unmanaged_group_edges = ( optional_group_unmanaged_graph.setdefault( node, {}, ).setdefault(group_name, []) ) if unmanaged_local_dir not in unmanaged_group_edges: unmanaged_group_edges.append(unmanaged_local_dir) continue child = requirements_dep_file.canonicalized() group_edges = optional_group_graph.setdefault( node, {}, ).setdefault(group_name, []) if child not in group_edges: group_edges.append(child) if child not in seen: queue.append(child) direct_nodes: list[PathWithExtras] = [] direct_unmanaged_nodes: list[Path] = [] for effective_local_dep in effective_local_dependencies: if effective_local_dep.use != "local": continue requirements_dep_file = _try_parse_local_dependency_requirement_file( base_dir=node.path.parent, local_dependency=effective_local_dep.local, ) if requirements_dep_file is None: unmanaged_local_dir = _unmanaged_installable_local_project_dir( base_dir=node.path.parent, local_dependency=effective_local_dep.local, ) if ( unmanaged_local_dir is not None and unmanaged_local_dir not in direct_unmanaged_nodes ): direct_unmanaged_nodes.append(unmanaged_local_dir) continue child = requirements_dep_file.canonicalized() if child not in direct_nodes: direct_nodes.append(child) if child not in seen: queue.append(child) graph[node] = direct_nodes unmanaged_local_graph[node] = direct_unmanaged_nodes return LocalDependencyGraph( roots=roots, discovered=discovered, graph=graph, optional_group_graph=optional_group_graph, unmanaged_local_graph=unmanaged_local_graph, optional_group_unmanaged_graph=optional_group_unmanaged_graph, ) def _parse_direct_requirements_for_node( node: PathWithExtras, *, verbose: bool, ignore_pins: list[str] | None, skip_dependencies: 
list[str] | None, overwrite_pins: list[str] | None, include_all_optional_groups: bool = False, ) -> ParsedRequirements: """Parse a requirements node without recursively flattening local deps.""" extras: list[list[str]] | Literal["*"] | None if node.extras: extras = [node.extras] elif include_all_optional_groups: extras = "*" else: extras = None req = parse_requirements( node.path, verbose=verbose, extras=extras, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, include_local_dependencies=False, ) if not node.extras: return req merged_requirements = { name: list(specs) for name, specs in req.requirements.items() } merged_entries = list(req.dependency_entries) if "*" in node.extras: selected_groups = list(req.optional_dependencies.keys()) else: selected_groups = [ group_name for group_name in node.extras if group_name in req.optional_dependencies ] # Extras selected on local dependencies are required for the parent feature. for group_name in selected_groups: for dep_name, specs in req.optional_dependencies[group_name].items(): merged_requirements.setdefault(dep_name, []).extend(specs) merged_entries.extend(req.optional_dependency_entries.get(group_name, [])) return req._replace( requirements=merged_requirements, optional_dependencies={}, dependency_entries=merged_entries, optional_dependency_entries={}, ) def _collect_transitive_nodes( node: PathWithExtras, graph: dict[PathWithExtras, list[PathWithExtras]], ) -> list[PathWithExtras]: """Collect transitive local dependency nodes in deterministic order.""" collected: list[PathWithExtras] = [] seen: set[PathWithExtras] = set() queue = deque(graph.get(node, [])) while queue: current = queue.popleft() if current in seen: continue seen.add(current) collected.append(current) queue.extend(graph.get(current, [])) return collected def _with_unique_order(items: list[str]) -> list[str]: """Return unique items while preserving order.""" return list(dict.fromkeys(items)) def _unique_optional_feature_name( *, parent_feature: str, group_name: str, taken_names: set[str], ) -> str: """Generate a non-colliding optional sub-feature name.""" candidate = f"{parent_feature}-{group_name}" if candidate not in taken_names: taken_names.add(candidate) return candidate suffix_base = f"{candidate}-opt" unique_candidate = suffix_base suffix = 2 while unique_candidate in taken_names: unique_candidate = f"{suffix_base}-{suffix}" suffix += 1 taken_names.add(unique_candidate) return unique_candidate def _unique_env_name( feature_name: str, taken_env_names: set[str], ) -> str: """Generate a non-colliding pixi environment name from a feature name. Pixi environment names cannot contain underscores, so we replace them with hyphens. When this normalization causes a collision (e.g. both ``foo_bar`` and ``foo-bar`` exist), a numeric suffix is appended. 
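Examples
--------
>>> _unique_env_name("foo_bar", {"default"})
'foo-bar'
>>> _unique_env_name("foo_bar", {"default", "foo-bar"})
'foo-bar-2'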
""" candidate = feature_name.replace("_", "-") if candidate not in taken_env_names: taken_env_names.add(candidate) return candidate suffix = 2 while f"{candidate}-{suffix}" in taken_env_names: suffix += 1 result = f"{candidate}-{suffix}" taken_env_names.add(result) return result def _add_single_file_optional_environments( pixi_data: dict[str, Any], opt_features: list[str], ) -> None: """Add single-file optional environments, avoiding `all` name collisions.""" if not opt_features: return pixi_data["environments"]["default"] = [] create_aggregate_all_env = len(opt_features) > 1 taken_env_names: set[str] = {"default"} | ( {"all"} if create_aggregate_all_env else set() ) for feat in opt_features: env_name = _unique_env_name(feat, taken_env_names) pixi_data["environments"][env_name] = [feat] if create_aggregate_all_env: pixi_data["environments"]["all"] = opt_features def _spec_key(spec: Spec) -> tuple[str, str, str | None, str | None]: """Return the stable identity of a Spec (excludes parse-time identifier).""" return (spec.name, spec.which, spec.pin, spec.selector) def _entry_key( entry: DependencyEntry, ) -> tuple[ tuple[str, str, str | None, str | None] | None, tuple[str, str, str | None, str | None] | None, ]: """Return the stable identity of a dependency entry.""" conda = _spec_key(entry.conda) if entry.conda is not None else None pip = _spec_key(entry.pip) if entry.pip is not None else None return (conda, pip) def _subtract_entries( full_entries: list[DependencyEntry], base_entries: list[DependencyEntry], ) -> list[DependencyEntry]: """Return entries present in full_entries but not in base_entries.""" remaining = Counter(_entry_key(entry) for entry in base_entries) diff: list[DependencyEntry] = [] for entry in full_entries: key = _entry_key(entry) if remaining[key] > 0: remaining[key] -= 1 else: diff.append(entry) return diff class _PixiGenerationResult(NamedTuple): """Intermediate result from single-file or multi-file pixi generation.""" pixi_data: dict[str, Any] all_channels: set[str] all_platforms: set[str] discovered_target_platforms: set[str] def _process_single_file_optional_groups( pixi_data: dict[str, Any], *, req_file: Path, base_req: ParsedRequirements, base_feature_platforms: list[Platform] | None, dep_graph: LocalDependencyGraph, root_node: PathWithExtras, base_local_editable_set: set[Path], output_file: str | Path | None, verbose: bool, ignore_pins: list[str] | None, skip_dependencies: list[str] | None, overwrite_pins: list[str] | None, ) -> set[str]: """Process optional dependency groups for single-file pixi generation. Returns discovered target platforms. """ discovered_target_platforms: set[str] = set() optional_data = _load(req_file, YAML(typ="rt")).get("optional_dependencies", {}) optional_groups = list(optional_data) if isinstance(optional_data, Mapping) else [] if not optional_groups: return discovered_target_platforms pixi_data["feature"] = {} pixi_data["environments"] = {} opt_features = [] workspace_platforms: set[Platform] = set(base_feature_platforms or []) parsed_groups: list[tuple[str, list[DependencyEntry]]] = [] for group_name in optional_groups: group_req = parse_requirements( req_file, verbose=verbose, extras=[[group_name]], ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, include_local_dependencies=True, ) # A group parse contains the base requirements plus group-selected # optional local dependencies. Keep only the delta to preserve # optional semantics. 
group_feature_entries = _subtract_entries( group_req.dependency_entries, base_req.dependency_entries, ) group_feature_entries.extend( group_req.optional_dependency_entries.get(group_name, []), ) workspace_platforms.update( _selector_platforms_from_entries(group_feature_entries), ) parsed_groups.append((group_name, group_feature_entries)) group_platforms = sorted(workspace_platforms) or None if group_platforms: discovered_target_platforms.update(group_platforms) for group_name, group_feature_entries in parsed_groups: opt_platform_deps = _extract_dependencies( group_feature_entries, platforms=group_platforms, allow_hoist_without_universal_origin=True, ) feature = _build_feature_dict(opt_platform_deps) optional_group_projects: list[Path] = list( dep_graph.optional_group_unmanaged_graph.get(root_node, {}).get( group_name, [], ), ) optional_local_nodes = dep_graph.optional_group_graph.get( root_node, {}, ).get( group_name, [], ) seen_optional_nodes: set[PathWithExtras] = set() for optional_local_node in optional_local_nodes: for candidate_node in [ optional_local_node, *( _collect_transitive_nodes( optional_local_node, dep_graph.graph, ) ), ]: if candidate_node in seen_optional_nodes: continue seen_optional_nodes.add(candidate_node) optional_project_dir = _project_dir_from_requirement_file( candidate_node.path, ) if is_pip_installable(optional_project_dir): optional_group_projects.append(optional_project_dir) optional_group_projects.extend( dep_graph.unmanaged_local_graph.get(candidate_node, []), ) _add_editable_local_dependencies( feature, optional_group_projects, output_file=output_file, exclude=base_local_editable_set, ) if feature: pixi_data["feature"][group_name] = feature opt_features.append(group_name) # Create environments for optional dependencies _add_single_file_optional_environments(pixi_data, opt_features) return discovered_target_platforms def _generate_single_file_pixi( requirements_file: Path, *, platforms_override: list[Platform] | None, output_file: str | Path | None, verbose: bool, ignore_pins: list[str] | None, skip_dependencies: list[str] | None, overwrite_pins: list[str] | None, ) -> _PixiGenerationResult: """Generate pixi data for a single requirements file.""" pixi_data: dict[str, Any] = {} all_channels: set[str] = set() all_platforms: set[str] = set() discovered_target_platforms: set[str] = set() req_file = parse_folder_or_filename(requirements_file).path base_req = parse_requirements( requirements_file, verbose=verbose, ignore_pins=ignore_pins, overwrite_pins=overwrite_pins, skip_dependencies=skip_dependencies, include_local_dependencies=True, ) base_feature_platforms = _feature_platforms_for_entries( entries=base_req.dependency_entries, declared_platforms=base_req.platforms, global_declared_platforms=set(base_req.platforms), platforms_override=platforms_override, ) platform_deps = _extract_dependencies( base_req.dependency_entries, platforms=base_feature_platforms, allow_hoist_without_universal_origin=True, ) if base_feature_platforms: discovered_target_platforms.update(base_feature_platforms) # Use channels and platforms from the requirements file if base_req.channels: all_channels.update(base_req.channels) if base_req.platforms and not platforms_override: all_platforms.update(base_req.platforms) pixi_data.update(_build_feature_dict(platform_deps)) dep_graph = _discover_local_dependency_graph([requirements_file]) root_node = dep_graph.roots[0] # Collect editable packages from the root project and required local deps # only (NOT optional-only local deps, which belong 
in optional features). required_nodes = set(_collect_transitive_nodes(root_node, dep_graph.graph)) req_dir = _project_dir_from_requirement_file(req_file) local_editable_projects: list[Path] = [] if is_pip_installable(req_dir): local_editable_projects.append(req_dir) for node in dep_graph.discovered: if node == root_node or node not in required_nodes: continue node_project_dir = _project_dir_from_requirement_file(node.path) should_add_editable = node.path.name in { "requirements.yaml", "pyproject.toml", } if should_add_editable and is_pip_installable(node_project_dir): local_editable_projects.append(node_project_dir) local_editable_projects.extend(dep_graph.unmanaged_local_graph.get(node, [])) local_editable_projects.extend(dep_graph.unmanaged_local_graph.get(root_node, [])) _add_editable_local_dependencies( pixi_data, local_editable_projects, output_file=output_file, ) base_local_editable_set = { path.resolve() for path in _with_unique_order_paths(local_editable_projects) } # Handle optional dependencies as features opt_target_platforms = _process_single_file_optional_groups( pixi_data, req_file=req_file, base_req=base_req, base_feature_platforms=base_feature_platforms, dep_graph=dep_graph, root_node=root_node, base_local_editable_set=base_local_editable_set, output_file=output_file, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, ) discovered_target_platforms.update(opt_target_platforms) return _PixiGenerationResult( pixi_data=pixi_data, all_channels=all_channels, all_platforms=all_platforms, discovered_target_platforms=discovered_target_platforms, ) def _generate_multi_file_pixi( # noqa: PLR0912, C901, PLR0915 requirements_files: Sequence[Path], *, platforms_override: list[Platform] | None, output_file: str | Path | None, verbose: bool, ignore_pins: list[str] | None, skip_dependencies: list[str] | None, overwrite_pins: list[str] | None, ) -> _PixiGenerationResult: """Generate pixi data for multiple requirements files.""" pixi_data: dict[str, Any] = {"feature": {}, "environments": {}} all_channels: set[str] = set() all_platforms: set[str] = set() discovered_target_platforms: set[str] = set() dep_graph = _discover_local_dependency_graph(requirements_files) feature_names = _derive_feature_names( [node.path for node in dep_graph.discovered], ) feature_name_by_node = dict(zip(dep_graph.discovered, feature_names)) taken_optional_feature_names: set[str] = set(feature_names) root_nodes_set = set(dep_graph.roots) parsed_by_node: dict[PathWithExtras, ParsedRequirements] = {} global_declared_platforms: set[Platform] = set() base_feature_nodes: dict[str, PathWithExtras] = {} optional_feature_parents: dict[str, str] = {} optional_feature_has_feature: dict[str, bool] = {} optional_feature_local_nodes: dict[str, list[PathWithExtras]] = {} for node in dep_graph.discovered: req = _parse_direct_requirements_for_node( node, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, include_all_optional_groups=node in root_nodes_set, ) parsed_by_node[node] = req if req.platforms and not platforms_override: global_declared_platforms.update(req.platforms) for node in dep_graph.discovered: req = parsed_by_node[node] feature_platforms = _feature_platforms_for_entries( entries=req.dependency_entries, declared_platforms=req.platforms, global_declared_platforms=global_declared_platforms, platforms_override=platforms_override, ) platform_deps = _extract_dependencies( req.dependency_entries, 
platforms=feature_platforms, allow_hoist_without_universal_origin=platforms_override is not None or not req.platforms, ) discovered_target_platforms.update( platform for platform in platform_deps if platform is not None ) feature_name = feature_name_by_node[node] # Collect channels and platforms if req.channels: all_channels.update(req.channels) if not platforms_override and feature_platforms: all_platforms.update(feature_platforms) # Build the feature dict from platform deps feature = _build_feature_dict(platform_deps) # Add editable dependency for standard project requirement files. req_dir = _project_dir_from_requirement_file(node.path) should_add_editable = node.path.name in { "requirements.yaml", "pyproject.toml", } node_editable_projects: list[Path] = [] if should_add_editable and is_pip_installable(req_dir): node_editable_projects.append(req_dir) node_editable_projects.extend(dep_graph.unmanaged_local_graph.get(node, [])) _add_editable_local_dependencies( feature, node_editable_projects, output_file=output_file, ) if feature: # Only add non-empty features pixi_data["feature"][feature_name] = feature # Always track the node so transitive deps are computed even when # the root itself has no direct dependencies (aggregator pattern). base_feature_nodes[feature_name] = node if node not in root_nodes_set: continue # Build set of editables already in the base feature so optional # sub-features don't duplicate them (mirrors single-file behavior). base_editable_set = { p.resolve() for p in _with_unique_order_paths(node_editable_projects) } # Handle optional dependencies as sub-features for root features. # Even when a root has no direct deps/editables (so no base feature), # its optional groups may still carry real dependencies and must be kept. parsed_group_names = list(req.optional_dependencies) local_only_group_names = set( dep_graph.optional_group_graph.get(node, {}), ) | set( dep_graph.optional_group_unmanaged_graph.get(node, {}), ) all_group_names = parsed_group_names + [ group_name for group_name in sorted(local_only_group_names) if group_name not in req.optional_dependencies ] for group_name in all_group_names: group_entries = req.optional_dependency_entries.get(group_name, []) group_platforms = _feature_platforms_for_entries( entries=group_entries, declared_platforms=req.platforms, global_declared_platforms=global_declared_platforms, platforms_override=platforms_override, ) group_platform_deps = _extract_dependencies( group_entries, platforms=group_platforms, allow_hoist_without_universal_origin=platforms_override is not None or not req.platforms, ) discovered_target_platforms.update( platform for platform in group_platform_deps if platform is not None ) if not platforms_override and group_platforms: all_platforms.update(group_platforms) opt_feature = _build_feature_dict(group_platform_deps) opt_feature_name = _unique_optional_feature_name( parent_feature=feature_name, group_name=group_name, taken_names=taken_optional_feature_names, ) optional_local_nodes = dep_graph.optional_group_graph.get( node, {}, ).get( group_name, [], ) optional_unmanaged_local_projects = ( dep_graph.optional_group_unmanaged_graph.get( node, {}, ).get( group_name, [], ) ) _add_editable_local_dependencies( opt_feature, optional_unmanaged_local_projects, output_file=output_file, exclude=base_editable_set, ) if ( not opt_feature and not optional_local_nodes and not optional_unmanaged_local_projects ): continue if opt_feature: pixi_data["feature"][opt_feature_name] = opt_feature 
optional_feature_has_feature[opt_feature_name] = True else: optional_feature_has_feature[opt_feature_name] = False optional_feature_parents[opt_feature_name] = feature_name optional_feature_local_nodes[opt_feature_name] = optional_local_nodes # Create environments if pixi_data["feature"]: transitive_features: dict[str, list[str]] = {} for feature_name, node in base_feature_nodes.items(): dep_features = [ feature_name_by_node[dep_node] for dep_node in _collect_transitive_nodes( node, dep_graph.graph, ) if feature_name_by_node.get(dep_node) in pixi_data["feature"] ] transitive_features[feature_name] = _with_unique_order(dep_features) default_features: list[str] = [] for root_node in dep_graph.roots: root_feature = feature_name_by_node[root_node] # Include the root's own feature only if it's non-empty. if root_feature in pixi_data["feature"]: default_features.append(root_feature) # Always include transitive deps (supports aggregator roots # that have no direct deps but pull in local_dependencies). default_features.extend(transitive_features.get(root_feature, [])) pixi_data["environments"]["default"] = _with_unique_order(default_features) taken_env_names: set[str] = {"default"} for opt_feature_name, parent_feature in optional_feature_parents.items(): env_name = _unique_env_name(opt_feature_name, taken_env_names) env_features = [] if parent_feature in pixi_data["feature"]: env_features.append(parent_feature) env_features.extend(transitive_features.get(parent_feature, [])) if optional_feature_has_feature.get(opt_feature_name, False): env_features.append(opt_feature_name) for local_node in optional_feature_local_nodes.get( opt_feature_name, [], ): local_feature = feature_name_by_node[local_node] # Include the local node's own feature if it's non-empty. if local_feature in pixi_data["feature"]: env_features.append(local_feature) # Always traverse transitive deps even when the local node # itself is empty (aggregator pattern). 
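# Illustrative: a deps-only aggregator contributes no feature of its own,
# but its transitive local features still land in the environment here.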
env_features.extend(transitive_features.get(local_feature, [])) pixi_data["environments"][env_name] = _with_unique_order(env_features) return _PixiGenerationResult( pixi_data=pixi_data, all_channels=all_channels, all_platforms=all_platforms, discovered_target_platforms=discovered_target_platforms, ) def _selector_platforms_from_entries( entries: Sequence[DependencyEntry], ) -> list[Platform]: selector_platforms: set[Platform] = set() for entry in entries: for spec in (entry.conda, entry.pip): if spec is None or spec.selector is None: continue entry_platforms = spec.platforms() if entry_platforms is not None: selector_platforms.update(entry_platforms) return sorted(selector_platforms) def _feature_platforms_for_entries( *, entries: Sequence[DependencyEntry], declared_platforms: Sequence[Platform], global_declared_platforms: set[Platform], platforms_override: list[Platform] | None, ) -> list[Platform] | None: if platforms_override: return list(platforms_override) if declared_platforms: return list(declared_platforms) inferred_platforms = set(global_declared_platforms) inferred_platforms.update(_selector_platforms_from_entries(entries)) return sorted(inferred_platforms) or None def generate_pixi_toml( *requirements_files: Path, project_name: str | None = None, channels: list[str] | None = None, platforms: list[Platform] | None = None, output_file: str | Path | None = "pixi.toml", verbose: bool = False, ignore_pins: list[str] | None = None, skip_dependencies: list[str] | None = None, overwrite_pins: list[str] | None = None, ) -> None: """Generate a pixi.toml file from requirements files. This function creates a pixi.toml with features for each requirements file, letting Pixi handle all dependency resolution and conflict management. Parameters ---------- requirements_files One or more requirement file paths to process. project_name Name for the ``[workspace]`` section. Defaults to the current directory name. channels Conda channels for the workspace. When provided, these **override** any channels declared in the requirement files (consistent with how *platforms* behaves). When ``None``, channels are read from the requirement files, falling back to ``["conda-forge"]``. platforms Target platforms. When provided, overrides file-declared platforms. output_file Path to write the generated TOML. ``None`` writes to stdout. verbose Print progress information. ignore_pins Package names whose version pins should be stripped. skip_dependencies Package names to omit entirely. overwrite_pins Pin overrides in ``"pkg>=version"`` format. 
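Examples
--------
A minimal sketch (paths, name, and platforms are hypothetical)::

    generate_pixi_toml(
        Path("project1"),
        Path("project2"),
        project_name="monorepo",
        platforms=["linux-64", "osx-arm64"],
        output_file="pixi.toml",
    )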
""" if not requirements_files: requirements_files = (Path.cwd(),) if platforms is not None and not platforms: platforms = None if len(requirements_files) == 1: result = _generate_single_file_pixi( requirements_files[0], platforms_override=platforms, output_file=output_file, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, ) else: result = _generate_multi_file_pixi( requirements_files, platforms_override=platforms, output_file=output_file, verbose=verbose, ignore_pins=ignore_pins, skip_dependencies=skip_dependencies, overwrite_pins=overwrite_pins, ) pixi_data = result.pixi_data # Set workspace metadata with collected channels and platforms # Sort for deterministic output final_platforms = resolve_platforms( requested_platforms=platforms, declared_platforms=cast("set[Platform]", result.all_platforms), selector_platforms=cast("set[Platform]", result.discovered_target_platforms), ) if channels is not None: final_channels = list(channels) elif result.all_channels: final_channels = sorted(result.all_channels) else: final_channels = ["conda-forge"] pixi_data["workspace"] = { "name": project_name or Path.cwd().name, "channels": final_channels, "platforms": final_platforms, } # Filter target sections to only include platforms in the project's platforms list _filter_targets_by_platforms(pixi_data, set(final_platforms)) # Write the pixi.toml file _write_pixi_toml(pixi_data, output_file, verbose=verbose) def _extract_dependencies( # noqa: PLR0912 entries: list[DependencyEntry], *, platforms: list[Platform] | None = None, allow_hoist_without_universal_origin: bool = False, ) -> PlatformDeps: """Extract conda and pip dependencies from dependency entries. Returns a dict mapping platform (or None for universal) to ``(conda_deps, pip_deps)``. 
""" platform_deps: PlatformDeps = {None: ({}, {})} selected = select_conda_like_requirements(entries, platforms) target_platforms = platforms or sorted( platform for platform in selected if platform is not None ) if target_platforms: per_platform: dict[ Platform, tuple[ dict[str, VersionSpec], dict[str, VersionSpec], ], ] = {platform: ({}, {}) for platform in target_platforms} for platform, candidates in selected.items(): assert platform is not None conda_deps, pip_deps = per_platform[platform] for candidate in candidates: if candidate.source == "conda": conda_deps[candidate.spec.name] = _parse_version_build( candidate.spec.pin, ) else: base_name, extras = _parse_package_extras(candidate.spec.name) normalized = candidate.spec.name_with_pin(is_pip=True) normalized_pin = ( normalized[len(candidate.spec.name) :].strip() or None ) version = _parse_version_build(normalized_pin) pip_deps[base_name] = _make_pip_version_spec(version, extras) universal_conda, universal_pip = platform_deps[None] conda_names = { name for conda_deps, _pip_deps in per_platform.values() for name in conda_deps } pip_names = { name for _conda_deps, pip_deps in per_platform.values() for name in pip_deps } for name in sorted(conda_names): present = { platform: deps[0][name] for platform, deps in per_platform.items() if name in deps[0] } if len(present) == len(target_platforms): first_spec = next(iter(present.values())) specs_match = all(spec == first_spec for spec in present.values()) hoist_is_safe = allow_hoist_without_universal_origin if specs_match and hoist_is_safe: universal_conda[name] = first_spec continue for platform, spec in present.items(): platform_deps.setdefault(platform, ({}, {}))[0][name] = spec for name in sorted(pip_names): present = { platform: deps[1][name] for platform, deps in per_platform.items() if name in deps[1] } if len(present) == len(target_platforms): first_spec = next(iter(present.values())) specs_match = all(spec == first_spec for spec in present.values()) hoist_is_safe = allow_hoist_without_universal_origin if specs_match and hoist_is_safe: universal_pip[name] = first_spec continue for platform, spec in present.items(): platform_deps.setdefault(platform, ({}, {}))[1][name] = spec return platform_deps def _build_feature_dict(platform_deps: PlatformDeps) -> dict[str, Any]: """Build a pixi feature dict from platform dependencies.""" feature: dict[str, Any] = {} # Get universal (non-platform-specific) dependencies conda_deps, pip_deps = platform_deps.get(None, ({}, {})) if conda_deps: feature["dependencies"] = conda_deps if pip_deps: feature["pypi-dependencies"] = pip_deps # Add platform-specific dependencies as target sections for platform, (plat_conda, plat_pip) in platform_deps.items(): if platform is None: continue if "target" not in feature: feature["target"] = {} if platform not in feature["target"]: feature["target"][platform] = {} if plat_conda: feature["target"][platform]["dependencies"] = plat_conda if plat_pip: feature["target"][platform]["pypi-dependencies"] = plat_pip return feature def _filter_section_targets( section: dict[str, Any], valid_platforms: set[str], ) -> None: """Remove target entries for platforms not in *valid_platforms*.""" if "target" not in section: return section["target"] = { platform: deps for platform, deps in section["target"].items() if platform in valid_platforms } if not section["target"]: del section["target"] def _filter_targets_by_platforms( pixi_data: dict[str, Any], valid_platforms: set[str], ) -> None: """Filter target sections to only include 
platforms in valid_platforms. This removes targets for platforms that aren't in the project's platforms list, which would otherwise cause pixi to emit warnings. """ _filter_section_targets(pixi_data, valid_platforms) for feature_data in pixi_data.get("feature", {}).values(): _filter_section_targets(feature_data, valid_platforms) def _write_pixi_toml( pixi_data: dict[str, Any], output_file: str | Path | None, *, verbose: bool = False, ) -> None: """Write the pixi data structure to a TOML file.""" try: import tomli_w except ImportError: # pragma: no cover msg = ( "❌ `tomli_w` is required to write TOML files. " "Install it with `pip install tomli_w`." ) raise ImportError(msg) from None if output_file is not None: output_path = Path(output_file) with output_path.open("wb") as f: tomli_w.dump(pixi_data, f) if verbose: print(f"✅ Generated pixi.toml at {output_path}") else: # Output to stdout tomli_w.dump(pixi_data, sys.stdout.buffer) ================================================ FILE: unidep/_pytest_plugin.py ================================================ """unidep - Unified Conda and Pip requirements management. Pytest plugin for running only tests of changed files. WARNING: Still experimental and not documented. """ from __future__ import annotations import logging import os import sys from pathlib import Path from typing import TYPE_CHECKING from unidep._dependencies_parsing import ( find_requirements_files, parse_local_dependencies, ) if TYPE_CHECKING: import pytest LOGGER = logging.getLogger(__name__) def pytest_addoption(parser: pytest.Parser) -> None: # pragma: no cover """Add options to the pytest command line.""" parser.addoption( "--run-affected", action="store_true", default=False, help="Run only tests from affected packages (via `unidep`)", ) parser.addoption( "--branch", action="store", default="origin/main", help="Branch to compare with for finding affected tests", ) parser.addoption( "--repo-root", action="store", default=".", type=Path, help="Root of the repository", ) def pytest_collection_modifyitems( config: pytest.Config, items: list[pytest.Item], ) -> None: # pragma: no cover """Filter tests based on the --run-affected option.""" if not config.getoption("--run-affected"): return try: from git import Repo except ImportError: print( "🛑 You need to install `gitpython` to use the `--run-affected` option." " Run `pip install gitpython` to install it.", ) sys.exit(1) compare_branch = config.getoption("--branch") repo_root = Path(config.getoption("--repo-root")).absolute() repo = Repo(repo_root, search_parent_directories=True) repo_root = Path(repo.working_tree_dir) # In case we searched parent directories found_files = find_requirements_files(repo_root) local_dependencies = parse_local_dependencies(*found_files) staged_diffs = repo.head.commit.diff(compare_branch) unstaged_diffs = repo.index.diff(None) diffs = staged_diffs + unstaged_diffs changed_files = [Path(diff.a_path) for diff in diffs] affected_packages = _affected_packages(repo_root, changed_files, local_dependencies) test_files = [config.cwd_relative_nodeid(i.nodeid).split("::", 1)[0] for i in items] run_from_dir = config.invocation_params.dir assert all((run_from_dir / item).exists() for item in test_files) affected_tests = { item for item, f in zip(items, test_files) if any(f.startswith(str(pkg)) for pkg in affected_packages) } # Run `pytest -o log_cli=true -o log_cli_level=INFO --run-affected` # to see the logging output.
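# Illustrative invocation (assumes a checkout where `origin/main` exists):
#   pytest --run-affected --branch origin/main --repo-root .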
logging.info( "Running affected_tests: %s, changed_files: %s, affected_packages: %s", affected_tests, changed_files, affected_packages, ) items[:] = list(affected_tests) def _file_in_folder(file: Path, folder: Path) -> bool: # pragma: no cover file = file.absolute() folder = folder.absolute() common = os.path.commonpath([folder, file]) return os.path.commonpath([folder]) == common def _affected_packages( repo_root: Path, changed_files: list[Path], dependencies: dict[Path, list[Path]], *, verbose: bool = False, ) -> set[Path]: # pragma: no cover affected_packages = set() for file in changed_files: for package, deps in dependencies.items(): if _file_in_folder(repo_root / file, package): if verbose: print(f"File {file} affects package {package}") affected_packages.add(package) affected_packages.update(deps) return {pkg.relative_to(repo_root) for pkg in affected_packages} ================================================ FILE: unidep/_setuptools_integration.py ================================================ #!/usr/bin/env python3 """unidep - Unified Conda and Pip requirements management. This module provides setuptools integration for unidep. """ from __future__ import annotations import os from pathlib import Path, PurePath from typing import TYPE_CHECKING, NamedTuple from ruamel.yaml import YAML from unidep._dependencies_parsing import ( DependencyEntry, _load, get_local_dependencies, parse_requirements, ) from unidep._dependency_selection import ( collapse_selected_universals, select_pip_requirements, ) from unidep.utils import ( UnsupportedPlatformError, build_pep508_environment_marker, identify_current_platform, is_pip_installable, package_name_from_path, parse_folder_or_filename, split_path_and_extras, warn, ) if TYPE_CHECKING: import sys from setuptools import Distribution from unidep.platform_definitions import Platform, Spec if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal def filter_python_dependencies( entries: list[DependencyEntry], platforms: list[Platform] | None = None, ) -> list[str]: """Filter out conda dependencies and return only pip dependencies. Examples -------- >>> requirements = parse_requirements("requirements.yaml") >>> python_deps = filter_python_dependencies( ... requirements.dependency_entries, requirements.platforms ... ) """ if isinstance(entries, dict): msg = ( "`filter_python_dependencies()` now requires dependency entries from " "`parse_requirements(...).dependency_entries`, not the output of " "`resolve_conflicts()`." 
================================================
FILE: unidep/_setuptools_integration.py
================================================
#!/usr/bin/env python3
"""unidep - Unified Conda and Pip requirements management.

This module provides setuptools integration for unidep.
"""

from __future__ import annotations

import os
from pathlib import Path, PurePath
from typing import TYPE_CHECKING, NamedTuple

from ruamel.yaml import YAML

from unidep._dependencies_parsing import (
    DependencyEntry,
    _load,
    get_local_dependencies,
    parse_requirements,
)
from unidep._dependency_selection import (
    collapse_selected_universals,
    select_pip_requirements,
)
from unidep.utils import (
    UnsupportedPlatformError,
    build_pep508_environment_marker,
    identify_current_platform,
    is_pip_installable,
    package_name_from_path,
    parse_folder_or_filename,
    split_path_and_extras,
    warn,
)

if TYPE_CHECKING:
    import sys

    from setuptools import Distribution

    from unidep.platform_definitions import Platform, Spec

    if sys.version_info >= (3, 8):
        from typing import Literal
    else:
        from typing_extensions import Literal


def filter_python_dependencies(
    entries: list[DependencyEntry],
    platforms: list[Platform] | None = None,
) -> list[str]:
    """Filter out conda dependencies and return only pip dependencies.

    Examples
    --------
    >>> requirements = parse_requirements("requirements.yaml")
    >>> python_deps = filter_python_dependencies(
    ...     requirements.dependency_entries, requirements.platforms
    ... )
    """
    if isinstance(entries, dict):
        msg = (
            "`filter_python_dependencies()` now requires dependency entries from "
            "`parse_requirements(...).dependency_entries`, not the output of "
            "`resolve_conflicts()`."
        )
        raise TypeError(msg)
    entries = list(entries)
    selected = collapse_selected_universals(
        select_pip_requirements(entries, platforms),
        platforms,
    )
    pip_deps: list[str] = []
    by_spec: dict[Spec, list[Platform | None]] = {}
    for _platform, candidates in selected.items():
        for candidate in candidates:
            by_spec.setdefault(candidate.spec, []).append(_platform)
    for spec, _platforms in by_spec.items():
        dep_str = spec.name_with_pin(is_pip=True)
        if _platforms != [None] and all(
            platform is not None for platform in _platforms
        ):
            selector = build_pep508_environment_marker(_platforms)  # type: ignore[arg-type]
            dep_str = f"{dep_str}; {selector}"
        pip_deps.append(dep_str)
    return sorted(pip_deps)


class Dependencies(NamedTuple):
    dependencies: list[str]
    extras: dict[str, list[str]]


def _path_to_file_uri(path: PurePath) -> str:
    """Return an RFC 8089 compliant file URI for an absolute path."""
    # Keep in sync with CI helper and discussion in
    # https://github.com/basnijholt/unidep/pull/214#issuecomment-2568663364
    if isinstance(path, Path):
        target = path if path.is_absolute() else path.resolve()
        return target.as_uri()
    uri_path = path.as_posix().lstrip("/")
    return f"file:///{uri_path.replace(' ', '%20')}"
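# --- Editor's sketch (not part of this module): the PurePath branch of
# --- `_path_to_file_uri` above, with a hypothetical path.
_example = PurePath("/opt/pkgs/my pkg")
assert _path_to_file_uri(_example) == "file:///opt/pkgs/my%20pkg"
# ---------------------------------------------------------------------------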
def get_python_dependencies(  # noqa: PLR0912
    filename: str | Path | Literal["requirements.yaml", "pyproject.toml"] = "requirements.yaml",  # noqa: PYI051
    *,
    verbose: bool = False,
    ignore_pins: list[str] | None = None,
    overwrite_pins: list[str] | None = None,
    skip_dependencies: list[str] | None = None,
    platforms: list[Platform] | None = None,
    raises_if_missing: bool = True,
    include_local_dependencies: bool = False,
) -> Dependencies:
    """Extract Python (pip) requirements from a `requirements.yaml` or `pyproject.toml` file."""  # noqa: E501
    try:
        p = parse_folder_or_filename(filename)
    except FileNotFoundError:
        if raises_if_missing:
            raise
        return Dependencies(dependencies=[], extras={})

    requirements = parse_requirements(
        p.path,
        ignore_pins=ignore_pins,
        overwrite_pins=overwrite_pins,
        skip_dependencies=skip_dependencies,
        verbose=verbose,
        extras="*",
    )
    if not platforms:
        platforms = list(requirements.platforms)

    dependencies = filter_python_dependencies(
        requirements.dependency_entries,
        platforms,
    )
    # TODO[Bas]: This currently doesn't correctly handle  # noqa: TD004, TD003, FIX002
    # conflicts between sections in the extras and the main dependencies.
    extras = {
        section: filter_python_dependencies(entries, platforms)
        for section, entries in requirements.optional_dependency_entries.items()
    }

    # Always process local dependencies to handle PyPI alternatives
    yaml = YAML(typ="rt")
    data = _load(p.path, yaml)

    # Process each local dependency
    for local_dep_obj in get_local_dependencies(data):
        if local_dep_obj.use == "skip":
            continue
        if local_dep_obj.use == "pypi":
            # Already added to pip dependencies when parsing requirements.
            continue

        local_path, extras_list = split_path_and_extras(local_dep_obj.local)
        abs_local = (p.path.parent / local_path).resolve()

        # If include_local_dependencies is False (UNIDEP_SKIP_LOCAL_DEPS=1),
        # always use PyPI alternative if available, skip otherwise
        if not include_local_dependencies:
            if local_dep_obj.pypi:
                dependencies.append(local_dep_obj.pypi)
            continue

        # Original behavior when include_local_dependencies is True
        # Handle wheel and zip files
        if abs_local.suffix in (".whl", ".zip"):
            if abs_local.exists():
                # Local wheel exists - use it
                uri = _path_to_file_uri(abs_local)
                dependencies.append(f"{abs_local.name} @ {uri}")
            elif local_dep_obj.pypi:
                # Wheel doesn't exist - use PyPI alternative
                dependencies.append(local_dep_obj.pypi)
            continue

        # Check if local path exists
        if abs_local.exists() and is_pip_installable(abs_local):
            # Local development - use file:// URL
            name = package_name_from_path(abs_local)
            uri = _path_to_file_uri(abs_local)
            dep_str = f"{name} @ {uri}"
            if extras_list:
                dep_str = f"{name}[{','.join(extras_list)}] @ {uri}"
            dependencies.append(dep_str)
        elif local_dep_obj.pypi:
            # Built wheel - local path doesn't exist, use PyPI alternative
            dependencies.append(local_dep_obj.pypi)
        # else: path doesn't exist and no PyPI alternative - skip

    return Dependencies(dependencies=dependencies, extras=extras)


def _deps(requirements_file: Path) -> Dependencies:  # pragma: no cover
    try:
        platforms = [identify_current_platform()]
    except UnsupportedPlatformError:
        warn(
            "Could not identify the current platform."
            " This may result in selecting all platforms."
            " Please report this issue at"
            " https://github.com/basnijholt/unidep/issues",
        )
        # We don't know the current platform, so we can't filter out.
        # This will result in selecting all platforms. But this is better
        # than failing.
        platforms = None

    skip_local_dependencies = bool(os.getenv("UNIDEP_SKIP_LOCAL_DEPS"))
    verbose = bool(os.getenv("UNIDEP_VERBOSE"))
    return get_python_dependencies(
        requirements_file,
        platforms=platforms,
        raises_if_missing=False,
        verbose=verbose,
        include_local_dependencies=not skip_local_dependencies,
    )


def _setuptools_finalizer(dist: Distribution) -> None:  # pragma: no cover
    """Entry point called by setuptools to get the dependencies for a project."""
    # PEP 517 says that "All hooks are run with working directory set to the
    # root of the source tree".
    project_root = Path.cwd()
    try:
        requirements_file = parse_folder_or_filename(project_root).path
    except FileNotFoundError:
        return
    if requirements_file.exists() and dist.install_requires:  # type: ignore[attr-defined]
        msg = (
            "You have a `requirements.yaml` file in your project root or"
            " configured unidep in `pyproject.toml` with `[tool.unidep]`,"
            " but you are also using setuptools' `install_requires`."
            " Remove the `install_requires` line from `setup.py`."
        )
        raise RuntimeError(msg)
    deps = _deps(requirements_file)
    dist.install_requires = deps.dependencies  # type: ignore[attr-defined]
    if deps.extras:
        dist.extras_require = deps.extras  # type: ignore[attr-defined]
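A hedged usage sketch of `get_python_dependencies` (editor's illustration; the example path exists in this repository, but the printed output is indicative only):

from unidep._setuptools_integration import get_python_dependencies

deps = get_python_dependencies(
    "example/hatch_project/requirements.yaml",
    platforms=["linux-64"],
    raises_if_missing=False,
)
print(deps.dependencies)  # pip requirement strings, possibly with PEP 508 markers
print(deps.extras)  # {section: [requirement, ...]}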
""" from __future__ import annotations import sys from typing import NamedTuple, cast if sys.version_info >= (3, 8): from typing import Literal, get_args else: # pragma: no cover from typing_extensions import Literal, get_args CondaPlatform = Literal["unix", "linux", "osx", "win"] Platform = Literal[ "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", "win-64", ] Selector = Literal[ "linux64", "aarch64", "ppc64le", "osx64", "arm64", "win64", "win", "unix", "linux", "osx", "macos", ] # The following are also supported in conda-build but not in UniDep: # "linux-32" (32-bit x86 on Linux) # "linux-64" (64-bit x86 on Linux) # "linux-ppc64" (64-bit PowerPC on Linux) # "linux-ppc64le" (64-bit Little Endian PowerPC on Linux) # "linux-s390x" (64-bit IBM z Systems on Linux) # "linux-armv6l" (32-bit ARMv6 on Linux) # "linux-armv7l" (32-bit ARMv7 on Linux) # "win-32" (32-bit x86 Windows) # "win-arm64" (64-bit ARM on Windows) CondaPip = Literal["conda", "pip"] VALID_SELECTORS = get_args(Selector) PEP508_MARKERS = { "linux-64": "sys_platform == 'linux' and platform_machine == 'x86_64'", "linux-aarch64": "sys_platform == 'linux' and platform_machine == 'aarch64'", "linux-ppc64le": "sys_platform == 'linux' and platform_machine == 'ppc64le'", "osx-64": "sys_platform == 'darwin' and platform_machine == 'x86_64'", "osx-arm64": "sys_platform == 'darwin' and platform_machine == 'arm64'", "win-64": "sys_platform == 'win32' and platform_machine == 'AMD64'", ("linux-64", "linux-aarch64", "linux-ppc64le"): "sys_platform == 'linux'", ("osx-64", "osx-arm64"): "sys_platform == 'darwin'", ( "linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "osx-arm64", ): "sys_platform == 'linux' or sys_platform == 'darwin'", } # The first element of each tuple is the only unique selector PLATFORM_SELECTOR_MAP: dict[Platform, list[Selector]] = { "linux-64": ["linux64", "unix", "linux"], "linux-aarch64": ["aarch64", "unix", "linux"], "linux-ppc64le": ["ppc64le", "unix", "linux"], # "osx64" is a selector unique to conda-build referring to # platforms on macOS and the Python architecture is x86-64 "osx-64": ["osx64", "osx", "macos", "unix"], "osx-arm64": ["arm64", "osx", "macos", "unix"], "win-64": ["win64", "win"], } PLATFORM_SELECTOR_MAP_REVERSE: dict[Selector, set[Platform]] = {} for _platform, _selectors in PLATFORM_SELECTOR_MAP.items(): for _selector in _selectors: PLATFORM_SELECTOR_MAP_REVERSE.setdefault(_selector, set()).add(_platform) def validate_selector(selector: Selector) -> None: """Check if a selector is valid.""" valid_selectors = VALID_SELECTORS if selector not in VALID_SELECTORS: msg = f"Invalid platform selector: `{selector}`, use one of `{valid_selectors}`" raise ValueError(msg) def platforms_from_selector(selector: str) -> list[Platform]: """Extract platforms from a selector. For example, selector can be ``'linux64 win64'`` or ``'osx'``. 
""" # we support a very limited set of selectors that adhere to platform only # refs: # https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#preprocessing-selectors # https://github.com/conda/conda-lock/blob/3d2bf356e2cf3f7284407423f7032189677ba9be/conda_lock/src_parser/selectors.py platforms: set[Platform] = set() for s in selector.split(): s = cast("Selector", s) platforms |= set(PLATFORM_SELECTOR_MAP_REVERSE[s]) return sorted(platforms) class Spec(NamedTuple): """A dependency specification.""" name: str which: CondaPip pin: str | None = None identifier: str | None = None # can be of type `Selector` but also space separated string of `Selector`s selector: str | None = None def platforms(self) -> list[Platform] | None: """Return the platforms for this dependency.""" if self.selector is None: return None return platforms_from_selector(self.selector) def pprint(self) -> str: """Pretty print the dependency.""" result = f"{self.name}" if self.pin is not None: result += f" {self.pin}" if self.selector is not None: result += f" # [{self.selector}]" return result def name_with_pin(self, *, is_pip: bool = False) -> str: """Return the name with the pin.""" result = f"{self.name}" if self.pin is not None: pin = self.pin if is_pip: pin = ",".join( ( f"=={token[1:]}" if token.startswith("=") and not token.startswith("==") else token ) for token in pin.split(",") ) result += f" {pin}" return result ================================================ FILE: unidep/py.typed ================================================ ================================================ FILE: unidep/utils.py ================================================ """unidep - Unified Conda and Pip requirements management. This module provides utility functions used throughout the package. 
""" from __future__ import annotations import ast import codecs import configparser import contextlib import platform import re import sys import warnings from collections import defaultdict from pathlib import Path from typing import Any, Literal, NamedTuple, cast from unidep._version import __version__ from unidep.platform_definitions import ( PEP508_MARKERS, Platform, Selector, Spec, platforms_from_selector, validate_selector, ) if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover import tomli as tomllib def add_comment_to_file( filename: str | Path, extra_lines: list[str] | None = None, ) -> None: """Add a comment to the top of a file.""" if extra_lines is None: extra_lines = [] with open(filename, "r+") as f: # noqa: PTH123 content = f.read() f.seek(0, 0) command_line_args = " ".join(sys.argv[1:]) txt = [ f"# This file is created and managed by `unidep` {__version__}.", "# For details see https://github.com/basnijholt/unidep", f"# File generated with: `unidep {command_line_args}`", *extra_lines, ] content = "\n".join(txt) + "\n\n" + content f.write(content) def remove_top_comments(filename: str | Path) -> None: """Removes the top comments (lines starting with '#') from a file.""" with open(filename) as file: # noqa: PTH123 lines = file.readlines() first_non_comment = next( (i for i, line in enumerate(lines) if not line.strip().startswith("#")), len(lines), ) content_without_comments = lines[first_non_comment:] with open(filename, "w") as file: # noqa: PTH123 file.writelines(content_without_comments) def escape_unicode(string: str) -> str: """Escape unicode characters.""" return codecs.decode(string, "unicode_escape") def is_pip_installable(folder: str | Path) -> bool: # pragma: no cover """Determine if the project is pip installable. Checks for existence of setup.py or [build-system] in pyproject.toml. If the `toml` library is available, it is used to parse the `pyproject.toml` file. If the `toml` library is not available, the function checks for the existence of a line starting with "[build-system]". This does not handle the case where [build-system] is inside of a multi-line literal string. 
""" path = Path(folder) if (path / "setup.py").exists(): return True pyproject_path = path / "pyproject.toml" if pyproject_path.exists(): with pyproject_path.open("rb") as file: pyproject_data = tomllib.load(file) return "build-system" in pyproject_data return False class UnsupportedPlatformError(Exception): """Raised when the current platform is not supported.""" def identify_current_platform() -> Platform: """Detect the current platform.""" system = platform.system().lower() architecture = platform.machine().lower() if system == "linux": if architecture == "x86_64": return "linux-64" if architecture == "aarch64": return "linux-aarch64" if architecture == "ppc64le": return "linux-ppc64le" msg = f"Unsupported Linux architecture `{architecture}`" raise UnsupportedPlatformError(msg) if system == "darwin": if architecture == "x86_64": return "osx-64" if architecture == "arm64": return "osx-arm64" msg = f"Unsupported macOS architecture `{architecture}`" raise UnsupportedPlatformError(msg) if system == "windows": if "64" in architecture: return "win-64" msg = f"Unsupported Windows architecture `{architecture}`" raise UnsupportedPlatformError(msg) msg = f"Unsupported operating system `{system}` with architecture `{architecture}`" raise UnsupportedPlatformError(msg) def collect_selector_platforms( requirements: dict[str, list[Spec]], optional_dependencies: dict[str, dict[str, list[Spec]]] | None = None, ) -> list[Platform]: """Collect all platforms referenced by dependency selectors.""" selector_platforms: set[Platform] = set() def _collect(specs_by_name: dict[str, list[Spec]]) -> None: for specs in specs_by_name.values(): for spec in specs: if spec.selector is None: continue selector_platforms.update(platforms_from_selector(spec.selector)) _collect(requirements) if optional_dependencies is not None: for optional_specs in optional_dependencies.values(): _collect(optional_specs) return sorted(selector_platforms) def resolve_platforms( *, requested_platforms: list[Platform] | None, declared_platforms: list[Platform] | set[Platform] | None = None, selector_platforms: list[Platform] | set[Platform] | None = None, default_current: bool = True, ) -> list[Platform]: """Resolve effective platforms with a shared precedence policy. 
def build_pep508_environment_marker(
    platforms: list[Platform | tuple[Platform, ...]],
) -> str:
    """Generate a PEP 508 selector for a list of platforms."""
    sorted_platforms = tuple(sorted(platforms))
    if sorted_platforms in PEP508_MARKERS:
        return PEP508_MARKERS[sorted_platforms]  # type: ignore[index]
    environment_markers = [
        PEP508_MARKERS[platform]
        for platform in sorted_platforms
        if platform in PEP508_MARKERS
    ]
    return " or ".join(environment_markers)


class ParsedPackageStr(NamedTuple):
    """A package name and version pinning."""

    name: str
    pin: str | None = None
    # can be of type `Selector` but also a space-separated string of `Selector`s
    selector: str | None = None


def parse_package_str(package_str: str) -> ParsedPackageStr:
    """Splits a string into package name, version pinning, and platform selector."""
    # Regex to match package name, version pinning, and optionally platform selector
    # Note: the name_pattern currently allows for paths and extras, however,
    # paths cannot contain spaces or contain brackets.
    name_pattern = r"[a-zA-Z0-9_.\-/]+(\[[a-zA-Z0-9_.,\-]+\])?"
    version_pin_pattern = r".*?"
    selector_pattern = r"[a-z0-9\s]+"
    pattern = rf"({name_pattern})\s*({version_pin_pattern})?(:({selector_pattern}))?$"
    match = re.match(pattern, package_str)

    if match:
        package_name = match.group(1).strip()
        version_pin = match.group(3).strip() if match.group(3) else None
        selector = match.group(5).strip() if match.group(5) else None

        if selector is not None:
            for s in selector.split():
                validate_selector(cast("Selector", s))

        return ParsedPackageStr(
            package_name,
            version_pin,
            selector,
        )

    msg = f"Invalid package string: '{package_str}'"
    raise ValueError(msg)


def package_name_from_setup_cfg(file_path: Path) -> str:
    """Read the package name from ``setup.cfg`` metadata."""
    config = configparser.ConfigParser()
    config.read(file_path)
    name = config.get("metadata", "name", fallback=None)
    if name is None:
        msg = "Could not find the package name in the setup.cfg file."
        raise KeyError(msg)
    return name


def package_name_from_setup_py(file_path: Path) -> str:
    """Read the package name from a simple ``setup.py`` AST."""
    with file_path.open() as f:
        file_content = f.read()
    tree = ast.parse(file_content)

    def _string_literal(node: ast.expr) -> str | None:
        if isinstance(node, ast.Constant) and isinstance(node.value, str):
            return node.value
        return None

    class SetupVisitor(ast.NodeVisitor):
        def __init__(self) -> None:
            self.package_name: str | None = None

        def visit_Call(self, node: ast.Call) -> None:  # noqa: N802
            if isinstance(node.func, ast.Name) and node.func.id == "setup":
                for keyword in node.keywords:
                    if keyword.arg == "name":
                        self.package_name = _string_literal(keyword.value)
                        if self.package_name is not None:
                            return

    visitor = SetupVisitor()
    visitor.visit(tree)
    if visitor.package_name is None:
        msg = "Could not find the package name in the setup.py file."
        raise KeyError(msg)
    return visitor.package_name
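# --- Editor's sketch (not part of this module): `parse_package_str` on a
# --- pinned, selector-qualified requirement string.
assert parse_package_str("numpy >=1.26:linux64") == ("numpy", ">=1.26", "linux64")
assert parse_package_str("pandas") == ("pandas", None, None)
# ---------------------------------------------------------------------------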
def package_name_from_pyproject_toml(file_path: Path) -> str:
    """Read project name from ``pyproject.toml`` (PEP 621 or Poetry)."""
    with file_path.open("rb") as f:
        data = tomllib.load(f)
    with contextlib.suppress(KeyError):
        return data["project"]["name"]
    with contextlib.suppress(KeyError):
        return data["tool"]["poetry"]["name"]
    msg = f"Could not find the package name in the pyproject.toml file: {data}."
    raise KeyError(msg)


def package_name_from_path(path: Path) -> str:
    """Get the package name from ``pyproject.toml``, ``setup.cfg``, or ``setup.py``."""
    pyproject_toml = path / "pyproject.toml"
    if pyproject_toml.exists():
        with contextlib.suppress(
            KeyError,
            OSError,
            TypeError,
            UnicodeError,
            tomllib.TOMLDecodeError,
        ):
            return package_name_from_pyproject_toml(pyproject_toml)
    setup_cfg = path / "setup.cfg"
    if setup_cfg.exists():
        with contextlib.suppress(
            KeyError,
            OSError,
            UnicodeError,
            configparser.Error,
        ):
            return package_name_from_setup_cfg(setup_cfg)
    setup_py = path / "setup.py"
    if setup_py.exists():
        with contextlib.suppress(
            KeyError,
            OSError,
            SyntaxError,
            UnicodeError,
            ValueError,
        ):
            return package_name_from_setup_py(setup_py)
    return path.name


def _simple_warning_format(
    message: Warning | str,
    category: type[Warning],  # noqa: ARG001
    filename: str,
    lineno: int,
    line: str | None = None,  # noqa: ARG001
) -> str:  # pragma: no cover
    """Format warnings without code context."""
    return (
        f"---------------------\n"
        f"⚠️ *** WARNING *** ⚠️\n"
        f"{message}\n"
        f"Location: {filename}:{lineno}\n"
        f"---------------------\n"
    )


def warn(
    message: str | Warning,
    category: type[Warning] = UserWarning,
    stacklevel: int = 1,
) -> None:
    """Emit a warning with a custom format specific to this package."""
    original_format = warnings.formatwarning
    warnings.formatwarning = _simple_warning_format
    try:
        warnings.warn(message, category, stacklevel=stacklevel + 1)
    finally:
        warnings.formatwarning = original_format


def selector_from_comment(comment: str) -> str | None:
    """Extract a valid selector from a comment."""
    multiple_brackets_pat = re.compile(r"#.*\].*\[")  # Detects multiple brackets
    if multiple_brackets_pat.search(comment):
        msg = f"Multiple bracketed selectors found in comment: '{comment}'"
        raise ValueError(msg)

    sel_pat = re.compile(r"#\s*\[([^\[\]]+)\]")
    m = sel_pat.search(comment)
    if not m:
        return None
    selectors = m.group(1).strip().split()
    for s in selectors:
        validate_selector(cast("Selector", s))
    return " ".join(selectors)


def extract_matching_platforms(comment: str) -> list[Platform]:
    """Get all platforms matching a comment."""
    selector = selector_from_comment(comment)
    if selector is None:
        return []
    return platforms_from_selector(selector)


def unidep_configured_in_toml(path: Path) -> bool:
    """Check if dependencies are specified in pyproject.toml."""
    with path.open("rb") as f:
        data = tomllib.load(f)
    return bool(data.get("tool", {}).get("unidep", {}))


def split_path_and_extras(input_str: str | Path) -> tuple[Path, list[str]]:
    """Parse a string of the form `path/to/file[extra1,extra2]` into parts.

    Returns a tuple of the `pathlib.Path` and a list of extras.
    """
    if isinstance(input_str, Path):
        input_str = str(input_str)
    if not input_str:  # Check for empty string
        return Path(), []
    pattern = r"^(.+?)(?:\[([^\[\]]+)\])?$"
    match = re.search(pattern, input_str)
    if match is None:  # pragma: no cover
        # I don't think this is possible, but just in case
        return Path(), []
    path = Path(match.group(1))
    extras = match.group(2)
    if not extras:
        return path, []
    extras = [extra.strip() for extra in extras.split(",")]
    return path, extras
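# --- Editor's sketch (not part of this module): comment selectors and
# --- path-with-extras parsing.
assert selector_from_comment("numpy  # [unix]") == "unix"
assert selector_from_comment("numpy") is None
assert extract_matching_platforms("# [osx]") == ["osx-64", "osx-arm64"]
assert split_path_and_extras("projects/app[test,docs]") == (
    Path("projects/app"),
    ["test", "docs"],
)
# ---------------------------------------------------------------------------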
class PathWithExtras(NamedTuple):
    """A dependency file and extras."""

    path: Path
    extras: list[str]

    @property
    def path_with_extras(self) -> Path:
        """Path including extras, e.g., `path/to/file[test,docs]`."""
        if not self.extras:
            return self.path
        return Path(f"{self.path}[{','.join(self.extras)}]")

    def resolved(self) -> PathWithExtras:
        """Resolve the path and extras."""
        return PathWithExtras(self.path.resolve(), self.extras)

    def canonicalized(self) -> PathWithExtras:
        """Resolve path and normalize extras for deterministic graph keys."""
        return PathWithExtras(self.path.resolve(), sorted(set(self.extras)))

    def __hash__(self) -> int:
        """Hash the path and extras."""
        return hash((self.path, tuple(sorted(self.extras))))

    def __eq__(self, other: object) -> bool:
        """Check if two `PathWithExtras` are equal."""
        if not isinstance(other, PathWithExtras):
            return NotImplemented
        return self.path == other.path and set(self.extras) == set(other.extras)


LocalDependencyUse = Literal["local", "pypi", "skip"]


class LocalDependency(NamedTuple):
    """A local dependency with optional PyPI alternative and `use` mode."""

    local: str
    pypi: str | None = None
    use: LocalDependencyUse = "local"


def parse_folder_or_filename(folder_or_file: str | Path) -> PathWithExtras:
    """Get the path to a `requirements.yaml` or `pyproject.toml` file."""
    folder_or_file, extras = split_path_and_extras(folder_or_file)
    path = Path(folder_or_file)
    if path.is_dir():
        fname_yaml = path / "requirements.yaml"
        if fname_yaml.exists():
            return PathWithExtras(fname_yaml, extras)
        fname_toml = path / "pyproject.toml"
        if fname_toml.exists() and unidep_configured_in_toml(fname_toml):
            return PathWithExtras(fname_toml, extras)
        msg = (
            f"File `{fname_yaml}` or `{fname_toml}` (with unidep configuration)"
            f" not found in `{folder_or_file}`."
        )
        raise FileNotFoundError(msg)
    if not path.exists():
        msg = f"File `{path}` not found."
        raise FileNotFoundError(msg)
    return PathWithExtras(path, extras)


def defaultdict_to_dict(d: defaultdict | Any) -> dict:
    """Convert (nested) defaultdict to (nested) dict."""
    if isinstance(d, defaultdict):
        d = {key: defaultdict_to_dict(value) for key, value in d.items()}
    return d


def get_package_version(package_name: str) -> str | None:
    """Returns the version of the given package.

    Parameters
    ----------
    package_name
        The name of the package to find the version of.

    Returns
    -------
    The version of the package, or None if the package is not found.
    """
    if sys.version_info >= (3, 8):
        import importlib.metadata

        try:
            return importlib.metadata.version(package_name)
        except importlib.metadata.PackageNotFoundError:
            return None
    else:  # pragma: no cover
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            import pkg_resources

        try:
            return pkg_resources.get_distribution(package_name).version
        except pkg_resources.DistributionNotFound:
            return None
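A closing hedged example of `PathWithExtras` and `parse_folder_or_filename` (editor's sketch; the directory name is hypothetical):

from pathlib import Path

from unidep.utils import PathWithExtras, parse_folder_or_filename

# Given a folder `projects/app` containing a `requirements.yaml`,
# `parse_folder_or_filename("projects/app[test]")` would return
# PathWithExtras(Path("projects/app/requirements.yaml"), ["test"]).
pwe = PathWithExtras(Path("projects/app/requirements.yaml"), ["test", "docs"])
assert pwe.path_with_extras == Path("projects/app/requirements.yaml[test,docs]")
# Extras compare as sets, so ordering does not matter:
assert pwe == PathWithExtras(pwe.path, ["docs", "test"])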