Full Code of basnijholt/unidep for AI

main ef030c208627 cached
101 files
863.1 KB
215.4k tokens
693 symbols
1 requests
Download .txt
Showing preview only (904K chars total). Download the full file or copy to clipboard to get everything.
Repository: basnijholt/unidep
Branch: main
Commit: ef030c208627
Files: 101
Total size: 863.1 KB

Directory structure:
gitextract_6n9izm0e/

├── .github/
│   ├── release.py
│   ├── renovate.json
│   ├── use-local-unidep.py
│   └── workflows/
│       ├── documentation-links.yml
│       ├── install-example-projects.yml
│       ├── pytest.yml
│       ├── release.yml
│       ├── toc.yaml
│       └── update-readme.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pre-commit-hooks.yaml
├── .readthedocs.yaml
├── LICENSE
├── README.md
├── bootstrap.sh
├── docs/
│   ├── Makefile
│   └── source/
│       ├── .gitignore
│       └── conf.py
├── example/
│   ├── README.md
│   ├── environment.yaml
│   ├── hatch2_project/
│   │   ├── README.md
│   │   ├── hatch2_project.py
│   │   └── pyproject.toml
│   ├── hatch_project/
│   │   ├── README.md
│   │   ├── hatch_project.py
│   │   ├── pyproject.toml
│   │   └── requirements.yaml
│   ├── pyproject_toml_project/
│   │   ├── README.md
│   │   ├── pyproject.toml
│   │   └── pyproject_toml_project.py
│   ├── setup_py_project/
│   │   ├── README.md
│   │   ├── pyproject.toml
│   │   ├── requirements.yaml
│   │   ├── setup.py
│   │   └── setup_py_project.py
│   └── setuptools_project/
│       ├── README.md
│       ├── pyproject.toml
│       ├── requirements.yaml
│       └── setuptools_project.py
├── pyproject.toml
├── tests/
│   ├── __init__.py
│   ├── helpers.py
│   ├── shared_local_install_monorepo/
│   │   ├── project1/
│   │   │   ├── pyproject.toml
│   │   │   └── requirements.yaml
│   │   ├── project2/
│   │   │   ├── pyproject.toml
│   │   │   └── requirements.yaml
│   │   └── shared/
│   │       ├── pyproject.toml
│   │       └── requirements.yaml
│   ├── simple_monorepo/
│   │   ├── common-requirements.yaml
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   ├── conda-lock.yml
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       ├── conda-lock.yml
│   │       └── requirements.yaml
│   ├── test-pip-and-conda-different-name/
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       └── requirements.yaml
│   ├── test-pip-package-with-conda-dependency/
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       └── requirements.yaml
│   ├── test_cli.py
│   ├── test_cli_install_conda_lock.py
│   ├── test_conda_lock.py
│   ├── test_dependencies_parsing_internal.py
│   ├── test_dependency_selection.py
│   ├── test_local_wheels_and_zip.py
│   ├── test_parse_yaml_local_dependencies.py
│   ├── test_parse_yaml_nested_local_dependencies.py
│   ├── test_pip_indices.py
│   ├── test_pip_indices_cli.py
│   ├── test_pip_indices_integration.py
│   ├── test_pixi.py
│   ├── test_project_dependency_handling.py
│   ├── test_pypi_alternatives/
│   │   ├── main_app/
│   │   │   ├── main_app/
│   │   │   │   └── __init__.py
│   │   │   └── pyproject.toml
│   │   ├── shared_lib/
│   │   │   ├── pyproject.toml
│   │   │   └── shared_lib/
│   │   │       └── __init__.py
│   │   └── test_all_scenarios.sh
│   ├── test_pypi_alternatives.py
│   ├── test_pypi_alternatives_errors.py
│   ├── test_pypi_alternatives_integration.py
│   ├── test_setuptools_integration.py
│   ├── test_unidep.py
│   ├── test_utils.py
│   └── test_version_conflicts.py
└── unidep/
    ├── __init__.py
    ├── _cli.py
    ├── _conda_env.py
    ├── _conda_lock.py
    ├── _conflicts.py
    ├── _dependencies_parsing.py
    ├── _dependency_selection.py
    ├── _hatch_integration.py
    ├── _pixi.py
    ├── _pytest_plugin.py
    ├── _setuptools_integration.py
    ├── _version.py
    ├── platform_definitions.py
    ├── py.typed
    └── utils.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/release.py
================================================
"""Create a new release tag with CalVer format."""

import datetime
import operator
import os
from pathlib import Path

import git
from packaging import version


def get_repo() -> git.Repo:
    """Return a :class:`git.Repo` handle for the repository containing this script."""
    repo_root = Path(__file__).parent.parent
    return git.Repo(repo_root)


def is_already_tagged(repo: git.Repo) -> bool:
    """Return ``True`` if a tag already points at the current HEAD commit.

    ``repo.git.tag(points_at="HEAD")`` returns the matching tag names as a
    string (empty when there is none).  The original implementation returned
    that string directly, contradicting the ``-> bool`` annotation; convert
    explicitly so the return value matches the declared type.
    """
    return bool(repo.git.tag(points_at="HEAD"))


def should_skip_release(repo: git.Repo) -> bool:
    """Return ``True`` when the HEAD commit's subject line contains [skip release]."""
    subject = repo.head.commit.message.partition("\n")[0]
    return "[skip release]" in subject


def get_new_version(repo: git.Repo) -> str:
    """Compute the next CalVer ``YYYY.MM.patch`` version string.

    The patch number increments while the latest tag is from the current
    year/month, and resets to 0 once the month rolls over.
    """
    newest_tag = max(repo.tags, key=operator.attrgetter("commit.committed_datetime"))
    previous = version.parse(newest_tag.name)
    today = datetime.datetime.now(tz=datetime.timezone.utc)
    same_cycle = (previous.major, previous.minor) == (today.year, today.month)
    patch = previous.micro + 1 if same_cycle else 0
    return f"{today.year}.{today.month}.{patch}"


def set_author(repo: git.Repo) -> None:
    """Export the HEAD commit's author as git author/committer env variables."""
    author = repo.head.commit.author
    for key in ("GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME"):
        os.environ[key] = author.name
    for key in ("GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL"):
        os.environ[key] = author.email


def create_tag(repo: git.Repo, new_version: str, release_notes: str) -> None:
    """Tag HEAD as ``new_version``, embedding the release notes in the tag message."""
    set_author(repo)
    message = "\n\n".join([f"Release {new_version}", release_notes])
    repo.create_tag(new_version, message=message)


def push_tag(repo: git.Repo, new_version: str) -> None:
    """Push the freshly created tag to the ``origin`` remote."""
    repo.remote("origin").push(new_version)


def get_commit_messages_since_last_release(repo: git.Repo) -> str:
    """Return the subject lines of all commits made after the most recent tag."""
    by_tag_date = operator.attrgetter("commit.committed_datetime")
    newest_tag = max(repo.tags, key=by_tag_date)
    return repo.git.log(f"{newest_tag}..HEAD", "--pretty=format:%s")


def format_release_notes(commit_messages: str, new_version: str) -> str:
    """Build the human-readable release-notes body.

    Parameters
    ----------
    commit_messages
        Newline-separated commit subject lines, as produced by
        ``get_commit_messages_since_last_release``.
    new_version
        The CalVer version string being released.
    """
    header = f"🚀 Release {new_version}\n\n"
    intro = "📝 This release includes the following changes:\n\n"

    commit_section = "\n".join(
        f"- {commit}" for commit in commit_messages.split("\n")
    )

    footer = (
        "\n\n🙏 Thank you for using this project! Please report any issues "
        "or feedback on the GitHub repository"
        # Fixed: this previously linked to home-assistant-streamdeck-yaml,
        # a copy-paste leftover from another project of the same author.
        " on https://github.com/basnijholt/unidep."
    )

    return f"{header}{intro}{commit_section}{footer}"


def main() -> None:
    """Tag and push a new CalVer release unless HEAD is tagged or opted out."""
    repo = get_repo()

    if is_already_tagged(repo):
        print("Current commit is already tagged!")
        return
    if should_skip_release(repo):
        print("Commit message is [skip release]!")
        return

    new_version = get_new_version(repo)
    commits = get_commit_messages_since_last_release(repo)
    notes = format_release_notes(commits, new_version)
    print(notes)

    create_tag(repo, new_version, notes)
    push_tag(repo, new_version)

    # Expose the new version to subsequent workflow steps via GITHUB_OUTPUT.
    with open(os.environ["GITHUB_OUTPUT"], "a") as output_file:  # noqa: PTH123
        output_file.write(f"version={new_version}\n")
    print(f"Created new tag: {new_version}")


if __name__ == "__main__":
    main()


================================================
FILE: .github/renovate.json
================================================
{
    "$schema": "https://docs.renovatebot.com/renovate-schema.json",
    "rebaseWhen": "behind-base-branch",
    "dependencyDashboard": true,
    "labels": [
        "dependencies",
        "no-stale"
    ],
    "commitMessagePrefix": "⬆️",
    "commitMessageTopic": "{{depName}}",
    "prBodyDefinitions": {
        "Release": "yes"
    },
    "packageRules": [
        {
            "matchManagers": [
                "github-actions"
            ],
            "addLabels": [
                "github_actions"
            ],
            "rangeStrategy": "pin"
        },
        {
            "matchManagers": [
                "github-actions"
            ],
            "matchUpdateTypes": [
                "minor",
                "patch"
            ],
            "automerge": true
        }
    ]
}


================================================
FILE: .github/use-local-unidep.py
================================================
"""Update `pyproject.toml` in each example project to use local `unidep`."""

from pathlib import Path

REPO_ROOT = Path(__file__).resolve().parent.parent
EXAMPLE_DIR = REPO_ROOT / "example"
PROJECT_DIRS = [p for p in EXAMPLE_DIR.iterdir() if p.name.endswith("_project")]
REPO_ROOT_URI = REPO_ROOT.resolve().as_uri()

print(
    f"REPO_ROOT: {REPO_ROOT}, EXAMPLE_DIR: {EXAMPLE_DIR}, PROJECT_DIRS: {PROJECT_DIRS}",
)

for project_dir in PROJECT_DIRS:
    # Rewrite the build-system `requires = [...]` line so that `unidep`
    # (or `unidep[toml]`) resolves to this checkout via a file:// URI.
    pyproject_toml = project_dir / "pyproject.toml"
    lines = pyproject_toml.read_text().splitlines()
    idx = next((i for i, ln in enumerate(lines) if "requires = [" in ln), None)
    if idx is not None:
        requires_line = lines[idx]
        # Check the `[toml]` extra first: a plain "unidep" substring match
        # would also hit `unidep[toml]` and corrupt the extras marker.
        if "unidep[toml]" in requires_line:
            lines[idx] = requires_line.replace(
                "unidep[toml]",
                f"unidep[toml] @ {REPO_ROOT_URI}",
            )
        elif "unidep" in requires_line:
            lines[idx] = requires_line.replace("unidep", f"unidep @ {REPO_ROOT_URI}")
    pyproject_toml.write_text("\n".join(lines))


================================================
FILE: .github/workflows/documentation-links.yml
================================================
name: readthedocs/actions
on:
  pull_request_target:
    types:
      - opened

permissions:
  pull-requests: write

jobs:
  documentation-links:
    runs-on: ubuntu-latest
    steps:
      - uses: readthedocs/actions/preview@v1
        with:
          project-slug: "unidep"


================================================
FILE: .github/workflows/install-example-projects.yml
================================================
name: install-example-projects

on:
  push:
    branches: [main]
  pull_request:

jobs:
  pip-install:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # skips 3.7 (unsupported on GH Actions)
        platform: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.platform }}
    env:
      PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Update pyproject.toml
        run: |
          python .github/use-local-unidep.py
      - name: Install example packages
        run: |
          set -ex
          # Loop over all folders in `./example` and install them
          for d in ./example/*/ ; do
            pip install -e "$d"
            pkg=$(basename $d)
            python -c "import $pkg"
            pip list
          done
        shell: bash

  micromamba-install:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # skips 3.7 (unsupported on GH Actions)
        platform: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.platform }}
    env:
      PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python-version }}
        uses: mamba-org/setup-micromamba@v2
        with:
          environment-name: unidep
          create-args: >-
            python=${{ matrix.python-version }}
      - name: Install unidep
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[toml]"
        shell: bash -el {0}
      - name: Update pyproject.toml
        run: python .github/use-local-unidep.py
        shell: bash -el {0}
      - name: Install example packages
        run: |
          set -ex
          # Loop over all folders in `./example` and install them
          for d in ./example/*/ ; do
            unidep install -e "$d"
            pkg=$(basename $d)
            python -c "import $pkg"
            micromamba list
          done
        shell: bash -el {0}
      - name: Install pyproject_toml_project in new environment
        run: |
          unidep install -n new-env -e ./example/pyproject_toml_project
          micromamba activate new-env
          python -c "import pyproject_toml_project"
        shell: bash -el {0}


  miniconda-install:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.12"] # Just testing the oldest and newest supported versions
        platform: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.platform }}
    env:
      PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: conda-incubator/setup-miniconda@v3
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
      - name: Conda info
        shell: bash -el {0}
        run: conda info
      - name: Install unidep
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[toml]"
        shell: bash -el {0}
      - name: Update pyproject.toml
        run: python .github/use-local-unidep.py
        shell: bash -el {0}
      - name: Install example packages
        run: |
          set -ex
          # Loop over all folders in `./example` and install them
          for d in ./example/*/ ; do
            unidep install -e "$d"
            pkg=$(basename $d)
            python -c "import $pkg"
            conda list
          done
        shell: bash -el {0}
      - name: Install pyproject_toml_project in new environment
        run: |
          unidep install -n new-env -e ./example/pyproject_toml_project
          conda activate new-env
          python -c "import pyproject_toml_project"
        shell: bash -el {0}


================================================
FILE: .github/workflows/pytest.yml
================================================
name: pytest

on:
  push:
    branches: [main]
  pull_request:

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]  # skips 3.7 (unsupported on GH Actions)
        platform: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.platform }}
    env:
      PYTHONIOENCODING: "utf8" # https://gist.github.com/NodeJSmith/e7e37f2d3f162456869f015f842bcf15
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python-version }}
        uses: mamba-org/setup-micromamba@v2
        with:
          environment-name: unidep
          create-args: >-
            python=${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
            # Python 3.8 coverage does not support the "patch" config option
            sed -i.bak '/patch/d' pyproject.toml && rm pyproject.toml.bak
          fi
          pip install -e ".[test]"
        shell: bash -el {0}
      - name: Run pytest
        run: |
          if [[ "${{ matrix.platform }}" == "ubuntu-latest" && "${{ matrix.python-version }}" == "3.11" ]]; then
            pytest
          else
            pytest --cov-fail-under=0
          fi
        shell: bash -el {0}
      - name: Upload coverage to Codecov
        if: matrix.python-version == '3.11' && matrix.platform == 'ubuntu-latest'
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}


================================================
FILE: .github/workflows/release.yml
================================================
name: Upload Python Package

on:
  release:
    types: [published]

jobs:
  deploy:
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/unidep  # NOTE: `${{ github.repository }}` would expand to "basnijholt/unidep", producing a broken PyPI project URL
    permissions:
      id-token: write
    steps:
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14.2"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel build
      - name: Build
        run: |
          python -m build
      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1


================================================
FILE: .github/workflows/toc.yaml
================================================
on: push
name: TOC Generator
jobs:
  generateTOC:
    name: TOC Generator
    runs-on: ubuntu-latest
    steps:
      - uses: technote-space/toc-generator@v4
        with:
          TOC_TITLE: ""
          TARGET_PATHS: "README.md,example/README.md"


================================================
FILE: .github/workflows/update-readme.yml
================================================
name: Update README.md

on:
  push:
    branches:
      - main
  pull_request:

jobs:
  update_readme:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v6
        with:
          persist-credentials: false
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.14.2'

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install markdown-code-runner
          pip install -e .

      - name: Run markdown-code-runner
        run: |
          markdown-code-runner README.md
          cd example
          markdown-code-runner README.md

      - name: Commit updated files
        id: commit
        run: |
          git add -u .
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"
          if git diff --quiet && git diff --staged --quiet; then
            echo "No changes, skipping commit."
            echo "commit_status=skipped" >> $GITHUB_ENV
          else
            git commit -m "Update files from markdown-code-runner"
            echo "commit_status=committed" >> $GITHUB_ENV
          fi

      - name: Push changes
        if: env.commit_status == 'committed'
        uses: ad-m/github-push-action@v1.1.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: ${{ github.head_ref }}


================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

# other
.pixi
.DS_Store
*.code-workspace


================================================
FILE: .pre-commit-config.yaml
================================================
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-added-large-files
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: mixed-line-ending
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: "v0.9.9"
    hooks:
      - id: ruff
        args: ["--fix"]
      - id: ruff-format
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: "v1.15.0"
    hooks:
      - id: mypy
        additional_dependencies: ["types-PyYAML", "types-setuptools"]


================================================
FILE: .pre-commit-hooks.yaml
================================================
- id: unidep-environment-yaml
  name: unidep environment.yaml
  description: Generate environment.yaml from requirements.yaml using unidep.
  entry: unidep merge
  language: python
  files: '(requirements\.yaml|pyproject\.toml)$'
  pass_filenames: false


================================================
FILE: .readthedocs.yaml
================================================
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.12"

sphinx:
  configuration: docs/source/conf.py

python:
  install:
    - method: pip
      path: .
      extra_requirements:
        - docs


================================================
FILE: LICENSE
================================================
BSD 3-Clause License

Copyright (c) 2023, Bas Nijholt
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


================================================
FILE: README.md
================================================
# 🚀 UniDep - Unified Conda and Pip Dependency Management 🚀

![UniDep logo](https://media.githubusercontent.com/media/basnijholt/nijho.lt/main/content/project/unidep/featured.png)

[![PyPI](https://img.shields.io/pypi/v/unidep.svg)](https://pypi.python.org/pypi/unidep)
[![Build Status](https://github.com/basnijholt/unidep/actions/workflows/pytest.yml/badge.svg)](https://github.com/basnijholt/unidep/actions/workflows/pytest.yml)
[![CodeCov](https://codecov.io/gh/basnijholt/unidep/branch/main/graph/badge.svg)](https://codecov.io/gh/basnijholt/unidep)
[![GitHub Repo stars](https://img.shields.io/github/stars/basnijholt/unidep)](https://github.com/basnijholt/unidep)
[![Documentation](https://readthedocs.org/projects/unidep/badge/?version=latest)](https://unidep.readthedocs.io/)
[![Python Bytes](https://img.shields.io/badge/Python_Bytes-366-D7F9FF?logo=applepodcasts&labelColor=blue)](https://www.youtube.com/live/PRaTs3PnJvI?si=UrVozo81Pj8WcyXh&t=489)

> UniDep streamlines Python project dependency management by unifying Conda and Pip packages in a single system.
> [Learn when to use UniDep](#q-when-to-use-unidep) in our [FAQ](#-faq).

Handling dependencies in Python projects can be challenging, especially when juggling Python and non-Python packages.
This often leads to confusion and inefficiency, as developers switch between multiple dependency files.

- **📝 Unified Dependency File**: Use either `requirements.yaml` or `pyproject.toml` to manage both Conda and Pip dependencies in one place.
- **⚙️ Build System Integration**: Integrates with Setuptools and Hatchling for automatic dependency handling during `pip install ./your-package`.
- **💻 One-Command Installation**: `unidep install` handles Conda, Pip, and local dependencies effortlessly.
- **⚡️ Fast Pip Operations**: Leverages `uv` (if installed) for faster pip installations.
- **🏢 Monorepo-Friendly**: Render (multiple) `requirements.yaml` or `pyproject.toml` files into one Conda `environment.yaml` file and maintain fully consistent global *and* per sub package `conda-lock` files.
- **🌍 Platform-Specific Support**: Specify dependencies for different operating systems or architectures.
- **🔧 `pip-compile` Integration**: Generate fully pinned `requirements.txt` files from `requirements.yaml` or `pyproject.toml` files using `pip-compile`.
- **🔒 Integration with `conda-lock`**: Generate fully pinned `conda-lock.yml` files from (multiple) `requirements.yaml` or `pyproject.toml` file(s), leveraging `conda-lock`.
- **🥧 Pixi Support**: Generate `pixi.toml` files from your dependency files, enabling Pixi-based workflows while keeping UniDep as the single source of truth.
- **🤓 Nerd stats**: written in Python, 100% test coverage, fully-typed, all Ruff's rules enabled, easily extensible, and minimal dependencies

`unidep` is designed to make dependency management in Python projects as simple and efficient as possible.
Try it now and streamline your development process!

> [!TIP]
> Check out the [example `requirements.yaml` and `pyproject.toml` below](#example).

<!-- toc-start -->

## :books: Table of Contents

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [:rocket: Bootstrap from Scratch](#rocket-bootstrap-from-scratch)
- [:package: Installation](#package-installation)
- [:memo: `requirements.yaml` and `pyproject.toml` structure](#memo-requirementsyaml-and-pyprojecttoml-structure)
  - [Example](#example)
    - [Example `requirements.yaml`](#example-requirementsyaml)
    - [Example `pyproject.toml`](#example-pyprojecttoml)
  - [Key Points](#key-points)
  - [Supported Version Pinnings](#supported-version-pinnings)
  - [Conflict Resolution](#conflict-resolution)
    - [How It Works](#how-it-works)
  - [Platform Selectors](#platform-selectors)
    - [Supported Selectors](#supported-selectors)
    - [Usage](#usage)
    - [Implementation](#implementation)
  - [Custom Pip Index URLs](#custom-pip-index-urls)
    - [How It Works](#how-it-works-1)
    - [Example Usage](#example-usage)
    - [Generated Output](#generated-output)
  - [`[project.dependencies]` in `pyproject.toml` handling](#projectdependencies-in-pyprojecttoml-handling)
- [:jigsaw: Build System Integration](#jigsaw-build-system-integration)
  - [Local Dependencies in Monorepos](#local-dependencies-in-monorepos)
  - [PyPI Alternatives for Local Dependencies](#pypi-alternatives-for-local-dependencies)
  - [Overriding Nested Vendor Copies with `use`](#overriding-nested-vendor-copies-with-use)
    - [Example: Override foo's bundled bar with your PyPI build](#example-override-foos-bundled-bar-with-your-pypi-build)
  - [All `use` values](#all-use-values)
  - [Build System Behavior](#build-system-behavior)
  - [Example packages](#example-packages)
  - [Setuptools Integration](#setuptools-integration)
  - [Hatchling Integration](#hatchling-integration)
- [:desktop_computer: As a CLI](#desktop_computer-as-a-cli)
  - [`unidep merge`](#unidep-merge)
  - [`unidep install`](#unidep-install)
  - [`unidep install-all`](#unidep-install-all)
  - [`unidep conda-lock`](#unidep-conda-lock)
  - [`unidep pixi`](#unidep-pixi)
    - [What `unidep pixi` generates](#what-unidep-pixi-generates)
    - [Dependency reconciliation rules (important)](#dependency-reconciliation-rules-important)
    - [Channels/platforms precedence](#channelsplatforms-precedence)
    - [Example (single-file)](#example-single-file)
  - [`unidep pip-compile`](#unidep-pip-compile)
  - [`unidep pip`](#unidep-pip)
  - [`unidep conda`](#unidep-conda)
- [❓ FAQ](#-faq)
  - [**Q: When to use UniDep?**](#q-when-to-use-unidep)
  - [**Q: Just show me a full example!**](#q-just-show-me-a-full-example)
  - [**Q: Uses of UniDep in the wild?**](#q-uses-of-unidep-in-the-wild)
  - [**Q: How do I force PyPI instead of a local path for one dependency?**](#q-how-do-i-force-pypi-instead-of-a-local-path-for-one-dependency)
  - [**Q: How do I ignore a local dependency entirely?**](#q-how-do-i-ignore-a-local-dependency-entirely)
  - [**Q: A submodule brings its own copy of package X. How do I avoid conflicts?**](#q-a-submodule-brings-its-own-copy-of-package-x-how-do-i-avoid-conflicts)
  - [**Q: How is this different from conda/mamba/pip?**](#q-how-is-this-different-from-condamambapip)
  - [**Q: I found a project using unidep, now what?**](#q-i-found-a-project-using-unidep-now-what)
  - [**Q: How to handle local dependencies that do not use UniDep?**](#q-how-to-handle-local-dependencies-that-do-not-use-unidep)
  - [**Q: Can't Conda already do this?**](#q-cant-conda-already-do-this)
  - [**Q: What is the difference between `conda-lock` and `unidep conda-lock`?**](#q-what-is-the-difference-between-conda-lock-and-unidep-conda-lock)
  - [**Q: What is the difference between `hatch-conda` / `pdm-conda` and `unidep`?**](#q-what-is-the-difference-between-hatch-conda--pdm-conda-and-unidep)
- [:hammer_and_wrench: Troubleshooting](#hammer_and_wrench-troubleshooting)
  - [`pip install` fails with `FileNotFoundError`](#pip-install-fails-with-filenotfounderror)
- [:warning: Limitations](#warning-limitations)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

<!-- toc-end -->

## :rocket: Bootstrap from Scratch

To get started quickly with UniDep, run the following command. This will download and install [micromamba](https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html) (recommended for fast Conda environment management), [uv](https://docs.astral.sh/uv/getting-started/installation/) (recommended for faster pip installations), and then install UniDep:

```bash
"${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/main/bootstrap.sh)
```

> [!NOTE]
> Micromamba and uv are recommended to optimize your installation experience, but they are not required if you prefer to use your existing Conda and pip setup.

> [!WARNING]
> NEVER! run scripts from the internet without understanding what they do. Always inspect the script first!

<details>
<summary>Pin the hash of the bootstrap script with:</summary>

<!-- CODE:BASH:START -->
<!-- HASH=$(git log -n 1 --pretty=format:"%H" -- bootstrap.sh) -->
<!-- echo '```bash' -->
<!-- echo '"${SHELL}"' '<(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/'"$HASH"'/bootstrap.sh)' -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
"${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/939246571b65004391c425eb6df713303663054a/bootstrap.sh)
```

<!-- OUTPUT:END -->

</details>

## :package: Installation

To install `unidep`, run one of the following commands that use [`pipx`](https://pipx.pypa.io/) (recommended), `pip`, or `conda`:

```bash
pipx install "unidep[all]"  # Recommended (install as a standalone CLI)
```

or

```bash
pip install "unidep[all]"
```

or

```bash
conda install -c conda-forge unidep
```

## :memo: `requirements.yaml` and `pyproject.toml` structure

`unidep` allows either using a
1. `requirements.yaml` file with a specific format (similar but _**not**_ the same as a Conda `environment.yaml` file) or
2. `pyproject.toml` file with a `[tool.unidep]` section.

Both files contain the following keys:

- **name** (Optional): For documentation, not used in the output.
- **channels**: List of conda channels for packages, such as `conda-forge`.
- **dependencies**: Mix of Conda and Pip packages.
- **local_dependencies** (Optional): List of paths to other `requirements.yaml` or `pyproject.toml` files to include.
- **optional_dependencies** (Optional): Dictionary with lists of optional dependencies.
- **platforms** (Optional): List of platforms that are supported (used in `conda-lock`).
- **pip_indices** (Optional): List of custom pip index URLs for private or alternative package repositories.

Whether you use a `requirements.yaml` or `pyproject.toml` file, the same information can be specified in either.
Choose the format that works best for your project.

### Example

#### Example `requirements.yaml`

Example of a `requirements.yaml` file:

```yaml
name: example_environment
channels:
  - conda-forge
dependencies:
  - numpy                   # same name on conda and pip
  - conda: python-graphviz  # When names differ between Conda and Pip
    pip: graphviz
  - pip: slurm-usage >=1.1.0,<2  # pip-only
  - conda: mumps                 # conda-only
  # Use platform selectors
  - conda: cuda-toolkit =11.8    # [linux64]
local_dependencies:
  - ../other-project-using-unidep     # include other projects that use unidep
  - ../common-requirements.yaml       # include other requirements.yaml files
  - ../project-not-managed-by-unidep  # 🚨 Skips its dependencies!
optional_dependencies:
  test:
    - pytest
  full:
    - ../other-local-dep[test]  # include its optional 'test' dependencies
platforms:  # (Optional) specify platforms that are supported (used in conda-lock)
  - linux-64
  - osx-arm64
pip_indices:  # (Optional) additional pip index URLs for private packages
  - https://pypi.org/simple/  # Main PyPI index (automatically included if not specified)
  - https://private.company.com/simple/  # Private company index
  - https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/  # Authenticated index with env vars
```

> [!IMPORTANT]
> `unidep` can process this during `pip install` and create a Conda installable `environment.yaml` or `conda-lock.yml` file, and more!

> [!NOTE]
> For a more in-depth example containing multiple installable projects, see the [`example`](example/) directory.

#### Example `pyproject.toml`

***Alternatively***, one can fully configure the dependencies in the `pyproject.toml` file in the `[tool.unidep]` section:

```toml
[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    "numpy",                                         # same name on conda and pip
    { conda = "python-graphviz", pip = "graphviz" }, # When names differ between Conda and Pip
    { pip = "slurm-usage >=1.1.0,<2" },              # pip-only
    { conda = "mumps" },                             # conda-only
    { conda = "cuda-toolkit =11.8:linux64" }         # Use platform selectors by appending `:linux64`
]
local_dependencies = [
    "../other-project-using-unidep",    # include other projects that use unidep
    "../common-requirements.yaml",      # include other requirements.yaml files
    "../project-not-managed-by-unidep"  # 🚨 Skips its dependencies!
]
optional_dependencies = {
    test = ["pytest"],
    full = ["../other-local-dep[test]"]  # include its optional 'test' dependencies
}
platforms = [ # (Optional) specify platforms that are supported (used in conda-lock)
    "linux-64",
    "osx-arm64"
]
pip_indices = [ # (Optional) additional pip index URLs for private packages
    "https://pypi.org/simple/",  # Main PyPI index (automatically included if not specified)
    "https://private.company.com/simple/",  # Private company index
    "https://${PIP_USER}:${PIP_PASSWORD}@private.pypi.org/simple/"  # Authenticated index with env vars
]
```

This data structure is *identical* to the `requirements.yaml` format, with the exception of the `name` field and the [platform selectors](#platform-selectors).
In the `requirements.yaml` file, one can use e.g., `# [linux64]`, which in the `pyproject.toml` file is `:linux64` at the end of the package name.

See [Build System Integration](#jigsaw-build-system-integration) for more information on how to set up `unidep` with different build systems (Setuptools or Hatchling).

> [!IMPORTANT]
> In these docs, we often mention the `requirements.yaml` format for simplicity, but the same information can be specified in `pyproject.toml` as well.
> Everything that is possible in `requirements.yaml` is also possible in `pyproject.toml`!

### Key Points

- Standard names (e.g., `- numpy`) are assumed to be the same for Conda and Pip.
- Use a dictionary with `conda: <package>` *and* `pip: <package>` to specify different names between the two package managers.
- Use `pip:` to specify packages that are only available through Pip.
- Use `conda:` to specify packages that are only available through Conda.
- Use `# [selector]` (YAML only) or `package:selector` to specify platform-specific dependencies.
- Use `local_dependencies:` to include other `requirements.yaml` or `pyproject.toml` files and merge them into one. Also allows projects that are not managed by `unidep` to be included, but be aware that this skips their dependencies! Can specify PyPI alternatives for monorepo setups (see [PyPI Alternatives for Local Dependencies](#pypi-alternatives-for-local-dependencies)).
- Use `optional_dependencies:` to specify optional dependencies. Can be installed like `unidep install ".[test]"` or `pip install ".[test]"`.
- Use `platforms:` to specify the platforms that are supported. If omitted, all platforms are assumed to be supported.
- Use `pip_indices:` to specify additional pip index URLs for installing packages from private or alternative package repositories (see [Custom Pip Index URLs](#custom-pip-index-urls) below).

> *We use the YAML notation here, but the same information can be specified in `pyproject.toml` as well.*

### Supported Version Pinnings

UniDep has two relevant pinning layers:

- **Dict-based conflict helper (`unidep._conflicts.resolve_conflicts`)**: combines repeated pinnings with the Conda-compatible subset of operators: `=`, `>`, `<`, `>=`, `<=`, `!=`.
- **CLI-facing pip renderers**: additionally preserve safe pip-only PEP 440 forms such as `==` and `~=` when those constraints can be kept explicitly without ambiguity.

Examples:

- Conda-compatible merge: `>1.0.0, <2.0.0`
- Exact pip pin: `==0.25.2.1`
- Compatible release pin: `~=1.0`

- **Redundant Pinning Resolution**: Automatically resolves redundant compatible constraints when possible.
  - Example: `>1.0.0, >0.5.0` simplifies to `>1.0.0`.

- **Contradictory Version Detection**: Errors are raised for contradictory pinnings to maintain dependency integrity. See the [Conflict Resolution](#conflict-resolution) section for more information.
  - Example: Specifying `>2.0.0, <1.5.0` triggers a `VersionConflictError`.

- **Invalid Pinning Detection**: Detects and raises errors for unrecognized or improperly formatted version specifications.

- **Conda Build Pinning**: UniDep also supports Conda's build pinning, allowing you to specify builds in your pinning patterns.
  - Example: Conda supports pinning builds like `qsimcirq * cuda*` or `vtk * *egl*`.
  - **Limitation**: While UniDep allows such build pinning, it requires that there be a single pin per package. UniDep cannot resolve conflicts where multiple build pinnings are specified for the same package.
    - Example: UniDep can handle `qsimcirq * cuda*`, but it cannot resolve a scenario with both `qsimcirq * cuda*` and `qsimcirq * cpu*`.

- **Other Special Cases**: In addition to Conda build pins, UniDep supports all special pinning formats, such as VCS (Version Control System) URLs or local file paths. This includes formats like `package @ git+https://git/repo/here` or `package @ file:///path/to/package`. However, UniDep has a limitation: it can handle only one special pin per package. These special pins can be combined with an unpinned version specification, but not with multiple special pin formats for the same package.
  - Example: UniDep can manage dependencies specified as `package @ git+https://git/repo/here` and `package` in the same `requirements.yaml`. However, it cannot resolve scenarios where both `package @ git+https://git/repo/here` and `package @ file:///path/to/package` are specified for the same package.

> [!WARNING]
> **Pinning Validation and Combination**: UniDep actively validates and/or combines pinnings only when **multiple different pinnings** are specified for the same package.
> This means if your `requirements.yaml` files include multiple pinnings for a single package, UniDep will attempt to resolve them into a single, coherent specification.
> However, if the pinnings are contradictory or incompatible, UniDep will raise an error to alert you of the conflict.

### Conflict Resolution

`unidep` features a conflict resolution mechanism to manage version conflicts and platform-specific dependencies in `requirements.yaml` or `pyproject.toml` files.

#### How It Works

- **Within-source pinning priority**: `unidep` combines repeated entries within the same source (`conda` or `pip`) and gives priority to version-pinned packages. For instance, if both `foo` and `foo <1` are listed for the same source, `foo <1` is selected due to its specific version pin.

- **Entry-based rendering**: CLI-facing outputs now work from `parse_requirements(...).dependency_entries`, preserving each original declaration long enough for the shared selector to choose the final Conda-like or pip-only result.

- **Lower-level metadata helper**: `unidep._conflicts.resolve_conflicts()` still exists for the older dict-based requirements model (`ParsedRequirements.requirements`), but it is no longer the main renderer handoff.

- **Conda-like paired-entry selection**: For explicit dependency entries that provide both `conda:` and `pip:` alternatives, Conda-like outputs use deterministic source selection rules: Pip extras win, otherwise a single pinned side wins, and ties prefer Conda.

- **Pip-only output selection**: Pip-only exports (`unidep pip`, setuptools integration, `get_python_dependencies`) keep the Pip dependency when it exists, even if Conda would win for a Conda-like output.

- **Platform-Specific Version Pinning**: `unidep` resolves platform-specific dependency conflicts by preferring the version with the narrowest platform scope. For instance, given `foo <3 # [linux64]` and `foo >1`, it installs `foo >1,<3` exclusively on Linux-64 and `foo >1` on all other platforms.

- **Intractable Conflicts**: When conflicts are irreconcilable within a source (e.g., `foo >1` vs. `foo <1`), `unidep` raises an exception.

### Platform Selectors

This tool supports a range of platform selectors that allow for specific handling of dependencies based on the user's operating system and architecture. This feature is particularly useful for managing conditional dependencies in diverse environments.

#### Supported Selectors

The following selectors are supported:

- `linux`: For all Linux-based systems.
- `linux64`: Specifically for 64-bit Linux systems.
- `aarch64`: For Linux systems on ARM64 architectures.
- `ppc64le`: For Linux on PowerPC 64-bit Little Endian architectures.
- `osx`: For all macOS systems.
- `osx64`: Specifically for 64-bit macOS systems.
- `arm64`: For macOS systems on ARM64 architectures (Apple Silicon).
- `macos`: An alternative to `osx` for macOS systems.
- `unix`: A general selector for all UNIX-like systems (includes Linux and macOS).
- `win`: For all Windows systems.
- `win64`: Specifically for 64-bit Windows systems.

#### Usage

Selectors are used in `requirements.yaml` files to conditionally include dependencies based on the platform:

```yaml
dependencies:
  - some-package >=1  # [unix]
  - another-package   # [win]
  - special-package   # [osx64]
  - pip: cirq         # [macos win]
    conda: cirq       # [linux]
```

Or when using `pyproject.toml` instead of `requirements.yaml`:

```toml
[tool.unidep]
dependencies = [
    "some-package >=1:unix",
    "another-package:win",
    "special-package:osx64",
    { pip = "cirq:macos win", conda = "cirq:linux" },
]
```

In this example:

- `some-package` is included only in UNIX-like environments (Linux and macOS).
- `another-package` is specific to Windows.
- `special-package` is included only for 64-bit macOS systems.
- `cirq` is managed by `pip` on macOS and Windows, and by `conda` on Linux. This demonstrates how you can specify different package managers for the same package based on the platform.

Note that the `package-name:unix` syntax can also be used in the `requirements.yaml` file, but the `package-name # [unix]` syntax is not supported in `pyproject.toml`.

#### Implementation

`unidep` parses these selectors and filters dependencies according to the platform where it's being installed.
It is also used for creating environment and lock files that are portable across different platforms, ensuring that each environment has the appropriate dependencies installed.

### Custom Pip Index URLs

The `pip_indices` field allows you to specify additional pip index URLs for installing packages from private or alternative package repositories. It may be given as a single string or a list of strings. This is particularly useful for:

- **Private Company Packages**: Access internal packages hosted on private PyPI servers
- **Alternative Package Repositories**: Use mirrors or alternative package sources
- **Authenticated Repositories**: Access protected repositories using environment variables for credentials

#### How It Works

When `pip_indices` is specified:

1. **First index is primary**: The first URL in the list is used as `--index-url` (primary index)
2. **Additional indices are extra**: Subsequent URLs are passed as `--extra-index-url` flags
3. **Environment variable expansion**: Variables like `${PIP_USER}` and `${PIP_PASSWORD}` are automatically expanded from environment variables
4. **Automatic deduplication**: Duplicate URLs are automatically removed while preserving order
5. **Integration with all tools**: Works with `unidep install`, `pip install`, and when using `uv` as the installer

#### Example Usage

```yaml
# requirements.yaml
pip_indices:
  - https://pypi.org/simple/  # Primary index (optional, used by default)
  - https://test.pypi.org/simple/  # Test PyPI for pre-release packages
  - https://${GITLAB_USER}:${GITLAB_TOKEN}@gitlab.company.com/api/v4/projects/123/packages/pypi/simple  # Private GitLab
```

```toml
# pyproject.toml
[tool.unidep]
pip_indices = [
    "https://download.pytorch.org/whl/cpu",  # PyTorch CPU-only builds
    "https://${ARTIFACTORY_USER}:${ARTIFACTORY_PASSWORD}@artifactory.company.com/pypi/simple"  # Artifactory
]
```

#### Generated Output

When generating `environment.yaml` files, `pip_indices` are included as `pip-repositories`:

```yaml
# Generated environment.yaml
name: myproject
channels:
  - conda-forge
pip-repositories:
  - https://pypi.org/simple/
  - https://private.company.com/simple/
dependencies:
  - python
  - pip:
    - private-package  # Will be installed from the private index
```


> [!TIP]
> Store sensitive credentials in environment variables rather than hardcoding them in configuration files. UniDep automatically expands `${VAR_NAME}` patterns.

### `[project.dependencies]` in `pyproject.toml` handling

The `project_dependency_handling` option in `[tool.unidep]` (in `pyproject.toml`) controls how dependencies listed in the standard `[project.dependencies]` section of `pyproject.toml` are handled when processed by `unidep`.

**Modes:**

- **`ignore`** (default): Dependencies in `[project.dependencies]` are ignored by `unidep`.
- **`same-name`**: Dependencies in `[project.dependencies]` are treated as dependencies with the same name for both Conda and Pip. They will be added to the `dependencies` list in `[tool.unidep]` under the assumption that the package name is the same for both package managers.
- **`pip-only`**: Dependencies in `[project.dependencies]` are treated as pip-only dependencies. They will be added to the `dependencies` list in `[tool.unidep]` under the `pip` key.

**Example `pyproject.toml`:**

```toml
[build-system]
requires = ["hatchling", "unidep"]
build-backend = "hatchling.build"

[project]
name = "my-project"
version = "0.1.0"
dependencies = [  # These will be handled according to the `project_dependency_handling` option
  "requests",
  "pandas",
]

[tool.unidep]
project_dependency_handling = "same-name"  # Or "pip-only", "ignore"
dependencies = [
    {conda = "python-graphviz", pip = "graphviz"},
]
```

**Notes:**

- The `project_dependency_handling` option only affects how dependencies from `[project.dependencies]` are processed. Dependencies directly listed under `[tool.unidep.dependencies]` are handled as before.
- This feature is helpful for projects that are already using the standard `[project.dependencies]` field and want to integrate `unidep` without duplicating their dependency list.
- The `project_dependency_handling` feature is **only available** when using `pyproject.toml` files. It is not supported in `requirements.yaml` files.

## :jigsaw: Build System Integration

> [!TIP]
> See [`example/`](example/) for working examples of using `unidep` with different build systems.

`unidep` seamlessly integrates with popular Python build systems to simplify dependency management in your projects.

### Local Dependencies in Monorepos

Local dependencies are essential for monorepos and multi-package projects, allowing you to:
- Share code between packages during development
- Maintain separate releases for each package
- Test changes across multiple packages simultaneously

However, when building wheels for distribution, local paths create non-portable packages that only work on the original system.

### PyPI Alternatives for Local Dependencies

UniDep solves this problem by letting you specify both local paths (for development) and PyPI packages (for distribution):

```yaml
# requirements.yaml
dependencies:
  - numpy
  - pandas

local_dependencies:
  # Standard string format for local dependencies
  - ../shared-lib

  # Dictionary format with optional PyPI alternative for build-time
  - local: ../auth-lib
    pypi: company-auth-lib>=1.0

  - local: ../utils
    pypi: company-utils~=2.0
    use: pypi  # see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)
```

Or in `pyproject.toml`:

```toml
[tool.unidep]
dependencies = ["numpy", "pandas"]

local_dependencies = [
    # Standard string format for local dependencies
    "../shared-lib",

    # Dictionary format with optional PyPI alternative for build-time
    {local = "../auth-lib", pypi = "company-auth-lib>=1.0"},
    {local = "../utils", pypi = "company-utils~=2.0", use = "pypi"},
]
```

**How it works:**
- **During development** (e.g., `unidep install` or `pip install -e .`): Uses local paths when they exist
- **When building wheels**: PyPI alternatives (if specified) are used to create portable packages
- The standard string format continues to work as always for local dependencies

> [!TIP]
> PyPI alternatives ensure your wheels are portable and can be installed anywhere, not just on the build system. Use the `use` field (see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)) to control whether UniDep installs the local path, forces PyPI, or skips the entry entirely.

### Overriding Nested Vendor Copies with `use`

**The Problem:** When vendoring dependencies as git submodules, you often encounter conflicts where a submodule bundles its own copy of a dependency you also use, but at a different version.

**The Solution:** Use `use: pypi` to force your PyPI package instead of the vendored copy, with automatic propagation to all nested references.

#### Example: Override foo's bundled bar with your PyPI build

Your project vendors `foo` as a submodule. Foo bundles `bar@1.0`, but you need `bar@2.0`:

```
project/
  third_party/
    foo/                    # git submodule you don't control
      third_party/
        bar/                # foo bundles bar@1.0
```

**Solution with `use: pypi`:**

```yaml
local_dependencies:
  - ./third_party/foo       # Keep foo editable for development

  # Override: force YOUR PyPI build of bar
  - local: ./third_party/foo/third_party/bar
    pypi: my-bar>=2.0
    use: pypi               # Install from PyPI, skip local path
```

**What happens:**
1. `foo` stays local (editable for development)
2. `my-bar>=2.0` gets installed from PyPI (not foo's bundled v1.0)
3. **Propagates**: Every nested reference to `bar` uses your PyPI package
4. Works with `unidep install`, `unidep conda-lock`, all CLI commands

This is the **key difference** from just using `pypi:` as a build-time fallback - `use: pypi` **forces the PyPI package during development** while keeping other local dependencies editable.

---

### All `use` values

Tell UniDep what to **use** for each entry in `local_dependencies`:

| `use` value | When to use | Installs from | Propagates override? |
|------------|-------------|---------------|---------------------|
| `local` *(default)* | Normal local development | Local path | - |
| `pypi` | **Force PyPI** even when local exists | `pypi:` spec | Yes |
| `skip` | Ignore this path entirely | Nothing | Yes |

**Common patterns:**

```yaml
local_dependencies:
  # Standard local development (default)
  - ../shared-lib

  # Force PyPI to override nested vendor copy
  - local: ./vendor/foo/nested/bar
    pypi: my-bar>=2.0
    use: pypi

  # Skip a path without installing anything
  - local: ./deprecated-module
    use: skip
```

> [!NOTE]
> **Precedence:** The `use` flag on the entry itself always wins. When UniDep encounters the same path in nested `local_dependencies`, it uses your override. Setting `UNIDEP_SKIP_LOCAL_DEPS=1` forces any effective `use: local` to behave like `pypi` (if specified) or `skip`, but does **not** override explicit `use: pypi` or `use: skip`.

> [!WARNING]
> If `use: pypi` is set but no `pypi:` requirement is provided, UniDep exits with a clear error so you can supply the missing spec.

### Build System Behavior

**Important differences between build backends:**
- **Setuptools**: Builds wheels containing `file://` URLs with absolute paths. These wheels only work on the original system.
- **Hatchling**: Rejects `file://` URLs by default, preventing non-portable wheels.

To ensure portable wheels, you can use the `UNIDEP_SKIP_LOCAL_DEPS` environment variable:

```bash
# Force use of PyPI alternatives even when local paths exist
UNIDEP_SKIP_LOCAL_DEPS=1 python -m build

# For hatch projects
UNIDEP_SKIP_LOCAL_DEPS=1 hatch build

# For uv build
UNIDEP_SKIP_LOCAL_DEPS=1 uv build
```

> [!NOTE]
> **When `UNIDEP_SKIP_LOCAL_DEPS=1` is set:**
> - Any effective `use: local` behaves as `use: pypi` (if a `pypi` spec exists) or `use: skip`
> - Explicit `use: pypi` and `use: skip` remain unchanged
> - Dependencies from local packages are still included (from their `requirements.yaml`/`pyproject.toml`)

### Example packages

Explore these installable [example](example/) packages to understand how `unidep` integrates with different build tools and configurations:

| Project                                                    | Build Tool   | `pyproject.toml` | `requirements.yaml` | `setup.py` |
| ---------------------------------------------------------- | ------------ | ---------------- | ------------------- | ---------- |
| [`setup_py_project`](example/setup_py_project)             | `setuptools` | ✅                | ✅                   | ✅          |
| [`setuptools_project`](example/setuptools_project)         | `setuptools` | ✅                | ✅                   | ❌          |
| [`pyproject_toml_project`](example/pyproject_toml_project) | `setuptools` | ✅                | ❌                   | ❌          |
| [`hatch_project`](example/hatch_project)                   | `hatch`      | ✅                | ✅                   | ❌          |
| [`hatch2_project`](example/hatch2_project)                 | `hatch`      | ✅                | ❌                   | ❌          |

### Setuptools Integration

For projects using `setuptools`, configure `unidep` in `pyproject.toml` and either specify dependencies in a `requirements.yaml` file or include them in `pyproject.toml` too.

- **Using `pyproject.toml` only**: The `[project.dependencies]` field in `pyproject.toml` gets automatically populated from `requirements.yaml` or from the `[tool.unidep]` section in `pyproject.toml`.
- **Using `setup.py`**: The `install_requires` field in `setup.py` automatically reflects dependencies specified in `requirements.yaml` or `pyproject.toml`.

**Example `pyproject.toml` Configuration**:

```toml
[build-system]
build-backend = "setuptools.build_meta"
requires = ["setuptools", "unidep"]

[project]
dynamic = ["dependencies"]
```

### Hatchling Integration

For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured in `pyproject.toml` to automatically process the dependencies from `requirements.yaml` or from the `[tool.unidep]` section in `pyproject.toml`.

**Example Configuration for Hatch**:

```toml
[build-system]
requires = ["hatchling", "unidep"]
build-backend = "hatchling.build"

[project]
dynamic = ["dependencies"]
# Additional project configurations

[tool.hatch.metadata.hooks.unidep]
# Enable the unidep plugin

[tool.hatch.metadata]
allow-direct-references = true

[tool.unidep]
# Your dependencies configuration
```

## :desktop_computer: As a CLI

See [example](example/) for more information or check the output of `unidep -h` for the available subcommands:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep [-h]
              {merge,install,install-all,conda-lock,pixi,pip-compile,pip,conda,version} ...

Unified Conda and Pip requirements management.

positional arguments:
  {merge,install,install-all,conda-lock,pixi,pip-compile,pip,conda,version}
                        Subcommands
    merge               Combine multiple (or a single) `requirements.yaml` or
                        `pyproject.toml` files into a single Conda installable
                        `environment.yaml` file.
    install             Automatically install all dependencies from one or
                        more `requirements.yaml` or `pyproject.toml` files.
                        This command first installs dependencies with Conda,
                        then with Pip. Finally, it installs local packages
                        (those containing the `requirements.yaml` or
                        `pyproject.toml` files) using `pip install [-e]
                        ./project`.
    install-all         Install dependencies from all `requirements.yaml` or
                        `pyproject.toml` files found in the current directory
                        or specified directory. This command first installs
                        dependencies using Conda, then Pip, and finally the
                        local packages.
    conda-lock          Generate a global `conda-lock.yml` file for a
                        collection of `requirements.yaml` or `pyproject.toml`
                        files. Additionally, create individual `conda-
                        lock.yml` files for each `requirements.yaml` or
                        `pyproject.toml` file consistent with the global lock
                        file.
    pixi                Generate a `pixi.toml` file from `requirements.yaml`
                        or `pyproject.toml` files.
    pip-compile         Generate a fully pinned `requirements.txt` file from
                        one or more `requirements.yaml` or `pyproject.toml`
                        files using `pip-compile` from `pip-tools`. This
                        command consolidates all pip dependencies defined in
                        the `requirements.yaml` or `pyproject.toml` files and
                        compiles them into a single `requirements.txt` file,
                        taking into account the specific versions and
                        dependencies of each package.
    pip                 Get the pip requirements for the current platform
                        only.
    conda               Get the conda requirements for the current platform
                        only.
    version             Print version information of unidep.

options:
  -h, --help            show this help message and exit
```

<!-- OUTPUT:END -->

### `unidep merge`

Use `unidep merge` to scan directories for `requirements.yaml` file(s) and combine them into an `environment.yaml` file.
Optional dependency groups can be included with `--optional-dependencies docs test`
or `--all-optional-dependencies`.
See `unidep merge -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep merge -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep merge [-h] [-o OUTPUT] [-n NAME] [--stdout]
                    [--selector {sel,comment}]
                    [--optional-dependencies GROUP [GROUP ...] |
                    --all-optional-dependencies] [-d DIRECTORY]
                    [--depth DEPTH] [-v]
                    [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                    [--skip-dependency SKIP_DEPENDENCY]
                    [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN]

Combine multiple (or a single) `requirements.yaml` or `pyproject.toml` files
into a single Conda installable `environment.yaml` file. Example usage:
`unidep merge --directory . --depth 1 --output environment.yaml` to search for
`requirements.yaml` or `pyproject.toml` files in the current directory and its
subdirectories and create `environment.yaml`. These are the defaults, so you
can also just run `unidep merge`. For Pixi support, use `unidep pixi`.

options:
  -h, --help            show this help message and exit
  -o, --output OUTPUT   Output file for the conda environment, by default
                        `environment.yaml`
  -n, --name NAME       Name of the conda environment, by default `myenv`
  --stdout              Output to stdout instead of a file
  --selector {sel,comment}
                        The selector to use for the environment markers, if
                        `sel` then `- numpy # [linux]` becomes `sel(linux):
                        numpy`, if `comment` then it remains `- numpy #
                        [linux]`, by default `sel`
  --optional-dependencies GROUP [GROUP ...]
                        Include the named optional dependency group(s) from
                        the discovered requirements files.
  --all-optional-dependencies
                        Include all optional dependency groups from the
                        discovered requirements files.
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

<!-- OUTPUT:END -->

### `unidep install`

Use `unidep install` on one or more `requirements.yaml` files and install the dependencies on the current platform using conda, then install the remaining dependencies with pip, and finally install the current package with `pip install [-e] .`.
See `unidep install -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep install -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep install [-h] [-v] [-e] [--skip-local] [--skip-pip]
                      [--skip-conda] [--skip-dependency SKIP_DEPENDENCY]
                      [--no-dependencies]
                      [--conda-executable {conda,mamba,micromamba}]
                      [-n CONDA_ENV_NAME | -p CONDA_ENV_PREFIX] [--dry-run]
                      [--ignore-pin IGNORE_PIN]
                      [--overwrite-pin OVERWRITE_PIN] [-f CONDA_LOCK_FILE]
                      [--no-uv]
                      files [files ...]

Automatically install all dependencies from one or more `requirements.yaml` or
`pyproject.toml` files. This command first installs dependencies with Conda,
then with Pip. Finally, it installs local packages (those containing the
`requirements.yaml` or `pyproject.toml` files) using `pip install [-e]
./project`. Example usage: `unidep install .` for a single project. For
multiple projects: `unidep install ./project1 ./project2`. The command accepts
both file paths and directories containing a `requirements.yaml` or
`pyproject.toml` file. Use `--editable` or `-e` to install the local packages
in editable mode. See `unidep install-all` to install all `requirements.yaml`
or `pyproject.toml` files in and below the current folder.

positional arguments:
  files                 The `requirements.yaml` or `pyproject.toml` file(s) to
                        parse or folder(s) that contain those file(s), by
                        default `.`

options:
  -h, --help            show this help message and exit
  -v, --verbose         Print verbose output
  -e, --editable        Install the project in editable mode
  --skip-local          Skip installing local dependencies
  --skip-pip            Skip installing pip dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-conda          Skip installing conda dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --no-dependencies, --no-deps
                        Skip installing dependencies from `requirements.yaml`
                        or `pyproject.toml` file(s) and only install local
                        package(s). Useful after installing a `conda-lock.yml`
                        file because then all dependencies have already been
                        installed.
  --conda-executable {conda,mamba,micromamba}
                        The conda executable to use
  -n, --conda-env-name CONDA_ENV_NAME
                        Name of the conda environment, if not provided, the
                        currently active environment name is used, unless
                        `--conda-env-prefix` is provided
  -p, --conda-env-prefix CONDA_ENV_PREFIX
                        Path to the conda environment, if not provided, the
                        currently active environment path is used, unless
                        `--conda-env-name` is provided
  --dry-run, --dry      Only print the commands that would be run
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  -f, --conda-lock-file CONDA_LOCK_FILE
                        Path to the `conda-lock.yml` file to use for creating
                        the new environment. Assumes that the lock file
                        contains all dependencies. Must be used with `--conda-
                        env-name` or `--conda-env-prefix`.
  --no-uv               Disables the use of `uv` for pip install. By default,
                        `uv` is used if it is available in the PATH.
```

<!-- OUTPUT:END -->

### `unidep install-all`

Use `unidep install-all` on a folder with packages that contain `requirements.yaml` or `pyproject.toml` files and install the dependencies on the current platform using conda, then install the remaining dependencies with pip, and finally install the local packages with `pip install [-e] ./package1 ./package2`.
See `unidep install-all -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep install-all -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep install [-h] [-v] [-e] [--skip-local] [--skip-pip]
                      [--skip-conda] [--skip-dependency SKIP_DEPENDENCY]
                      [--no-dependencies]
                      [--conda-executable {conda,mamba,micromamba}]
                      [-n CONDA_ENV_NAME | -p CONDA_ENV_PREFIX] [--dry-run]
                      [--ignore-pin IGNORE_PIN]
                      [--overwrite-pin OVERWRITE_PIN] [-f CONDA_LOCK_FILE]
                      [--no-uv]
                      files [files ...]

Automatically install all dependencies from one or more `requirements.yaml` or
`pyproject.toml` files. This command first installs dependencies with Conda,
then with Pip. Finally, it installs local packages (those containing the
`requirements.yaml` or `pyproject.toml` files) using `pip install [-e]
./project`. Example usage: `unidep install .` for a single project. For
multiple projects: `unidep install ./project1 ./project2`. The command accepts
both file paths and directories containing a `requirements.yaml` or
`pyproject.toml` file. Use `--editable` or `-e` to install the local packages
in editable mode. See `unidep install-all` to install all `requirements.yaml`
or `pyproject.toml` files in and below the current folder.

positional arguments:
  files                 The `requirements.yaml` or `pyproject.toml` file(s) to
                        parse or folder(s) that contain those file(s), by
                        default `.`

options:
  -h, --help            show this help message and exit
  -v, --verbose         Print verbose output
  -e, --editable        Install the project in editable mode
  --skip-local          Skip installing local dependencies
  --skip-pip            Skip installing pip dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-conda          Skip installing conda dependencies from
                        `requirements.yaml` or `pyproject.toml`
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --no-dependencies, --no-deps
                        Skip installing dependencies from `requirements.yaml`
                        or `pyproject.toml` file(s) and only install local
                        package(s). Useful after installing a `conda-lock.yml`
                        file because then all dependencies have already been
                        installed.
  --conda-executable {conda,mamba,micromamba}
                        The conda executable to use
  -n, --conda-env-name CONDA_ENV_NAME
                        Name of the conda environment, if not provided, the
                        currently active environment name is used, unless
                        `--conda-env-prefix` is provided
  -p, --conda-env-prefix CONDA_ENV_PREFIX
                        Path to the conda environment, if not provided, the
                        currently active environment path is used, unless
                        `--conda-env-name` is provided
  --dry-run, --dry      Only print the commands that would be run
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  -f, --conda-lock-file CONDA_LOCK_FILE
                        Path to the `conda-lock.yml` file to use for creating
                        the new environment. Assumes that the lock file
                        contains all dependencies. Must be used with `--conda-
                        env-name` or `--conda-env-prefix`.
  --no-uv               Disables the use of `uv` for pip install. By default,
                        `uv` is used if it is available in the PATH.
```

<!-- OUTPUT:END -->

### `unidep conda-lock`

Use `unidep conda-lock` on one or multiple `requirements.yaml` files and output the conda-lock file.
Optionally, when using a monorepo with multiple subpackages (with their own `requirements.yaml` files), generate a lock file for each subpackage.
See `unidep conda-lock -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep conda-lock -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep conda-lock [-h] [--only-global] [--lockfile LOCKFILE]
                         [--check-input-hash] [-d DIRECTORY] [--depth DEPTH]
                         [-f FILE] [-v]
                         [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                         [--skip-dependency SKIP_DEPENDENCY]
                         [--ignore-pin IGNORE_PIN]
                         [--overwrite-pin OVERWRITE_PIN]
                         ...

Generate a global `conda-lock.yml` file for a collection of
`requirements.yaml` or `pyproject.toml` files. Additionally, create individual
`conda-lock.yml` files for each `requirements.yaml` or `pyproject.toml` file
consistent with the global lock file. Example usage: `unidep conda-lock
--directory ./projects` to generate conda-lock files for all
`requirements.yaml` or `pyproject.toml` files in the `./projects` directory.
Use `--only-global` to generate only the global lock file. The `--check-input-
hash` option can be used to avoid regenerating lock files if the input hasn't
changed.

positional arguments:
  extra_flags           Extra flags to pass to `conda-lock lock`. These flags
                        are passed directly and should be provided in the
                        format expected by `conda-lock lock`. For example,
                        `unidep conda-lock -- --micromamba`. Note that the
                        `--` is required to separate the flags for `unidep
                        conda-lock` from the flags for `conda-lock lock`.

options:
  -h, --help            show this help message and exit
  --only-global         Only generate the global lock file
  --lockfile LOCKFILE   Specify a path for the global lockfile (default:
                        `conda-lock.yml` in current directory). Path should be
                        relative, e.g., `--lockfile ./locks/example.conda-
                        lock.yml`.
  --check-input-hash    Check existing input hashes in lockfiles before
                        regenerating lock files. This flag is directly passed
                        to `conda-lock`.
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -f, --file FILE       A single `requirements.yaml` or `pyproject.toml` file
                        to use, or folder that contains that file. This is an
                        alternative to using `--directory` which searches for
                        all `requirements.yaml` or `pyproject.toml` files in
                        the directory and its subdirectories.
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

<!-- OUTPUT:END -->

### `unidep pixi`

Use `unidep pixi` to generate a `pixi.toml` file from your `requirements.yaml` or `pyproject.toml` files.
This enables using [Pixi](https://pixi.sh/) for solving/locking/installing while keeping UniDep as your source of truth.

The philosophy is **"Let UniDep translate, let Pixi resolve"**.

**Workflow:**
```bash
# 1. Generate pixi.toml from your requirements
unidep pixi

# 2. Use pixi directly
pixi install
pixi lock
pixi run <cmd>
```

#### What `unidep pixi` generates

- A `[workspace]` section with `name`, `channels`, and `platforms`
- Conda deps in `[dependencies]`
- PyPI deps in `[pypi-dependencies]`
- Selector/platform-specific deps in `[target.<platform>.dependencies]` and/or `[target.<platform>.pypi-dependencies]`
- Optional dependency groups as Pixi features (`[feature.<group>.*]`)
- Local installable projects as editable path deps:
  ```toml
  [pypi-dependencies]
  my_pkg = { path = "./relative/path", editable = true }
  ```

In monorepo mode (multiple input files), UniDep builds feature sections per discovered project and composes environments from those features.

#### Dependency reconciliation rules (important)

When the same package appears from both conda and pip, UniDep applies deterministic rules before writing `pixi.toml`:

1. If pip has extras (`foo[bar]`), pip wins.
2. If only one side is pinned, pinned wins.
3. On ties (both pinned or both unpinned), conda wins.
4. When both sides are pinned and one declaration is narrower in platform scope, the narrower target-specific intent wins on that target. Other platforms continue through the same shared selection rules independently.

Version pins from repeated entries are merged when possible (for example `>=1.7,<2` + `<1.16` → `>=1.7,<1.16`).

#### Channels/platforms precedence

- **Channels**
  - If `--channel` is passed: use only CLI-provided channels.
  - Else: collect channels from requirement files.
  - Else fallback: `conda-forge`.
- **Platforms**
  - If `--platform` is passed: use CLI-provided platforms.
  - Else: use platforms declared in files.
  - Else: infer from selectors in dependencies.
  - Else fallback: current platform.

#### Example (single-file)

Input (`requirements.yaml`):

```yaml
channels:
  - conda-forge
dependencies:
  - numpy >=1.26
  - pip: rich
  - pip: uvloop  # [linux64]
optional_dependencies:
  dev:
    - pytest
platforms:
  - linux-64
  - osx-64
```

Representative output shape (`pixi.toml`):

```toml
[workspace]
name = "my-project"
channels = ["conda-forge"]
platforms = ["linux-64", "osx-64"]

[dependencies]
numpy = ">=1.26"

[pypi-dependencies]
rich = "*"

[target.linux-64.pypi-dependencies]
uvloop = "*"

[feature.dev.dependencies]
pytest = "*"

[environments]
default = []
dev = ["dev"]
```

See `unidep pixi -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep pixi -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep pixi [-h] [-o OUTPUT] [-n NAME] [--stdout] [-c CHANNEL]
                   [-d DIRECTORY] [--depth DEPTH] [-f FILE] [-v]
                   [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                   [--skip-dependency SKIP_DEPENDENCY]
                   [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN]

Generate a `pixi.toml` file from `requirements.yaml` or `pyproject.toml`
files. Example usage: `unidep pixi` to generate a pixi.toml file. Use
`--output` to specify a different output path. Use `--name` to set the project
name. After generating, use `pixi lock` and `pixi install` directly.

options:
  -h, --help            show this help message and exit
  -o, --output OUTPUT   Output path for pixi.toml (default: pixi.toml in
                        current directory)
  -n, --name NAME       Name of the project (default: current directory name)
  --stdout              Output to stdout instead of a file
  -c, --channel CHANNEL
                        Conda channel to include. Can be repeated. Overrides
                        channels declared in requirements files. If omitted,
                        channels are read from the requirements files
                        (defaulting to conda-forge).
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -f, --file FILE       A single `requirements.yaml` or `pyproject.toml` file
                        to use, or folder that contains that file. This is an
                        alternative to using `--directory` which searches for
                        all `requirements.yaml` or `pyproject.toml` files in
                        the directory and its subdirectories.
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

<!-- OUTPUT:END -->

> [!TIP]
> Install Pixi-related optional dependencies with: `pip install "unidep[pixi]"`

### `unidep pip-compile`

Use `unidep pip-compile` on one or multiple `requirements.yaml` files and output a fully locked `requirements.txt` file using `pip-compile` from [`pip-tools`](https://pip-tools.readthedocs.io/en/latest/).
See `unidep pip-compile -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep pip-compile -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep pip-compile [-h] [-o OUTPUT_FILE] [-d DIRECTORY] [--depth DEPTH]
                          [-v]
                          [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                          [--skip-dependency SKIP_DEPENDENCY]
                          [--ignore-pin IGNORE_PIN]
                          [--overwrite-pin OVERWRITE_PIN]
                          ...

Generate a fully pinned `requirements.txt` file from one or more
`requirements.yaml` or `pyproject.toml` files using `pip-compile` from `pip-
tools`. This command consolidates all pip dependencies defined in the
`requirements.yaml` or `pyproject.toml` files and compiles them into a single
`requirements.txt` file, taking into account the specific versions and
dependencies of each package. Example usage: `unidep pip-compile --directory
./projects` to generate a `requirements.txt` file for all `requirements.yaml`
or `pyproject.toml` files in the `./projects` directory. Use `--output-file
requirements.txt` to specify a different output file.

positional arguments:
  extra_flags           Extra flags to pass to `pip-compile`. These flags are
                        passed directly and should be provided in the format
                        expected by `pip-compile`. For example, `unidep pip-
                        compile -- --generate-hashes --allow-unsafe`. Note
                        that the `--` is required to separate the flags for
                        `unidep pip-compile` from the flags for `pip-compile`.

options:
  -h, --help            show this help message and exit
  -o, --output-file OUTPUT_FILE
                        Output file for the pip requirements, by default
                        `requirements.txt`
  -d, --directory DIRECTORY
                        Base directory to scan for `requirements.yaml` or
                        `pyproject.toml` file(s), by default `.`
  --depth DEPTH         Maximum depth to scan for `requirements.yaml` or
                        `pyproject.toml` files, by default 1
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
```

<!-- OUTPUT:END -->

### `unidep pip`

Use `unidep pip` on a `requirements.yaml` file and output the pip installable dependencies on the current platform (default).
See `unidep pip -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep pip -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep pip [-h] [-f FILE] [-v]
                  [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                  [--skip-dependency SKIP_DEPENDENCY]
                  [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN]
                  [--separator SEPARATOR]

Get the pip requirements for the current platform only. Example usage: `unidep
pip --file folder1 --file folder2/requirements.yaml --separator ' ' --platform
linux-64` to extract all the pip dependencies specific to the linux-64
platform. Note that the `--file` argument can be used multiple times to
specify multiple `requirements.yaml` or `pyproject.toml` files and that --file
can also be a folder that contains a `requirements.yaml` or `pyproject.toml`
file.

options:
  -h, --help            show this help message and exit
  -f, --file FILE       The `requirements.yaml` or `pyproject.toml` file to
                        parse, or folder that contains that file, by default
                        `.`
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  --separator SEPARATOR
                        The separator between the dependencies, by default ` `
```

<!-- OUTPUT:END -->

### `unidep conda`

Use `unidep conda` on a `requirements.yaml` file and output the conda installable dependencies on the current platform (default).
See `unidep conda -h` for more information:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep conda -h -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
usage: unidep conda [-h] [-f FILE] [-v]
                    [-p {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}]
                    [--skip-dependency SKIP_DEPENDENCY]
                    [--ignore-pin IGNORE_PIN] [--overwrite-pin OVERWRITE_PIN]
                    [--separator SEPARATOR]

Get the conda requirements for the current platform only. Example usage:
`unidep conda --file folder1 --file folder2/requirements.yaml --separator ' '
--platform linux-64` to extract all the conda dependencies specific to the
linux-64 platform. Note that the `--file` argument can be used multiple times
to specify multiple `requirements.yaml` or `pyproject.toml` files and that
--file can also be a folder that contains a `requirements.yaml` or
`pyproject.toml` file.

options:
  -h, --help            show this help message and exit
  -f, --file FILE       The `requirements.yaml` or `pyproject.toml` file to
                        parse, or folder that contains that file, by default
                        `.`
  -v, --verbose         Print verbose output
  -p, --platform {linux-64,linux-aarch64,linux-ppc64le,osx-64,osx-arm64,win-64}
                        The platform(s) to get the requirements for. Multiple
                        platforms can be specified. If omitted, behavior is
                        command-specific: platforms may be inferred from
                        requirements files, otherwise the current platform is
                        used.
  --skip-dependency SKIP_DEPENDENCY
                        Skip installing a specific dependency that is in one
                        of the `requirements.yaml` or `pyproject.toml` files.
                        This option can be used multiple times, each time
                        specifying a different package to skip. For example,
                        use `--skip-dependency pandas` to skip installing
                        pandas.
  --ignore-pin IGNORE_PIN
                        Ignore the version pin for a specific package, e.g.,
                        `--ignore-pin numpy`. This option can be repeated to
                        ignore multiple packages.
  --overwrite-pin OVERWRITE_PIN
                        Overwrite the version pin for a specific package,
                        e.g., `--overwrite-pin 'numpy=1.19.2'`. This option
                        can be repeated to overwrite the pins of multiple
                        packages.
  --separator SEPARATOR
                        The separator between the dependencies, by default ` `
```

<!-- OUTPUT:END -->

## ❓ FAQ

Here is a list of questions we have either been asked by users or potential pitfalls we hope to help users avoid:

### **Q: When to use UniDep?**

**A:** UniDep is particularly useful for setting up full development environments that require both Python *and* non-Python dependencies (e.g., CUDA, compilers, etc.) with a single command.

In fields like research, data science, robotics, AI, and ML projects, it is common to work from a locally cloned Git repository.

Setting up a full development environment can be a pain, especially if you need to install non-Python dependencies like compilers, low-level numerical libraries, or CUDA (luckily Conda has all of them).
Typically, instructions are different for each OS and their corresponding package managers (`apt`, `brew`, `yum`, `winget`, etc.).

With UniDep, you can specify all your Pip and Conda dependencies in a single file.
To get set up on a new machine, you just need to install Conda (we recommend [micromamba](https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html)) and run `pip install unidep; unidep install-all -e` in your project directory, to install all dependencies and local packages in editable mode in the current Conda environment.

For fully reproducible environments, you can run `unidep conda-lock` to generate a `conda-lock.yml` file.
Then, run `conda env create -f conda-lock.yml -n myenv` to create a new Conda environment with all the third-party dependencies.
Finally, run `unidep install-all -e --no-dependencies` to install all your local packages in editable mode.

For those who prefer not to use Conda, you can simply run `pip install -e .` on a project using UniDep.
You'll need to install the non-Python dependencies yourself, but you'll have a list of them in the `requirements.yaml` file.

In summary, use UniDep if you:

- Prefer installing packages with conda but still want your package to be pip installable.
- Are tired of synchronizing your Pip requirements (`requirements.txt`) and Conda requirements (`environment.yaml`).
- Want a low-effort, comprehensive development environment setup.

### **Q: Just show me a full example!**

**A:** Check out the [`example` folder](https://github.com/basnijholt/unidep/tree/main/example).

### **Q: Uses of UniDep in the wild?**

**A:** UniDep really shines when used in a monorepo with multiple dependent projects, however, since these are typically private, we cannot share them.

However, an example of a single package that is public is [`home-assistant-streamdeck-yaml`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/).
This is a Python package that allows you to interact with [Home Assistant](https://www.home-assistant.io/) from an Elgato Stream Deck connected via USB to e.g., a Raspberry Pi.
It requires a couple of system dependencies (e.g., `libusb` and `hidapi`), which are typically installed with `apt` or `brew`.
The [`README.md`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/main/README.md) shows different installation instructions on Linux, MacOS, and Windows for non-Conda installs, however, with UniDep, we can just use `unidep install .` on all platforms.
It is fully configured via [`pyproject.toml`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/main/pyproject.toml).
The 2 `Dockerfile`s show 2 different ways of using UniDep:

1. [`Dockerfile.locked`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/a1b9966398dfe748804f058f82d546e47cd7f722/Dockerfile.locked): Installing `conda-lock.yml` (generated with `unidep conda-lock`) and then `pip install .` the local package.
2. [`Dockerfile.latest`](https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/a1b9966398dfe748804f058f82d546e47cd7f722/Dockerfile.latest): Using `unidep install .` to install all dependencies, first with conda, then pip, then the local package.

### **Q: How do I force PyPI instead of a local path for one dependency?**

**A:** Use `use: pypi` to force the PyPI package even during development (see [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use)). This is especially useful for overriding nested vendor copies while keeping other local dependencies editable.

```yaml
local_dependencies:
  - local: ./path/to/dep
    pypi: my-package>=1.0
    use: pypi  # Force PyPI, skip local path
```

### **Q: How do I ignore a local dependency entirely?**

**A:** Set `use: skip` on that entry. It won't be installed and UniDep won't recurse into it. See [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use) for details.

### **Q: A submodule brings its own copy of package X. How do I avoid conflicts?**

**A:** Use `use: pypi` as shown in [Overriding Nested Vendor Copies](#overriding-nested-vendor-copies-with-use). In short:

```yaml
local_dependencies:
  - ./third_party/foo              # Keep foo editable
  - local: ./third_party/foo/third_party/bar
    pypi: my-bar>=2.0
    use: pypi                      # Force YOUR PyPI build of bar
```

This propagates to **every** nested reference, so foo's bundled bar gets replaced with your PyPI package.

### **Q: How is this different from conda/mamba/pip?**

**A:** UniDep uses pip and conda under the hood to install dependencies, but it is not a replacement for them. UniDep will print the commands it runs, so you can see exactly what it is doing.

### **Q: I found a project using unidep, now what?**

**A:** You can install it like *any other Python package* using `pip install`.
However, to take full advantage of UniDep's functionality, clone the repository and run `unidep install-all -e` in the project directory.
This installs all dependencies in editable mode in the current Conda environment.

### **Q: How to handle local dependencies that do not use UniDep?**

**A:** You can use the `local_dependencies` field in the `requirements.yaml` or `pyproject.toml` file to specify local dependencies.
However, *if* a local dependency is *not* managed by UniDep, it will skip installing its dependencies!

To include all its dependencies, either convert the package to use UniDep (🏆), or maintain a separate `requirements.yaml` file, e.g., for a package called `foo`, create `foo-requirements.yaml`:

```yaml
dependencies:
  # List the dependencies of foo here
  - numpy
  - scipy
  - matplotlib
  - bar
local_dependencies:
  - ./path/to/foo  # This is the path to the package
```

Then, in the `requirements.yaml` or `pyproject.toml` file of the package that uses `foo`, list `foo-requirements.yaml` as a local dependency:

```yaml
local_dependencies:
  - ./path/to/foo-requirements.yaml
```

### **Q: Can't Conda already do this?**

**A:** Not quite. Conda can indeed install both Conda and Pip dependencies via an `environment.yaml` file, however, it does not work the other way around.
Pip cannot install the `pip` dependencies from an `environment.yaml` file.
This means, that if you want your package to be installable with `pip install -e .` *and* support Conda, you need to maintain two separate files: `environment.yaml` and `requirements.txt` (or specify these dependencies in `pyproject.toml` or `setup.py`).

### **Q: What is the difference between `conda-lock` and `unidep conda-lock`?**

**A:** [`conda-lock`](https://github.com/conda/conda-lock) is a standalone tool that creates a `conda-lock.yml` file from a `environment.yaml` file.
On the other hand, `unidep conda-lock` is a command within the UniDep tool that also generates a `conda-lock.yml` file (leveraging `conda-lock`), but it does so from one or more `requirements.yaml` or `pyproject.toml` files.
When managing multiple dependent projects (e.g., in a monorepo), a unique feature of `unidep conda-lock` is its ability to create **_consistent_** individual `conda-lock.yml` files for each `requirements.yaml` or `pyproject.toml` file, ensuring consistency with a global `conda-lock.yml` file.
This feature is not available in the standalone `conda-lock` tool.

### **Q: What is the difference between `hatch-conda` / `pdm-conda` and `unidep`?**

**A:** [`hatch-conda`](https://github.com/OldGrumpyViking/hatch-conda) is a plugin for [`hatch`](https://hatch.pypa.io/latest/) that integrates Conda environments into `hatch`.
A key difference is that `hatch-conda` keeps Conda and Pip dependencies separate, choosing to install packages with either Conda *or* Pip.
This results in Conda being a hard requirement, for example, if `numba` is specified for Conda, it cannot be installed with Pip despite its availability on PyPI.

In contrast, [UniDep](https://github.com/basnijholt/unidep/) does not require Conda.
Without Conda, it can still install any dependency that is available on PyPI (e.g., `numba` is both Conda and Pip installable).
However, without Conda, UniDep will not install dependencies exclusive to Conda.
These Conda-specific dependencies can often be installed through alternative package managers like `apt`, `brew`, `yum`, or by building them from source.

Another key difference is that `hatch-conda` is managing [Hatch environments](https://hatch.pypa.io/latest/environment/) whereas `unidep` can install Pip dependencies in the current Python environment (venv, Conda, Hatch, etc.), however, to optimally use UniDep, we recommend using Conda environments to additionally install non-Python dependencies.

Similar to `hatch-conda`, `unidep` also integrates with Hatchling, but it works in a slightly different way.

**A:** [`pdm-conda`](https://github.com/macro128/pdm-conda) is a plugin for [`pdm`](https://pdm-project.org/) designed to facilitate the use of Conda environments in conjunction with `pdm`.
Like `hatch-conda`, `pdm-conda` opts to install packages either with Conda or Pip.
It is closely integrated with `pdm`, primarily enabling the inclusion of Conda packages in `pdm`'s lock file (`pdm.lock`).
However, `pdm-conda` lacks extensive cross-platform support.
For instance, when adding a package like Numba using `pdm-conda`, it gets locked to the current platform (e.g., osx-arm64) without the flexibility to specify compatibility for other platforms such as linux-64.
In contrast, UniDep allows for cross-platform compatibility, enabling the user to specify dependencies for multiple platforms.
UniDep currently does not support `pdm`, but it does support Hatchling and Setuptools.

UniDep stands out from both `pdm-conda` and `hatch-conda` with its additional functionalities, particularly beneficial for monorepos and projects spanning multiple operating systems. For instance:

1. **Conda Lock Files**: Create `conda-lock.yml` files for all packages with consistent sub-lock files per package.
2. **CLI tools**: Provides tools like `unidep install-all -e` which will install multiple local projects (e.g., in monorepo) and all its dependencies first with Conda, then remaining ones with Pip, and finally the local dependencies in editable mode with Pip.
3. **Conda Environment Files**: Can create standard Conda `environment.yaml` files by combining the dependencies from many `requirements.yaml` or `pyproject.toml` files.
4. **Platform-Specific Dependencies**: Allows specifying dependencies for certain platforms (e.g., linux-64, osx-arm64), enhancing cross-platform compatibility.

## :hammer_and_wrench: Troubleshooting

### `pip install` fails with `FileNotFoundError`

When using a project that uses `local_dependencies: [../not/current/dir]` in the `requirements.yaml` file:

```yaml
local_dependencies:
  # File in a different directory than the pyproject.toml file
  - ../common-requirements.yaml
```

You might get an error like this when using a `pip` version older than `22.0`:

```bash
$ pip install /path/to/your/project/using/unidep
  ...
  File "/usr/lib/python3.8/pathlib.py", line 1222, in open
    return io.open(self, mode, buffering, encoding, errors, newline,
  File "/usr/lib/python3.8/pathlib.py", line 1078, in _opener
    return self._accessor.open(self, flags, mode)
FileNotFoundError: [Errno 2] No such file or directory: '/tmp/common-requirements.yaml'
```

The solution is to upgrade `pip` to version `22.0` or newer:

```bash
pip install --upgrade pip
```

## :warning: Limitations

- **Conda-Focused**: Best suited for Conda environments. However, note that having `conda` is not a requirement to install packages that use UniDep.
- **Setuptools and Hatchling only**: Currently only works with setuptools and Hatchling, not flit, poetry, or other build systems. Open an issue if you'd like to see support for other build systems.
- No [logic operators in platform selectors](https://github.com/basnijholt/unidep/issues/5) and [no Python selectors](https://github.com/basnijholt/unidep/issues/7).

* * *

Try `unidep` today for a streamlined approach to managing your Conda environment dependencies across multiple projects! 🎉👏


================================================
FILE: bootstrap.sh
================================================
#!/usr/bin/env bash
# Run this script with:
#   "${SHELL}" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/main/bootstrap.sh)
#
# 🚀 UniDep - Unified Conda and Pip Dependency Management 🚀
#
# This script downloads and installs:
#  - micromamba to ~/.local/bin/micromamba (for fast Conda environment management)
#  - uv to ~/.local/bin/uv (for fast pip installations)
#  - unidep (to manage unified Conda and Pip dependencies)
#
# UniDep streamlines Python project dependency management by combining both Conda
# and Pip dependencies into a single system. For more information, visit:
# https://github.com/basnijholt/unidep
#
# If you prefer to run the commands manually, you can execute each section one by one.
# Otherwise, piping this script directly to your default shell ensures everything is installed in one go.

echo "Downloading and installing micromamba to ~/.local/bin/micromamba and uv to ~/.local/bin/uv"

# Install micromamba (https://mamba.readthedocs.io/en/latest/installation/micromamba-installation.html)
# `< /dev/null` keeps the micromamba installer non-interactive: when this script
# itself is piped into a shell, the installer must not consume our stdin.
# NOTE(review): process substitution `<(...)` requires bash/zsh; this will fail
# if ${SHELL} is a strict POSIX sh — confirm supported shells.
"${SHELL}" <(curl -LsSf micro.mamba.pm/install.sh) < /dev/null

# Install uv (https://docs.astral.sh/uv/getting-started/installation/)
curl -LsSf https://astral.sh/uv/install.sh | sh

# Install unidep using uv
# --quiet suppresses progress output; -U upgrades if already installed;
# the [all] extra pulls in unidep's optional dependencies.
~/.local/bin/uv tool install --quiet -U "unidep[all]"

echo "Done installing micromamba, uv, and unidep"


================================================
FILE: docs/Makefile
================================================
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = source
BUILDDIR      = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

# Explicit clean target: runs Sphinx's own clean first, then also removes
# the build tree and the generated markdown files that conf.py writes
# into the source directory (the explicit rule takes precedence over the
# catch-all pattern rule above).
clean:
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
	rm -rf $(BUILDDIR)/*
	rm -f $(SOURCEDIR)/*.md


================================================
FILE: docs/source/.gitignore
================================================
*.md


================================================
FILE: docs/source/conf.py
================================================
"""Spinx configuration file for the unidep documentation.

The documentation is generated from the README.md file in the root of the repository.
The README.md file is copied to the Sphinx source directory and processed to generate
the documentation. The following transformations are applied to the README.md file:

1. Replace named emojis with unicode emojis.
2. Replace markdown alerts with admonitions.
3. Replace relative links to `example/` files with absolute links to GitHub.
4. Fix anchors with named emojis.
5. Split the README.md file into individual sections based on second-level headers.
6. Extract the table of contents links from the processed README.
7. Replace links in each section to point to the correct section.
8. Decrease the header levels by one in each section.
9. Rename the first section to `introduction.md` and update its header.
10. Write an index file for the documentation.

This code is tightly coupled with the structure of the README.md file and the
table of contents generated by the doctoc tool.
"""

from __future__ import annotations

import os
import re
import shutil
import sys
import textwrap
from pathlib import Path

# Make the repository root importable so `import unidep` below resolves to the
# local checkout, and propagate it via PYTHONPATH for any subprocesses.
package_path = Path("../..").resolve()
sys.path.insert(0, str(package_path))
PYTHON_PATH = os.environ.get("PYTHONPATH", "")
os.environ["PYTHONPATH"] = f"{package_path}:{PYTHON_PATH}"

# Also expose the docs/ directory itself on sys.path.
docs_path = Path("..").resolve()
sys.path.insert(1, str(docs_path))

import unidep  # noqa: E402

# Standard Sphinx project metadata.
project = "unidep"
copyright = "2023, Bas Nijholt"  # noqa: A001
author = "Bas Nijholt"

# Version info is taken from the installed/local package itself.
version = unidep.__version__
release = unidep.__version__

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.autosectionlabel",
    "sphinx.ext.intersphinx",
    "sphinx.ext.viewcode",
    "sphinx.ext.napoleon",
    "myst_parser",
    "sphinx_autodoc_typehints",
]


# Generate section labels for headers down to depth 5 so deep links resolve.
autosectionlabel_maxdepth = 5
# MyST: do not auto-generate heading anchors (slugs come from doctoc instead).
myst_heading_anchors = 0
templates_path = ["_templates"]
source_suffix = [".rst", ".md"]
master_doc = "index"
language = "en"
pygments_style = "sphinx"
html_theme = "furo"
html_static_path = ["_static"]
htmlhelp_basename = "unidepdoc"
# Bare `name` references in docstrings are treated as autolink targets.
default_role = "autolink"
intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
}
html_logo = "https://github.com/basnijholt/nijho.lt/raw/2cf0045f9609a176cb53422c591fde946459669d/content/project/unidep/unidep-logo.webp"


def replace_named_emojis(input_file: Path, output_file: Path) -> None:
    """Rewrite *input_file* to *output_file*, turning ``:alias:`` emoji names into unicode."""
    import emoji

    text = input_file.read_text()
    output_file.write_text(emoji.emojize(text, language="alias"))


def _change_alerts_to_admonitions(input_text: str) -> str:
    # Splitting the text into lines
    lines = input_text.split("\n")

    # Placeholder for the edited text
    edited_text = []

    # Mapping of markdown markers to their new format
    mapping = {
        "IMPORTANT": "important",
        "NOTE": "note",
        "TIP": "tip",
        "WARNING": "caution",
    }

    # Variable to keep track of the current block type
    current_block_type = None

    for line in lines:
        # Check if the line starts with any of the markers
        if any(line.strip().startswith(f"> [!{marker}]") for marker in mapping):
            # Find the marker and set the current block type
            current_block_type = next(
                marker for marker in mapping if f"> [!{marker}]" in line
            )
            # Start of a new block
            edited_text.append("```{" + mapping[current_block_type] + "}")
        elif current_block_type and line.strip() == ">":
            # Empty line within the block, skip it
            continue
        elif current_block_type and not line.strip().startswith(">"):
            # End of the current block
            edited_text.append("```")
            edited_text.append(line)  # Add the current line as it is
            current_block_type = None  # Reset the block type
        elif current_block_type:
            # Inside the block, so remove '>' and add the line
            edited_text.append(line.lstrip("> ").rstrip())
        else:
            # Outside any block, add the line as it is
            edited_text.append(line)

    # Join the edited lines back into a single string
    return "\n".join(edited_text)


def change_alerts_to_admonitions(input_file: Path, output_file: Path) -> None:
    """Rewrite *input_file* to *output_file*, converting GitHub alerts to admonitions.

    A blockquote such as ``> [!NOTE]`` followed by quoted lines becomes a
    fenced ```{note}`` block (see `_change_alerts_to_admonitions`).
    """
    original = input_file.read_text()
    output_file.write_text(_change_alerts_to_admonitions(original))


def replace_example_links(input_file: Path, output_file: Path) -> None:
    """Rewrite relative ``(example/...`` links as absolute GitHub URLs."""
    text = input_file.read_text()
    rewritten = text.replace(
        "(example/",
        "(https://github.com/basnijholt/unidep/tree/main/example/",
    )
    output_file.write_text(rewritten)


def fix_anchors_with_named_emojis(input_file: Path, output_file: Path) -> None:
    """Strip known emoji-name prefixes from markdown anchors.

    WARNING: this currently hardcodes the emojis to remove.
    """
    hardcoded_emoji_names = (
        "package",
        "memo",
        "jigsaw",
        "desktop_computer",
        "hammer_and_wrench",
        "warning",
    )
    text = input_file.read_text()
    for name in hardcoded_emoji_names:
        # e.g. "#package-installation" -> "#installation"
        text = text.replace(f"#{name}-", "#")
    output_file.write_text(text)


def normalize_slug(slug: str) -> str:
    """Return *slug* with the '#' re-prefixed and surrounding dashes removed."""
    return f"#{slug[1:].strip('-')}"


def split_markdown_by_headers(
    readme_path: Path,
    out_folder: Path,
    links: dict[str, str],
    level: int = 2,
    to_skip: tuple[str, ...] = ("Table of Contents",),
) -> list[str]:
    """Split a markdown file into individual files based on headers.

    Parameters
    ----------
    readme_path
        Markdown file to split.
    out_folder
        Directory where the per-section ``.md`` files are written.
    links
        Mapping of section titles to their TOC slugs (from `extract_toc_links`);
        used to derive the output file names.
    level
        Header level to split on (2 means ``## `` headers).
    to_skip
        Sections whose title contains any of these strings are dropped.

    Returns
    -------
    The list of file names (without ``.md``) written, in document order,
    suitable as toctree entries.

    """
    with readme_path.open(encoding="utf-8") as file:
        content = file.read()

    # Regex to find headers of the requested level; each match runs up to the
    # next same-level header or end of document (DOTALL so '.' spans newlines).
    n = "#" * level
    headers = re.finditer(rf"\n({n} .+?)(?=\n{n} |\Z)", content, re.DOTALL)

    # Split content based on headers. `split_contents[i]` is the text that
    # precedes header i, so the section titled `header_contents[i]` is the
    # chunk *starting* at header i (appended on the next iteration / at the end).
    split_contents: list[str] = []
    header_contents: list[str] = []
    start = 0
    previous_header = ""
    for header in headers:
        header_title = header.group(1).strip("# ").strip()
        # Only the first line of the match is the actual header text.
        header_contents.append(header_title.split("\n", 1)[0])
        end = header.start()
        # Drop sections whose title matches `to_skip` (e.g. the TOC itself).
        if not any(s in previous_header for s in to_skip):
            split_contents.append(content[start:end].strip())
        start = end
        previous_header = header_title

    # Add the last section
    split_contents.append(content[start:].strip())

    # Create individual files for each section. Sections with a TOC link get a
    # slug-derived name; otherwise fall back to `section_<i>.md`.
    toctree_entries: list[str] = []
    for i, (section, header_content) in enumerate(
        zip(split_contents, header_contents),
    ):
        name = (
            normalize_slug(links[header_content]).lstrip("#")
            if header_content in links
            else f"section_{i}"
        )
        fname = out_folder / f"{name}.md"
        toctree_entries.append(name)
        with fname.open("w", encoding="utf-8") as file:
            file.write(section)

    return toctree_entries


def replace_header(file_path: Path, new_header: str) -> None:
    """Swap the first level-1 header of *file_path* for *new_header* in place."""
    original = file_path.read_text(encoding="utf-8")

    # Only the first '# ...' line (anywhere in the file, MULTILINE) is replaced.
    updated = re.sub(
        r"^# .+?\n",
        f"# {new_header}\n",
        original,
        count=1,
        flags=re.MULTILINE,
    )

    file_path.write_text(updated, encoding="utf-8")


def extract_toc_links(md_file_path: Path) -> dict[str, str]:
    """Extract the doctoc table of contents as a title-to-slug mapping.

    Parameters
    ----------
    md_file_path
        Markdown file path.

    Returns
    -------
    A dictionary where keys are section titles and values are the corresponding links.

    Raises
    ------
    RuntimeError
        If the doctoc TOC markers are not present in the file.

    """
    readme_content = md_file_path.read_text()
    toc_start = "<!-- START doctoc generated TOC please keep comment here to allow auto update -->"
    toc_end = "<!-- END doctoc generated TOC please keep comment here to allow auto update -->"

    # Locate the TOC between the doctoc markers.
    toc_section = re.search(f"{toc_start}(.*?){toc_end}", readme_content, re.DOTALL)
    if toc_section is None:
        msg = "Table of Contents section not found."
        raise RuntimeError(msg)

    # Markdown link syntax: "- [Title](#slug)"
    link_regex = re.compile(r"- \[([^]]+)\]\(([^)]+)\)")

    result: dict[str, str] = {}
    for match in link_regex.finditer(toc_section.group(1)):
        result[match.group(1).strip()] = match.group(2)
    return result


def extract_headers_from_markdown(md_file_path: Path) -> list[tuple[int, str]]:
    """Collect every markdown header in a file.

    Parameters
    ----------
    md_file_path
        Path to the markdown file.

    Returns
    -------
    A list of tuples containing the level of the header and the header text.

    """
    text = md_file_path.read_text()

    headers: list[tuple[int, str]] = []
    # A header is one or more '#' at line start followed by the title.
    for match in re.finditer(r"^(#+)\s+(.+)$", text, re.MULTILINE):
        level = len(match.group(1))
        headers.append((level, match.group(2).strip()))
    return headers


def replace_links_in_markdown(
    md_file_path: Path,
    headers_mapping: dict[str, list[tuple[int, str]]],
    links: dict[str, str],
) -> None:
    """Rewrite intra-README anchors to point at the split-out section files.

    Parameters
    ----------
    md_file_path
        Path to the markdown file to process.
    headers_mapping
        A dictionary where keys are markdown file names and values are lists of headers.
    links
        A dictionary of original header texts mapped to their slug (anchor) in the original README.

    """
    content = md_file_path.read_text()

    for file_name, headers in headers_mapping.items():
        for _level, header_text in headers:
            slug = links.get(header_text, "")
            if not slug:
                continue
            # Normalized slug: drop the '#' plus surrounding dashes, re-prefix '#'
            # (inlined equivalent of normalize_slug).
            normalized = "#" + slug[1:].lstrip("-").rstrip("-")
            # "(#old-slug)" -> "(file.md#new-slug)"
            content = content.replace(
                f"(#{slug.lstrip('#')})",
                f"({file_name}{normalized})",
            )

    md_file_path.write_text(content)


def decrease_header_levels(md_file_path: Path) -> None:
    """Promote every markdown header in the file by one level (minimum level 1).

    Parameters
    ----------
    md_file_path
        Path to the Markdown file.

    """
    text = md_file_path.read_text(encoding="utf-8")

    def demote(match: re.Match) -> str:
        # Drop one '#', but never go below a single '#'.
        hashes = "#" * max(1, len(match.group(1)) - 1)
        return f"{hashes} {match.group(2)}"

    new_text = re.sub(r"^(#+)\s+(.+)$", demote, text, flags=re.MULTILINE)

    md_file_path.write_text(new_text, encoding="utf-8")


def write_index_file(docs_path: Path, toctree_entries: list[str]) -> None:
    """Write the Sphinx ``index.md``: include the intro and list all sections."""
    index_path = docs_path / "source" / "index.md"
    # The first entry becomes introduction.md, so it is excluded from the page list.
    pages = "\n".join(toctree_entries[1:])
    # Doubled braces survive .format() as literal MyST directive braces.
    content = textwrap.dedent(
        """
        ```{{include}} introduction.md
        ```

        ```{{toctree}}
        :hidden: true
        :maxdepth: 2
        :glob:

        introduction
        {pages}
        reference/index
        ```
    """,
    ).format(pages=pages)

    index_path.write_text(content, encoding="utf-8")


def process_readme_for_sphinx_docs(readme_path: Path, docs_path: Path) -> None:
    """Process the README.md file for Sphinx documentation generation.

    Parameters
    ----------
    readme_path
        Path to the original README.md file.
    docs_path
        Path to the Sphinx documentation source directory.

    """
    # Step 1: Copy README.md to the Sphinx source directory and apply transformations
    output_file = docs_path / "source" / "README.md"
    replace_named_emojis(readme_path, output_file)
    change_alerts_to_admonitions(output_file, output_file)
    replace_example_links(output_file, output_file)
    fix_anchors_with_named_emojis(output_file, output_file)

    # Step 2: Extract the table of contents links from the processed README
    links = extract_toc_links(output_file)

    # Step 3: Split the README into individual sections for Sphinx.
    # Remove stale section files from a previous run first.
    # BUG FIX: the glob used to be "sections_*.md" (extra "s"), which never
    # matched the "section_<n>.md" files generated below (see the
    # "section_0.md" rename in step 4), so stale sections were never deleted.
    src_folder = docs_path / "source"
    for md_file in src_folder.glob("section_*.md"):
        md_file.unlink()
    toctree_entries = split_markdown_by_headers(output_file, src_folder, links)
    output_file.unlink()  # Remove the original README file from Sphinx source
    write_index_file(docs_path, toctree_entries)

    # Step 4: Extract headers from each section for link replacement.
    # NOTE(review): headers are keyed by the pre-rename file name
    # ("section_0.md"), not "introduction.md" — presumably
    # `replace_links_in_markdown` expects that; confirm before changing.
    headers_in_files = {}
    for md_file in src_folder.glob("*.md"):
        headers = extract_headers_from_markdown(md_file)
        decrease_header_levels(md_file)
        headers_in_files[md_file.name] = headers

    # Rename the first section to 'introduction.md' and update its header
    shutil.move(src_folder / "section_0.md", src_folder / "introduction.md")  # type: ignore[arg-type]
    replace_header(src_folder / "introduction.md", new_header="🌟 Introduction")

    # Step 5: Replace links in each markdown file to point to the correct section.
    # The glob runs after the rename above, so it already contains
    # introduction.md; deduplicate so the file is not processed twice (the
    # previous tuple-concatenation appended it a second time).
    md_files = list(src_folder.glob("*.md"))
    introduction = src_folder / "introduction.md"
    if introduction not in md_files:
        md_files.append(introduction)
    for md_file in md_files:
        replace_links_in_markdown(md_file, headers_in_files, links)


# Build the Sphinx sources from the repository README at import time.
# `package_path` and `docs_path` are defined earlier in this config file.
readme_path = package_path / "README.md"
process_readme_for_sphinx_docs(readme_path, docs_path)


================================================
FILE: example/README.md
================================================
# Examples

> [!TIP]
> Try out `unidep` in this folder by running:
> - `unidep install ./setup_py_project ./hatch_project` to install the `setup_py_project` and `hatch_project` packages and their dependencies with `conda`, then the remaining dependencies with `pip`, and finally the local packages with `pip`
> - `unidep install-all -e` to install all packages (`setup_py_project`, `hatch_project`, `setuptools_project`, etc.) in editable mode
> - `unidep conda-lock` to generate a global `conda-lock.yml` file and consistent per package `conda-lock.yml` files
> - `unidep merge` to merge all `requirements.yaml` files into a single `environment.yaml` file
> - `unidep pip-compile` to generate a locked `requirements.txt` file

Explore these example projects to understand how `unidep` integrates with different build tools and configurations:

| Project                                            | Build Tool   | `pyproject.toml` | `requirements.yaml` | `setup.py` | Description                                                                        |
| -------------------------------------------------- | ------------ | ---------------- | ------------------- | ---------- | ---------------------------------------------------------------------------------- |
| [`setup_py_project`](setup_py_project)             | `setuptools` | ✅                | ✅                   | ✅          | Traditional `setuptools` project with `requirements.yaml`.                         |
| [`setuptools_project`](setuptools_project)         | `setuptools` | ✅                | ✅                   | ❌          | Modern `setuptools` usage with both `pyproject.toml` and `requirements.yaml`.      |
| [`pyproject_toml_project`](pyproject_toml_project) | `setuptools` | ✅                | ❌                   | ❌          | Pure `pyproject.toml` setup, showcasing comprehensive dependency management.       |
| [`hatch_project`](hatch_project)                   | `hatch`      | ✅                | ✅                   | ❌          | Demonstrates `unidep` integration in a Hatchling project with `requirements.yaml`. |
| [`hatch2_project`](hatch2_project)                 | `hatch`      | ✅                | ❌                   | ❌          | Pure `pyproject.toml` Hatchling project.                                           |


## Exploring `unidep` Through Practical Examples

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Combine one or multiple `requirements.yaml`/`pyproject.toml` files into a single `environment.yaml` file](#combine-one-or-multiple-requirementsyamlpyprojecttoml-files-into-a-single-environmentyaml-file)
- [Using `pip install`](#using-pip-install)
- [Using `unidep install`](#using-unidep-install)
- [Using `unidep install-all` for installation across multiple projects](#using-unidep-install-all-for-installation-across-multiple-projects)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

### Combine one or multiple `requirements.yaml`/`pyproject.toml` files into a single `environment.yaml` file

Combine `requirements.yaml` files in subdirectories and into an `environment.yaml` file that can be installed with `conda`.

Here we can just run `unidep merge` with no arguments, since the defaults are the same as what we want.

This would be the same as running `unidep merge --name myenv --verbose`:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- unidep merge --name myenv --verbose -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
🔍 Scanning in `.` at depth 0
🔍 Scanning in `hatch2_project` at depth 1
🔍 Found `"pyproject.toml"` with dependencies at `hatch2_project/pyproject.toml`
🔍 Scanning in `hatch_project` at depth 1
🔍 Found `"requirements.yaml"` at `hatch_project/requirements.yaml`
🔍 Scanning in `pyproject_toml_project` at depth 1
🔍 Found `"pyproject.toml"` with dependencies at `pyproject_toml_project/pyproject.toml`
🔍 Scanning in `setup_py_project` at depth 1
🔍 Found `"requirements.yaml"` at `setup_py_project/requirements.yaml`
🔍 Scanning in `setuptools_project` at depth 1
🔍 Found `"requirements.yaml"` at `setuptools_project/requirements.yaml`
📄 Parsing `hatch2_project/pyproject.toml`
📄 Parsing `hatch_project/requirements.yaml`
📄 Parsing `pyproject_toml_project/pyproject.toml`
📄 Parsing `../hatch_project[test]` from `local_dependencies`
📄 Parsing `pyproject_toml_project/../hatch_project/requirements.yaml[test]`
📄 Moving `test` optional dependencies to main dependencies for `pyproject_toml_project/../hatch_project/requirements.yaml[test]`
📄 Parsing `setup_py_project/requirements.yaml`
📄 Parsing `../setuptools_project` from `local_dependencies`
📄 Parsing `setup_py_project/../setuptools_project/requirements.yaml`
📄 Parsing `setuptools_project/requirements.yaml`
📝 Generating environment file at `environment.yaml`
📝 Environment file generated successfully.
✅ Generated environment file at `environment.yaml` from `hatch2_project/pyproject.toml`, `hatch_project/requirements.yaml`, `pyproject_toml_project/pyproject.toml`, `setup_py_project/requirements.yaml`, `setuptools_project/requirements.yaml`
```

<!-- OUTPUT:END -->

See the resulting [`environment.yaml`](environment.yaml) file which is installable with [`mamba`](https://mamba.readthedocs.io/en/latest/).
This file is using `sel(linux|osx|win)` to specify platform specific dependencies.
Alternatively, use `unidep merge --selector comment` to generate a file that uses comments to specify platform specific dependencies, which can be read by [`conda-lock`](https://github.com/conda/conda-lock).

### Using `pip install`

This method allows you to install packages defined in a `requirements.yaml` file using `pip`. It focuses on installing only those dependencies that are pip-installable, followed by the local project package.

**How to Use**:

- Run `pip install ./setup_py_project`.
- This command will process the `requirements.yaml` in the specified directory (`./setup_py_project/`), installing all pip-installable dependencies, including the local project itself.

### Using `unidep install`

Using `unidep` for installation offers a more comprehensive approach. It handles both Conda and Pip dependencies specified in the `requirements.yaml` file, ensuring all necessary packages are installed, including those not available through pip.

**How to Use**:

- To perform a standard installation, run `unidep install ./setup_py_project`.
- For an editable installation (useful during development), use `unidep install -e ./setup_py_project`.
- The `unidep install` command first installs any Conda-specific dependencies from the `requirements.yaml` file, then proceeds to install pip-specific dependencies. Finally, it installs the local project package.

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- echo '$ unidep install --dry-run -e ./setup_py_project' -->
<!-- unidep install --dry-run -e ./setup_py_project -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
$ unidep install --dry-run -e ./setup_py_project
📦 Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive">=0.15.0, <2.0.0" adaptive-scheduler hpc05 pexpect pfapack numpy">=1.21" packaging pandas">=1,<3" pytest pytest-cov`

📦 Installing pip dependencies with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install yaml2bib aiokef markdown-code-runner numthreads pyyaml rsync-time-machine slurm-usage unidep`

📝 Found local dependencies: {'setup_py_project': ['hatch_project', 'setuptools_project']}

📦 Installing project with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install --no-deps -e /home/runner/work/unidep/unidep/example/hatch_project -e /home/runner/work/unidep/unidep/example/setuptools_project -e ./setup_py_project`

```

<!-- OUTPUT:END -->

### Using `unidep install-all` for installation across multiple projects

The `unidep install-all` command provides a convenient way to install all dependencies across multiple projects or packages within a given directory.
This command is especially useful in monorepos or when managing several related projects with their own `requirements.yaml` files.

**How `unidep install-all` Works**:

- This command scans a specified directory (or the current directory if none is specified) for `requirements.yaml` files.
- It then installs dependencies for each found project, handling both Conda and Pip dependencies.
- The local packages are also installed, making this command a one-stop solution for setting up your entire workspace.

**Usage Examples**:

- Run `unidep install-all` to install all dependencies in the current directory.
- Use `unidep install-all -e` for an editable install, which is useful during development. This flag ensures that local packages are installed in a way that allows changes to be reflected immediately without needing reinstallation.

**Example Command**:

```bash
# To install all projects in the current directory in editable mode
unidep install-all -e
```

**Output Example**:

<!-- CODE:BASH:START -->
<!-- echo '```bash' -->
<!-- echo '$ unidep install-all -e --dry-run' -->
<!-- unidep install-all -e --dry-run -->
<!-- echo '```' -->
<!-- CODE:END -->
<!-- OUTPUT:START -->
<!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
```bash
$ unidep install-all -e --dry-run
📦 Installing conda dependencies with `conda install --yes --override-channels --channel conda-forge adaptive">=0.15.0, <2.0.0" adaptive-scheduler hpc05 pexpect pfapack numpy">=1.21" packaging pandas">=1,<3" pytest pytest-cov`

📦 Installing pip dependencies with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install yaml2bib aiokef markdown-code-runner numthreads pyyaml rsync-time-machine slurm-usage unidep`

📝 Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']}

📦 Installing project with `/opt/hostedtoolcache/Python/3.14.2/x64/bin/python -m pip install --no-deps -e ./hatch2_project -e ./hatch_project -e ./pyproject_toml_project -e ./setup_py_project -e ./setuptools_project`

```

<!-- OUTPUT:END -->

This command streamlines the process of getting a development environment up and running, particularly in complex setups with multiple interdependent projects.


================================================
FILE: example/environment.yaml
================================================
# This file is created and managed by `unidep` 3.2.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep merge --name myenv --verbose`

name: myenv
channels:
  - conda-forge
dependencies:
  - sel(linux): adaptive >=0.15.0, <2.0.0
  - sel(linux): adaptive-scheduler
  - sel(linux): hpc05
  - sel(linux): pexpect
  - sel(osx): pexpect
  - sel(linux): pfapack
  - numpy >=1.21
  - packaging
  - pandas >=1,<3
  - pytest
  - pytest-cov
  - pip:
    - yaml2bib; sys_platform == 'linux' and platform_machine == 'x86_64'
    - aiokef
    - markdown-code-runner
    - numthreads
    - pyyaml
    - rsync-time-machine
    - slurm-usage
    - unidep
    - fileup; sys_platform == 'darwin'
platforms:
  - linux-64
  - osx-64
  - osx-arm64


================================================
FILE: example/hatch2_project/README.md
================================================
# Hatchling Integration

> [!TIP]
> - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself.
> - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence:
>   1. `conda install [dependencies from pyproject.toml]` – Installs all Conda installable dependencies.
>   2. `pip install [dependencies from pyproject.toml]` – Installs remaining pip-only dependencies.
>   3. `pip install .` – Installs the local package.

For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured fully in `pyproject.toml` including all its dependencies.

**Example Configuration for Hatch**:

```toml
[build-system]
requires = ["hatchling", "unidep[toml]"]  # add "unidep[toml]" here
build-backend = "hatchling.build"

[project]
dynamic = ["dependencies"]  # add "dependencies" here
# Additional project configurations

[tool.hatch]
# Additional Hatch configurations

[tool.hatch.metadata]
allow-direct-references = true  # allow VCS URLs, local paths, etc.

[tool.hatch.metadata.hooks.unidep]  # add this to enable the hook

# Specify pip and conda dependencies here
[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    { conda = "adaptive-scheduler:linux64" },
    { pip = "unidep" },
    "numpy >=1.21",
    "hpc05:linux64",
    "pandas >=1,<3",
    "pexpect:unix",
    "wexpect:win64",
]
```

> [!NOTE]
> See the [`pyproject.toml`](pyproject.toml) for a working example.


================================================
FILE: example/hatch2_project/hatch2_project.py
================================================
x = 1


================================================
FILE: example/hatch2_project/pyproject.toml
================================================
[build-system]
requires = ["hatchling", "unidep[toml]"]
build-backend = "hatchling.build"

[project]
name = "hatch2_project"
description = "Example hatch2_project for `unidep`."
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }]
# `dependencies` is not needed because it is automatically
# populated by `unidep` with the dependencies defined in the [tool.unidep] section!
# dependencies = []
dynamic = ["dependencies"]
version = "0.1.0"

[tool.hatch]

# Allow direct references (e.g., VCS URLs, local paths) in dependencies
[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.metadata.hooks.unidep]

[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    { conda = "adaptive-scheduler:linux64" },
    { pip = "unidep" },
    "numpy >=1.21",
    "hpc05:linux64",
    "pandas >=1,<3",
    "pexpect:unix",
    "wexpect:win64",
]


================================================
FILE: example/hatch_project/README.md
================================================
# Hatchling Integration

> [!TIP]
> - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself.
> - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence:
>   1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies.
>   2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies.
>   3. `pip install .` – Installs the local package.

For projects managed with [Hatch](https://hatch.pypa.io/), `unidep` can be configured in `pyproject.toml` to automatically process `requirements.yaml`.

**Example Configuration for Hatch**:

```toml
[build-system]
requires = ["hatchling", "unidep"]  # add "unidep" here
build-backend = "hatchling.build"

[project]
dynamic = ["dependencies"]  # add "dependencies" here
# Additional project configurations

[tool.hatch]
# Additional Hatch configurations

[tool.hatch.metadata]
allow-direct-references = true  # allow VCS URLs, local paths, etc.

[tool.hatch.metadata.hooks.unidep]  # add this to enable the hook
```

> [!NOTE]
> See the [`pyproject.toml`](pyproject.toml) for a working example.


================================================
FILE: example/hatch_project/hatch_project.py
================================================
x = 1


================================================
FILE: example/hatch_project/pyproject.toml
================================================
[build-system]
requires = ["hatchling", "unidep"]
build-backend = "hatchling.build"

[project]
name = "hatch_project"
description = "Example hatch_project for `unidep`."
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }]
# `dependencies` is not needed because it is automatically
# populated by `unidep` with the dependencies from the `requirements.yaml`
# dependencies = []
dynamic = ["dependencies", "optional-dependencies"]
version = "0.1.0"

[tool.hatch]

# Allow direct references (e.g., VCS URLs, local paths) in dependencies
[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.metadata.hooks.unidep]


================================================
FILE: example/hatch_project/requirements.yaml
================================================
name: hatch_project
channels:
  - conda-forge
dependencies:
  - conda: adaptive-scheduler  # [linux64]
  - pip: unidep
  - numpy >=1.21
  - hpc05  # [linux64]
  - pandas >=1,<3
  - pexpect # [unix]
  - wexpect # [win]
optional_dependencies:
  test:
    - pytest
    - pytest-cov


================================================
FILE: example/pyproject_toml_project/README.md
================================================
# Full `pyproject.toml` integration example

> [!TIP]
> - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself.
> - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence:
>   1. `conda install [dependencies from pyproject.toml]` – Installs all Conda installable dependencies.
>   2. `pip install [dependencies from pyproject.toml]` – Installs remaining pip-only dependencies.
>   3. `pip install .` – Installs the local package.

For projects using `setuptools` with only a `pyproject.toml` file, configure `unidep` in `pyproject.toml` and specify all dependencies there too.

**Example Configuration for projects using `pyproject.toml`**:

Add this to `pyproject.toml`:

```toml
[build-system]
build-backend = "setuptools.build_meta"
requires = ["setuptools", "unidep[toml]"]  # add "unidep[toml]" here

[project]
dynamic = ["dependencies"]  # add "dependencies" here

[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    "adaptive",
    "pfapack:linux64",
    "packaging",
    { pip = "markdown-code-runner" },
    { pip = "numthreads" },
]
```

No separate `requirements.yaml` is needed — all dependencies live in `pyproject.toml`, and you are good to go! 🎉

> [!NOTE]
> See the [`pyproject.toml`](pyproject.toml) for a working example.


================================================
FILE: example/pyproject_toml_project/pyproject.toml
================================================
[build-system]
requires = ["setuptools", "unidep[toml]"]
build-backend = "setuptools.build_meta"

[project]
name = "pyproject_toml_project"
description = "Example pyproject_toml_project for `unidep`."
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }]
# `dependencies` is not needed because it is automatically
# populated by `unidep` with the dependencies defined in the [tool.unidep] section!
# dependencies = []
version = "0.1.0"
dynamic = ["dependencies", "optional-dependencies"]

[tool.setuptools]
py-modules = ["pyproject_toml_project"]

[tool.unidep]
channels = ["conda-forge"]
dependencies = [
    "adaptive:linux64",
    "pfapack:linux64",
    "packaging",
    { pip = "markdown-code-runner" },
    { pip = "numthreads" },
]
local_dependencies = [
    "../hatch_project[test]",  # Local dependency with optional dependencies
]
[tool.unidep.optional_dependencies]
dev = ["mypy", "ruff"]
test = ["pytest"]


================================================
FILE: example/pyproject_toml_project/pyproject_toml_project.py
================================================


================================================
FILE: example/setup_py_project/README.md
================================================
# `setup.py` integration example

> [!TIP]
> - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself.
> - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence:
>   1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies.
>   2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies.
>   3. `pip install .` – Installs the local package.

For projects using `setuptools` with a `setup.py` file, configure `unidep` in `pyproject.toml` alongside a `requirements.yaml` file.

**Example Configuration for projects using `setup.py`**:

Add this to `pyproject.toml`:

```toml
[build-system]
build-backend = "setuptools.build_meta"
requires = ["setuptools", "unidep"]
```

And just do not use `install_requires` in `setup.py`.

> [!NOTE]
> See the [`pyproject.toml`](pyproject.toml) and [`setup.py`](setup.py) for a working example.


================================================
FILE: example/setup_py_project/pyproject.toml
================================================
[build-system]
requires = ["setuptools", "unidep"]
build-backend = "setuptools.build_meta"


================================================
FILE: example/setup_py_project/requirements.yaml
================================================
name: setup_py_project
channels:
  - conda-forge
dependencies:
  - pandas
  - adaptive >=0.15.0, <2.0.0  # [linux64]
  - pip: yaml2bib # [linux64]
  - pip: rsync-time-machine
  - pip: slurm-usage
  - pip: fileup  # [macos]
  - pip: pyyaml
  - pip: aiokef
local_dependencies:
  - ../setuptools_project  # depends on setuptools_project
platforms:
  - linux-64
  - osx-64
  - osx-arm64


================================================
FILE: example/setup_py_project/setup.py
================================================
from setuptools import setup

setup(
    name="setup_py_project",
    version="0.1.0",
    description="A short description of your package",
    py_modules=["setup_py_project"],
    # This is not needed because `install_requires` is automatically
    # populated by `unidep` with the dependencies from the `requirements.yaml`
)


================================================
FILE: example/setup_py_project/setup_py_project.py
================================================


================================================
FILE: example/setuptools_project/README.md
================================================
# Setuptools `pyproject.toml` integration example

> [!TIP]
> - **Standard Installation**: In this example folder, use `pip install .` to install all Python dependencies that are pip-installable, along with the local package itself.
> - **Comprehensive Installation with `unidep`**: To install all dependencies, including those that are not Python-specific, use `unidep install .`. This command performs the following actions in sequence:
>   1. `conda install [dependencies from requirements.yaml]` – Installs all Conda installable dependencies.
>   2. `pip install [dependencies from requirements.yaml]` – Installs remaining pip-only dependencies.
>   3. `pip install .` – Installs the local package.

For projects using `setuptools` with only a `pyproject.toml` file, configure `unidep` in `pyproject.toml` alongside a `requirements.yaml` file.

**Example Configuration for projects using `pyproject.toml`**:

Add this to `pyproject.toml`:

```toml
[build-system]
build-backend = "setuptools.build_meta"
requires = ["setuptools", "unidep"]  # add "unidep" here

[project]
dynamic = ["dependencies"]  # add "dependencies" here
```

Then, of course, add a `requirements.yaml` and you are good to go! 🎉

> [!NOTE]
> See the [`pyproject.toml`](pyproject.toml) for a working example.


================================================
FILE: example/setuptools_project/pyproject.toml
================================================
[build-system]
requires = ["setuptools", "unidep"]
build-backend = "setuptools.build_meta"

[project]
name = "setuptools_project"
description = "Example setuptools_project for `unidep`."
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }]
# `dependencies` is not needed because it is automatically
# populated by `unidep` with the dependencies from the `requirements.yaml`
# dependencies = []
version = "0.1.0"
dynamic = ["dependencies", "optional-dependencies"]

[tool.setuptools]
py-modules = ["setuptools_project"]


================================================
FILE: example/setuptools_project/requirements.yaml
================================================
name: setuptools_project
channels:
  - conda-forge
dependencies:
  - adaptive  # [linux64]
  - pfapack  # [linux64]
  - packaging
  - pip: markdown-code-runner
  - pip: numthreads
local_dependencies:
  - ../hatch_project[test]  # depends on hatch_project
optional_dependencies:
  dev:
    - mypy
    - ruff
  test:
    - pytest-xdist
  setup_py:
    # Optional local dependency
    - ../setup_py_project


================================================
FILE: example/setuptools_project/setuptools_project.py
================================================


================================================
FILE: pyproject.toml
================================================
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "unidep"
description = "Unified Conda and Pip requirements management."
dynamic = ["version"]
authors = [{ name = "Bas Nijholt", email = "bas@nijho.lt" }]
dependencies = [
    "packaging",
    "ruamel.yaml",
    "typing_extensions; python_version < '3.8'",
    "tomli; python_version < '3.11'",
]
requires-python = ">=3.7"

[project.readme]
file = "README.md"
content-type = "text/markdown"

[project.urls]
Homepage = "https://github.com/basnijholt/unidep"

[project.optional-dependencies]
toml = ["tomli; python_version < '3.11'"]
conda-lock = ["conda-lock", "conda-package-handling"]
pip-compile = ["pip-tools"]
pytest = ["pytest", "GitPython"] # The pytest plugin
rich = ["rich-argparse"]
pixi = ["pixi-to-conda-lock; python_version >= '3.9'", "tomli_w"]
# Everything except 'test' and 'docs'
all = [
    "unidep[toml,conda-lock,pip-compile,pytest,rich,pixi]",
]
docs = [
    "myst-parser",
    "sphinx",
    "furo",
    "emoji",
    "sphinx-autodoc-typehints",
]
test = [
    "unidep[all]",
    "tomli_w",
    "pytest",
    "pre-commit",
    "coverage",
    "pytest-cov",
    "pytest-mock",
    "conda-package-handling",
    "rich",
]

[project.scripts]
unidep = "unidep:_cli.main"

[project.entry-points."setuptools.finalize_distribution_options"]
unidep = "unidep._setuptools_integration:_setuptools_finalizer"

[project.entry-points.hatch]
unidep = "unidep._hatch_integration"

[project.entry-points.pytest11]
affected = "unidep._pytest_plugin"

[tool.setuptools.packages.find]
include = ["unidep.*", "unidep"]

[tool.setuptools.dynamic]
version = { attr = "unidep._version.__version__" }

[tool.setuptools.package-data]
"unidep" = ["py.typed"]

[tool.pytest.ini_options]
addopts = """
    --cov=unidep
    --cov-report term
    --cov-report html
    --cov-report xml
    --cov-fail-under=100
    -W error
    -vvv
"""

[tool.coverage.run]
omit = ["unidep/_pytest_plugin.py", "unidep/_hatch_integration.py"]
patch = ["subprocess"]

[tool.coverage.report]
exclude_lines = [
    "pragma: no cover",
    "raise NotImplementedError",
    "if TYPE_CHECKING:",
    "if __name__ == .__main__.:",
]

[tool.black]
line_length = 88

[tool.ruff]
line-length = 88
target-version = "py37"

[tool.ruff.lint]
select = ["ALL"]
ignore = [
    "T20",     # flake8-print
    "ANN101",  # Missing type annotation for {name} in method
    "S101",    # Use of assert detected
    "S603",    # S603 `subprocess` call: check for execution of untrusted input
    "PD901",   # df is a bad variable name. Be kinder to your future self.
    "ANN401",  # Dynamically typed expressions (typing.Any) are disallowed in {name}
    "D402",    # First line should not be the function's signature
    "PLW0603", # Using the global statement to update `X` is discouraged
    "D401",    # First line of docstring should be in imperative mood
    "SLF001",  # Private member accessed
    "PLR0913", # Too many arguments in function definition
    "TD002",   # Missing author in TODO
]

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["SLF001", "D103", "E501", "PLR2004"]
"tests/test_examples.py" = ["E501"]
".github/*" = ["INP001"]
"example/*" = ["INP001", "D100"]
"docs/*" = ["INP001", "E501"]

[tool.ruff.lint.mccabe]
max-complexity = 18

[tool.mypy]
python_version = "3.8"  # 3.7 is no longer supported by mypy

# Use bump-my-version, e.g., call `bump-my-version bump minor`
[tool.bumpversion]
current_version = "3.2.0"
commit = true
commit_args = "--no-verify"
tag = true
tag_name = "v{new_version}"

[[tool.bumpversion.files]]
filename = "unidep/_version.py"
replace = '__version__ = "{new_version}"'
search = '__version__ = "{current_version}"'


================================================
FILE: tests/__init__.py
================================================
"""Tests for the ``unidep`` package."""


================================================
FILE: tests/helpers.py
================================================
"""unidep tests."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING

from unidep._dependencies_parsing import yaml_to_toml

if TYPE_CHECKING:
    import sys

    if sys.version_info >= (3, 8):
        from typing import Literal
    else:  # pragma: no cover
        from typing_extensions import Literal


REPO_ROOT = Path(__file__).parent.parent


def maybe_as_toml(toml_or_yaml: Literal["toml", "yaml"], p: Path) -> Path:
    """Optionally convert a ``requirements.yaml`` fixture to ``pyproject.toml``.

    When ``toml_or_yaml`` is ``"yaml"`` the path is returned untouched.
    When it is ``"toml"``, the YAML file is translated via
    ``yaml_to_toml``, the original file is removed, and the result is
    written as ``pyproject.toml`` in the same directory.  Returns the
    path of the file that should be parsed afterwards.
    """
    if toml_or_yaml != "toml":
        return p
    converted = yaml_to_toml(p)
    p.unlink()  # drop the YAML source so only one config file remains
    toml_path = p.with_name("pyproject.toml")
    toml_path.write_text(converted)
    return toml_path


================================================
FILE: tests/shared_local_install_monorepo/project1/pyproject.toml
================================================
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "project1"
version = "0.0.1"


================================================
FILE: tests/shared_local_install_monorepo/project1/requirements.yaml
================================================
name: project1
local_dependencies:
  - ../shared


================================================
FILE: tests/shared_local_install_monorepo/project2/pyproject.toml
================================================
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "project2"
version = "0.0.1"


================================================
FILE: tests/shared_local_install_monorepo/project2/requirements.yaml
================================================
name: project2
local_dependencies:
  - ../shared


================================================
FILE: tests/shared_local_install_monorepo/shared/pyproject.toml
================================================
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "shared"
version = "0.0.1"


================================================
FILE: tests/shared_local_install_monorepo/shared/requirements.yaml
================================================
name: shared
dependencies: []


================================================
FILE: tests/simple_monorepo/common-requirements.yaml
================================================
# This file is used in the `local_dependencies:` section of `project1/requirements.yaml`
# and `project2/requirements.yaml`.
name: common-requirements
channels:
  - conda-forge
dependencies:
  - conda: python_abi


================================================
FILE: tests/simple_monorepo/conda-lock.yml
================================================
# This file is created and managed by `unidep` 0.41.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep conda-lock -d tests/simple_monorepo`
#
# This environment can be installed with
# `micromamba create -f conda-lock.yml -n myenv`
# This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/

version: 1
metadata:
  content_hash:
    osx-64: ee56565c906fa861ded63721f99e398fd1734b57368e6f701e25dddf03e7960a
    osx-arm64: 08362c60bc03c882ae95fa83c4d29e9fb0b7795d63d74ada081ac0fa8a7c69f8
  channels:
  - url: conda-forge
    used_env_vars: []
  platforms:
  - osx-64
  - osx-arm64
  sources:
  - tmp.environment.yaml
package:
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: osx-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda
  hash:
    md5: 6097a6ca9ada32699b5fc4312dd6ef18
    sha256: 61fb2b488928a54d9472113e1280b468a309561caa54f33825a3593da390b242
  category: main
  optional: false
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda
  hash:
    md5: 1bbc659ca658bfd49a481b5ef7a0f40f
    sha256: bfa84296a638bea78a8bb29abc493ee95f2a0218775642474a840411b950fe5f
  category: main
  optional: false
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda
  hash:
    md5: 87201ac4314b911b74197e588cca3639
    sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14
  category: main
  optional: false
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda
  hash:
    md5: bbb3a02c78b2d8219d7213f76d644a2a
    sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f
  category: main
  optional: false
- name: tzdata
  version: 2023d
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda
  hash:
    md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0
    sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d
  category: main
  optional: false


================================================
FILE: tests/simple_monorepo/project1/conda-lock.yml
================================================
# This file is created and managed by `unidep` 0.41.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep conda-lock -d tests/simple_monorepo`
#
# This environment can be installed with
# `micromamba create -f conda-lock.yml -n myenv`
# This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/

version: 1
metadata:
  content_hash:
    osx-64: unidep-is-awesome
    osx-arm64: unidep-is-awesome
  channels:
  - url: conda-forge
    used_env_vars: []
  platforms:
  - osx-64
  - osx-arm64
  sources:
  - tests/simple_monorepo/project1/requirements.yaml
package:
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: osx-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda
  hash:
    md5: 6097a6ca9ada32699b5fc4312dd6ef18
    sha256: 61fb2b488928a54d9472113e1280b468a309561caa54f33825a3593da390b242
  category: main
  optional: false
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda
  hash:
    md5: 1bbc659ca658bfd49a481b5ef7a0f40f
    sha256: bfa84296a638bea78a8bb29abc493ee95f2a0218775642474a840411b950fe5f
  category: main
  optional: false
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda
  hash:
    md5: 87201ac4314b911b74197e588cca3639
    sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14
  category: main
  optional: false
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda
  hash:
    md5: bbb3a02c78b2d8219d7213f76d644a2a
    sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f
  category: main
  optional: false
- name: tzdata
  version: 2023d
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda
  hash:
    md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0
    sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d
  category: main
  optional: false


================================================
FILE: tests/simple_monorepo/project1/requirements.yaml
================================================
name: project1
channels:
  - conda-forge
dependencies:
  - conda: bzip2
local_dependencies:
  - ../project2  # this means `project2` is a dependency of `project1`
  - ../common-requirements.yaml


================================================
FILE: tests/simple_monorepo/project2/conda-lock.yml
================================================
# This file is created and managed by `unidep` 0.41.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep conda-lock -d tests/simple_monorepo`
#
# This environment can be installed with
# `micromamba create -f conda-lock.yml -n myenv`
# This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/

version: 1
metadata:
  content_hash:
    osx-64: unidep-is-awesome
    osx-arm64: unidep-is-awesome
  channels:
  - url: conda-forge
    used_env_vars: []
  platforms:
  - osx-64
  - osx-arm64
  sources:
  - tests/simple_monorepo/project2/requirements.yaml
package:
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda
  hash:
    md5: 87201ac4314b911b74197e588cca3639
    sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14
  category: main
  optional: false
- name: python_abi
  version: '3.12'
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda
  hash:
    md5: bbb3a02c78b2d8219d7213f76d644a2a
    sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f
  category: main
  optional: false
- name: tzdata
  version: 2023d
  manager: conda
  platform: osx-arm64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda
  hash:
    md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0
    sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d
  category: main
  optional: false


================================================
FILE: tests/simple_monorepo/project2/requirements.yaml
================================================
name: project2
channels:
  - conda-forge
dependencies:
  - conda: tzdata  # [arm64]
local_dependencies:
  - ../common-requirements.yaml
platforms:
  - osx-arm64
  - osx-64


================================================
FILE: tests/test-pip-and-conda-different-name/conda-lock.yml
================================================
# This file is created and managed by `unidep` 0.23.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep conda-lock -d /Users/basnijholt/Code/unidep/tests/test-pip-and-conda-different-name`
#
# This environment can be installed with
# `micromamba create -f conda-lock.yml -n myenv`
# This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/

version: 1
metadata:
  content_hash:
    linux-64: c18392f096a6c21233400900e6ba90c299ad2d28348b69cb62a7cf66734bfe81
  channels:
  - url: conda-forge
    used_env_vars: []
  platforms:
  - linux-64
  sources:
  - tmp.environment.yaml
package:
- name: _libgcc_mutex
  version: '0.1'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2
  hash:
    md5: d7c89558ba9fa0495403155b64376d81
    sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726
  category: main
  optional: false
- name: _openmp_mutex
  version: '4.5'
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
    libgomp: '>=7.5.0'
  url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2
  hash:
    md5: 73aaf86a425cc6e73fcf236a5a46396d
    sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22
  category: main
  optional: false
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda
  hash:
    md5: 69b8b6202a07720f448be700e300ccf4
    sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8
  category: main
  optional: false
- name: ca-certificates
  version: 2023.11.17
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda
  hash:
    md5: 01ffc8d36f9eba0ce0b3c1955fa780ee
    sha256: fb4b9f4b7d885002db0b93e22f44b5b03791ef3d4efdc9d0662185a0faafd6b6
  category: main
  optional: false
- name: ld_impl_linux-64
  version: '2.40'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda
  hash:
    md5: 7aca3059a1729aa76c597603f10b0dd3
    sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd
  category: main
  optional: false
- name: libffi
  version: 3.4.2
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=9.4.0'
  url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2
  hash:
    md5: d645c6d2ac96843a2bfaccd2d62b3ac3
    sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e
  category: main
  optional: false
- name: libgcc-ng
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
    _openmp_mutex: '>=4.5'
  url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda
  hash:
    md5: 23fdf1fef05baeb7eadc2aed5fb0011f
    sha256: 5e88f658e07a30ab41b154b42c59f079b168acfa9551a75bdc972099453f4105
  category: main
  optional: false
- name: libgomp
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
  url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda
  hash:
    md5: 7124cbb46b13d395bdde68f2d215c989
    sha256: 6ebedee39b6bbbc969715d0d7fa4b381cce67e1139862604ffa393f821c08e81
  category: main
  optional: false
- name: libnsl
  version: 2.0.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda
  hash:
    md5: 30fd6e37fe21f86f4bd26d6ee73eeec7
    sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6
  category: main
  optional: false
- name: libsqlite
  version: 3.44.2
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libzlib: '>=1.2.13,<1.3.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda
  hash:
    md5: 3b6a9f225c3dbe0d24f4fedd4625c5bf
    sha256: ee2c4d724a3ed60d5b458864d66122fb84c6ce1df62f735f90d8db17b66cd88a
  category: main
  optional: false
- name: libstdcxx-ng
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda
  hash:
    md5: 937eaed008f6bf2191c5fe76f87755e9
    sha256: 6c6c49efedcc5709a66f19fb6b26b69c6a5245310fd1d9a901fd5e38aaf7f882
  category: main
  optional: false
- name: libuuid
  version: 2.38.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda
  hash:
    md5: 40b61aab5c7ba9ff276c41cfffe6b80b
    sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18
  category: main
  optional: false
- name: libzlib
  version: 1.2.13
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda
  hash:
    md5: f36c115f1ee199da648e0597ec2047ad
    sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4
  category: main
  optional: false
- name: msgpack-python
  version: 1.0.7
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libstdcxx-ng: '>=12'
    python: '>=3.10,<3.11.0a0'
    python_abi: 3.10.*
  url: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1e2_0.conda
  hash:
    md5: dc5263dcaa1347e5a456ead3537be27d
    sha256: a5c7612029e3871b0af0bd69e8ee1545d3deb93b5bec29cf1bf72522375fda31
  category: main
  optional: false
- name: ncurses
  version: '6.4'
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda
  hash:
    md5: 7dbaa197d7ba6032caf7ae7f32c1efa0
    sha256: 91cc03f14caf96243cead96c76fe91ab5925a695d892e83285461fb927dece5e
  category: main
  optional: false
- name: openssl
  version: 3.2.0
  manager: conda
  platform: linux-64
  dependencies:
    ca-certificates: ''
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda
  hash:
    md5: 603827b39ea2b835268adb8c821b8570
    sha256: 80efc6f429bd8e622d999652e5cba2ca56fcdb9c16a439d2ce9b4313116e4a87
  category: main
  optional: false
- name: pip
  version: 23.3.1
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
    setuptools: ''
    wheel: ''
  url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda
  hash:
    md5: 2400c0b86889f43aa52067161e1fb108
    sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563
  category: main
  optional: false
- name: python
  version: 3.10.13
  manager: conda
  platform: linux-64
  dependencies:
    bzip2: '>=1.0.8,<2.0a0'
    ld_impl_linux-64: '>=2.36.1'
    libffi: '>=3.4,<4.0a0'
    libgcc-ng: '>=12'
    libnsl: '>=2.0.1,<2.1.0a0'
    libsqlite: '>=3.43.2,<4.0a0'
    libuuid: '>=2.38.1,<3.0a0'
    libzlib: '>=1.2.13,<1.3.0a0'
    ncurses: '>=6.4,<7.0a0'
    openssl: '>=3.1.4,<4.0a0'
    readline: '>=8.2,<9.0a0'
    tk: '>=8.6.13,<8.7.0a0'
    tzdata: ''
    xz: '>=5.2.6,<6.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda
  hash:
    md5: f3a8c32aa764c3e7188b4b810fc9d6ce
    sha256: a53410f459f314537b379982717b1c5911efc2f0cc26d63c4d6f831bcb31c964
  category: main
  optional: false
- name: python_abi
  version: '3.10'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda
  hash:
    md5: 26322ec5d7712c3ded99dd656142b8ce
    sha256: 456bec815bfc2b364763084d08b412fdc4c17eb9ccc66a36cb775fa7ac3cbaec
  category: main
  optional: false
- name: readline
  version: '8.2'
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    ncurses: '>=6.3,<7.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda
  hash:
    md5: 47d31b792659ce70f470b5c82fdfb7a4
    sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7
  category: main
  optional: false
- name: setuptools
  version: 68.2.2
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
  url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda
  hash:
    md5: fc2166155db840c634a1291a5c35a709
    sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7
  category: main
  optional: false
- name: tk
  version: 8.6.13
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libzlib: '>=1.2.13,<1.3.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda
  hash:
    md5: d453b98d9c83e71da0741bb0ff4d76bc
    sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e
  category: main
  optional: false
- name: tzdata
  version: 2023c
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda
  hash:
    md5: 939e3e74d8be4dac89ce83b20de2492a
    sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55
  category: main
  optional: false
- name: wheel
  version: 0.42.0
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
  url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda
  hash:
    md5: 1cdea58981c5cbc17b51973bcaddcea7
    sha256: 80be0ccc815ce22f80c141013302839b0ed938a2edb50b846cf48d8a8c1cfa01
  category: main
  optional: false
- name: xz
  version: 5.2.6
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2
  hash:
    md5: 2161070d867d1b1204ea749c8eec4ef0
    sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162
  category: main
  optional: false
- name: fluent-logger
  version: 0.10.0
  manager: pip
  platform: linux-64
  dependencies:
    msgpack: '>1.0'
  url: https://files.pythonhosted.org/packages/00/43/9cbd7756dfe2cddc0a76ec2eaec56449ac126455c36fe03ecc86f7feac8f/fluent_logger-0.10.0-py2.py3-none-any.whl
  hash:
    sha256: 543637e5e62ec3fc3c92b44e5a4e148a3cea88a0f8ca4fae26c7e60fda7564c1
  category: main
  optional: false
- name: rsync-time-machine
  version: 1.3.0
  manager: pip
  platform: linux-64
  dependencies: {}
  url: https://files.pythonhosted.org/packages/42/88/f32647517b00f937c66ae2891f22ebb614ac521386254c2eefd9d770c05e/rsync_time_machine-1.3.0-py3-none-any.whl
  hash:
    sha256: 371c23dddddedee51c57dec1f31de82465b9139f17357754dc92269d58c3d454
  category: main
  optional: false


================================================
FILE: tests/test-pip-and-conda-different-name/project1/requirements.yaml
================================================
name: project2
channels:
  - conda-forge
dependencies:
  - conda: python=3.10
  - pip: fluent-logger  # depends on msgpack, but on conda-forge it's called msgpack-python
  - pip: rsync-time-machine
platforms:
  - linux-64


================================================
FILE: tests/test-pip-and-conda-different-name/project2/requirements.yaml
================================================
name: project2
channels:
  - conda-forge
dependencies:
  - conda: msgpack-python
platforms:
  - linux-64


================================================
FILE: tests/test-pip-package-with-conda-dependency/conda-lock.yml
================================================
# This file is created and managed by `unidep` 0.23.0.
# For details see https://github.com/basnijholt/unidep
# File generated with: `unidep conda-lock -d tests/test-pip-package-with-conda-dependency`
#
# This environment can be installed with
# `micromamba create -f conda-lock.yml -n myenv`
# This file is a `conda-lock` file generated via `unidep`.
# For details see https://conda.github.io/conda-lock/

version: 1
metadata:
  content_hash:
    linux-64: 64492feacfc7d0ed4ee041529c75ad1ec9543bb69603d7519427014d47061f9a
  channels:
  - url: conda-forge
    used_env_vars: []
  platforms:
  - linux-64
  sources:
  - tmp.environment.yaml
package:
- name: _libgcc_mutex
  version: '0.1'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2
  hash:
    md5: d7c89558ba9fa0495403155b64376d81
    sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726
  category: main
  optional: false
- name: _openmp_mutex
  version: '4.5'
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
    libgomp: '>=7.5.0'
  url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2
  hash:
    md5: 73aaf86a425cc6e73fcf236a5a46396d
    sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22
  category: main
  optional: false
- name: bzip2
  version: 1.0.8
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda
  hash:
    md5: 69b8b6202a07720f448be700e300ccf4
    sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8
  category: main
  optional: false
- name: ca-certificates
  version: 2023.11.17
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda
  hash:
    md5: 01ffc8d36f9eba0ce0b3c1955fa780ee
    sha256: fb4b9f4b7d885002db0b93e22f44b5b03791ef3d4efdc9d0662185a0faafd6b6
  category: main
  optional: false
- name: ld_impl_linux-64
  version: '2.40'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda
  hash:
    md5: 7aca3059a1729aa76c597603f10b0dd3
    sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd
  category: main
  optional: false
- name: libexpat
  version: 2.5.0
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda
  hash:
    md5: 6305a3dd2752c76335295da4e581f2fd
    sha256: 74c98a563777ae2ad71f1f74d458a8ab043cee4a513467c159ccf159d0e461f3
  category: main
  optional: false
- name: libffi
  version: 3.4.2
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=9.4.0'
  url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2
  hash:
    md5: d645c6d2ac96843a2bfaccd2d62b3ac3
    sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e
  category: main
  optional: false
- name: libgcc-ng
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
    _openmp_mutex: '>=4.5'
  url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda
  hash:
    md5: 23fdf1fef05baeb7eadc2aed5fb0011f
    sha256: 5e88f658e07a30ab41b154b42c59f079b168acfa9551a75bdc972099453f4105
  category: main
  optional: false
- name: libgomp
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies:
    _libgcc_mutex: '0.1'
  url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda
  hash:
    md5: 7124cbb46b13d395bdde68f2d215c989
    sha256: 6ebedee39b6bbbc969715d0d7fa4b381cce67e1139862604ffa393f821c08e81
  category: main
  optional: false
- name: libnsl
  version: 2.0.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda
  hash:
    md5: 30fd6e37fe21f86f4bd26d6ee73eeec7
    sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6
  category: main
  optional: false
- name: libsqlite
  version: 3.44.2
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libzlib: '>=1.2.13,<1.3.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda
  hash:
    md5: 3b6a9f225c3dbe0d24f4fedd4625c5bf
    sha256: ee2c4d724a3ed60d5b458864d66122fb84c6ce1df62f735f90d8db17b66cd88a
  category: main
  optional: false
- name: libstdcxx-ng
  version: 13.2.0
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda
  hash:
    md5: 937eaed008f6bf2191c5fe76f87755e9
    sha256: 6c6c49efedcc5709a66f19fb6b26b69c6a5245310fd1d9a901fd5e38aaf7f882
  category: main
  optional: false
- name: libuuid
  version: 2.38.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda
  hash:
    md5: 40b61aab5c7ba9ff276c41cfffe6b80b
    sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18
  category: main
  optional: false
- name: libzlib
  version: 1.2.13
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda
  hash:
    md5: f36c115f1ee199da648e0597ec2047ad
    sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4
  category: main
  optional: false
- name: ncurses
  version: '6.4'
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda
  hash:
    md5: 7dbaa197d7ba6032caf7ae7f32c1efa0
    sha256: 91cc03f14caf96243cead96c76fe91ab5925a695d892e83285461fb927dece5e
  category: main
  optional: false
- name: openssl
  version: 3.2.0
  manager: conda
  platform: linux-64
  dependencies:
    ca-certificates: ''
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda
  hash:
    md5: 603827b39ea2b835268adb8c821b8570
    sha256: 80efc6f429bd8e622d999652e5cba2ca56fcdb9c16a439d2ce9b4313116e4a87
  category: main
  optional: false
- name: pip
  version: 23.3.1
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
    setuptools: ''
    wheel: ''
  url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda
  hash:
    md5: 2400c0b86889f43aa52067161e1fb108
    sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563
  category: main
  optional: false
- name: pybind11
  version: 2.11.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libstdcxx-ng: '>=12'
    pybind11-global: 2.11.1
    python: '>=3.11,<3.12.0a0'
    python_abi: 3.11.*
  url: https://conda.anaconda.org/conda-forge/linux-64/pybind11-2.11.1-py311h9547e67_2.conda
  hash:
    md5: 64a8933c635a78a6dc0f0cb07ef19a6e
    sha256: 98ea0d8edd21b6ef7205aeafa6dbdcb1829aeb888ec8a4ba69d58effb912d536
  category: main
  optional: false
- name: pybind11-global
  version: 2.11.1
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libstdcxx-ng: '>=12'
    python: '>=3.11,<3.12.0a0'
    python_abi: 3.11.*
  url: https://conda.anaconda.org/conda-forge/linux-64/pybind11-global-2.11.1-py311h9547e67_2.conda
  hash:
    md5: 71330b362711dd503ef2e8139570b8e0
    sha256: 6f231d62f03e99c0e45d70f17a82c0482dbe8286412fe44556bcfeccbacd5c0c
  category: main
  optional: false
- name: python
  version: 3.11.6
  manager: conda
  platform: linux-64
  dependencies:
    bzip2: '>=1.0.8,<2.0a0'
    ld_impl_linux-64: '>=2.36.1'
    libexpat: '>=2.5.0,<3.0a0'
    libffi: '>=3.4,<4.0a0'
    libgcc-ng: '>=12'
    libnsl: '>=2.0.0,<2.1.0a0'
    libsqlite: '>=3.43.0,<4.0a0'
    libuuid: '>=2.38.1,<3.0a0'
    libzlib: '>=1.2.13,<1.3.0a0'
    ncurses: '>=6.4,<7.0a0'
    openssl: '>=3.1.3,<4.0a0'
    readline: '>=8.2,<9.0a0'
    tk: '>=8.6.13,<8.7.0a0'
    tzdata: ''
    xz: '>=5.2.6,<6.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda
  hash:
    md5: b0dfbe2fcbfdb097d321bfd50ecddab1
    sha256: 84f13bd70cff5dcdaee19263b2d4291d5793856a718efc1b63a9cfa9eb6e2ca1
  category: main
  optional: false
- name: python_abi
  version: '3.11'
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda
  hash:
    md5: d786502c97404c94d7d58d258a445a65
    sha256: 0be3ac1bf852d64f553220c7e6457e9c047dfb7412da9d22fbaa67e60858b3cf
  category: main
  optional: false
- name: readline
  version: '8.2'
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    ncurses: '>=6.3,<7.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda
  hash:
    md5: 47d31b792659ce70f470b5c82fdfb7a4
    sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7
  category: main
  optional: false
- name: setuptools
  version: 68.2.2
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
  url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda
  hash:
    md5: fc2166155db840c634a1291a5c35a709
    sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7
  category: main
  optional: false
- name: tk
  version: 8.6.13
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
    libzlib: '>=1.2.13,<1.3.0a0'
  url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda
  hash:
    md5: d453b98d9c83e71da0741bb0ff4d76bc
    sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e
  category: main
  optional: false
- name: tzdata
  version: 2023c
  manager: conda
  platform: linux-64
  dependencies: {}
  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda
  hash:
    md5: 939e3e74d8be4dac89ce83b20de2492a
    sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55
  category: main
  optional: false
- name: wheel
  version: 0.42.0
  manager: conda
  platform: linux-64
  dependencies:
    python: '>=3.7'
  url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda
  hash:
    md5: 1cdea58981c5cbc17b51973bcaddcea7
    sha256: 80be0ccc815ce22f80c141013302839b0ed938a2edb50b846cf48d8a8c1cfa01
  category: main
  optional: false
- name: xz
  version: 5.2.6
  manager: conda
  platform: linux-64
  dependencies:
    libgcc-ng: '>=12'
  url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2
  hash:
    md5: 2161070d867d1b1204ea749c8eec4ef0
    sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162
  category: main
  optional: false
- name: cutde
  version: 23.6.25
  manager: pip
  platform: linux-64
  dependencies:
    mako: '*'
    pybind11: '*'
  url: https://files.pythonhosted.org/packages/08/15/0ae45db8fcc0d2da6002d13900689e2fe7773da038922b1ff450ab08088e/cutde-23.6.25.tar.gz
  hash:
    sha256: 946aeb03b3bf2f9060dabda1dd84330a67a7fddab27879010107382bcca31eac
  category: main
  optional: false
- name: mako
  version: 1.3.0
  manager: pip
  platform: linux-64
  dependencies:
    markupsafe: '>=0.9.2'
  url: https://files.pythonhosted.org/packages/24/3b/11fe92d68c6a42468ddab0cf03f454419b0788fff4e91ba46b8bebafeffd/Mako-1.3.0-py3-none-any.whl
  hash:
    sha256: 57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9
  category: main
  optional: false
- name: markupsafe
  version: 2.1.3
  manager: pip
  platform: linux-64
  dependencies: {}
  url: https://files.pythonhosted.org/packages/fe/21/2eff1de472ca6c99ec3993eab11308787b9879af9ca8bbceb4868cf4f2ca/MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
  hash:
    sha256: bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2
  category: main
  optional: false
- name: rsync-time-machine
  version: 1.3.0
  manager: pip
  platform: linux-64
  dependencies: {}
  url: https://files.pythonhosted.org/packages/42/88/f32647517b00f937c66ae2891f22ebb614ac521386254c2eefd9d770c05e/rsync_time_machine-1.3.0-py3-none-any.whl
  hash:
    sha256: 371c23dddddedee51c57dec1f31de82465b9139f17357754dc92269d58c3d454
  category: main
  optional: false


================================================
FILE: tests/test-pip-package-with-conda-dependency/project1/requirements.yaml
================================================
name: project1
channels:
  - conda-forge
dependencies:
  - pybind11
platforms:
  - linux-64


================================================
FILE: tests/test-pip-package-with-conda-dependency/project2/requirements.yaml
================================================
name: project2
channels:
  - conda-forge
dependencies:
  - conda: python=3.11
  - pip: cutde  # depends on pybind11, but pybind11 is installed via conda because project1 requires it
  - pip: rsync-time-machine
platforms:
  - linux-64


================================================
FILE: tests/test_cli.py
================================================
"""unidep CLI tests."""

from __future__ import annotations

import os
import platform
import re
import shutil
import subprocess
import sys
import textwrap
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Generator
from unittest.mock import patch

import pytest

try:
    import tomllib
except ImportError:  # pragma: no cover
    import tomli as tomllib

from unidep._cli import (
    CondaExecutable,
    _capitalize_dir,
    _collect_available_optional_dependency_groups,
    _collect_selected_conda_like_platforms,
    _conda_env_list,
    _conda_info,
    _conda_root_prefix,
    _find_windows_path,
    _flatten_selected_dependency_entries,
    _get_conda_executable,
    _identify_conda_executable,
    _install_all_command,
    _install_command,
    _maybe_conda_run,
    _maybe_create_conda_env_args,
    _merge_command,
    _merge_optional_dependency_extras,
    _pip_compile_command,
    _pip_subcommand,
    _print_versions,
)
from unidep._dependencies_parsing import parse_requirements

# Repository root: this file lives in ``tests/``, one level below the root.
REPO_ROOT = Path(__file__).parent.parent

# Names of the example projects under ``example/`` that the CLI tests
# install and inspect (each is a folder containing a unidep-managed project).
EXAMPLE_PROJECTS = [
    "setup_py_project",
    "setuptools_project",
    "hatch_project",
    "pyproject_toml_project",
    "hatch2_project",
]


def current_env_and_prefix() -> tuple[str, Path]:
    """Get the current conda environment name and prefix."""
    # Prefer conda's root prefix; fall back to micromamba when conda is
    # unavailable or its metadata lookup fails.
    try:
        root = _conda_root_prefix("conda")
    except (KeyError, FileNotFoundError):
        root = _conda_root_prefix("micromamba")
    parent_dir, name = Path(os.environ["CONDA_PREFIX"]).parts[-2:]
    # A prefix that is not inside an ``envs/`` folder is the base environment.
    if parent_dir == "envs":
        return name, root / "envs" / name
    return "base", root


@pytest.mark.parametrize("project", EXAMPLE_PROJECTS)
def test_install_command(project: str, capsys: pytest.CaptureFixture) -> None:
    """Dry-run ``_install_command`` targeting the env by name and by prefix."""
    current_env, prefix = current_env_and_prefix()
    print(f"current_env: {current_env}, prefix: {prefix}")
    # Exercise both ways of selecting the conda environment.
    for env_name, env_prefix in ((current_env, None), (None, prefix)):
        _install_command(
            REPO_ROOT / "example" / project,
            conda_executable="",  # type: ignore[arg-type]
            conda_env_name=env_name,  # type: ignore[arg-type]
            conda_env_prefix=env_prefix,  # type: ignore[arg-type]
            conda_lock_file=None,
            dry_run=True,
            editable=False,
            verbose=True,
        )
        out = capsys.readouterr().out
        for expected in (
            "Installing conda dependencies",
            "Installing pip dependencies",
            "Installing project with",
        ):
            assert expected in out


@pytest.mark.parametrize("project", EXAMPLE_PROJECTS)
def test_unidep_install_dry_run(project: str) -> None:
    """Run ``unidep install --dry-run`` on an example project via the CLI."""
    requirements_path = REPO_ROOT / "example" / project
    assert requirements_path.exists(), "Requirements file does not exist"

    cmd = ["unidep", "install", "--dry-run", str(requirements_path)]
    result = subprocess.run(  # noqa: S607
        cmd,
        check=True,
        capture_output=True,
        text=True,
        encoding="utf-8",
    )

    assert result.returncode == 0, "Command failed to execute successfully"
    # These two projects are expected to also print a conda install step.
    if project in ("setup_py_project", "setuptools_project"):
        assert "📦 Installing conda dependencies with" in result.stdout
    assert "📦 Installing pip dependencies with" in result.stdout
    assert "📦 Installing project with" in result.stdout


def test_install_all_command(capsys: pytest.CaptureFixture) -> None:
    """Dry-run ``_install_all_command`` over the example folder."""
    example_dir = REPO_ROOT / "example"
    _install_all_command(
        conda_executable="",  # type: ignore[arg-type]
        conda_env_name=None,
        conda_env_prefix=None,
        conda_lock_file=None,
        dry_run=True,
        editable=True,
        directory=example_dir,
        depth=1,
        verbose=False,
    )
    out = capsys.readouterr().out
    assert "Installing conda dependencies" in out
    assert "Installing pip dependencies" in out
    # Every example project should be pip-installed editable, sorted by path.
    editable_args = " ".join(
        f"-e {example_dir / name}" for name in sorted(EXAMPLE_PROJECTS)
    )
    assert f"pip install --no-deps {editable_args}`" in out


def test_install_command_deduplicates_shared_local_dependencies(
    tmp_path: Path,
    capsys: pytest.CaptureFixture,
) -> None:
    """A local dependency shared by two projects appears only once."""
    fixture_root = REPO_ROOT / "tests" / "shared_local_install_monorepo"
    monorepo = tmp_path / fixture_root.name
    shutil.copytree(fixture_root, monorepo)
    shared = monorepo / "shared"
    project1 = monorepo / "project1"
    project2 = monorepo / "project2"

    _install_command(
        project1,
        project2,
        conda_executable="",  # type: ignore[arg-type]
        conda_env_name=None,
        conda_env_prefix=None,
        conda_lock_file=None,
        dry_run=True,
        editable=True,
        no_dependencies=True,
        no_uv=True,
        verbose=False,
    )

    out = capsys.readouterr().out
    expected_args = " ".join(f"-e {p}" for p in sorted((project1, project2, shared)))
    assert f"pip install --no-deps {expected_args}`" in out
    # The shared project must not be listed twice.
    assert out.count(f"-e {shared}") == 1


def mock_uv_env(tmp_path: Path) -> dict[str, str]:
    """Create a mock uv executable and return env with it in the PATH."""
    on_windows = platform.system() == "Windows"
    if on_windows:
        name, script = "uv.bat", "@echo off\necho Mock uv called %*"
    else:
        name, script = "uv", "#!/bin/sh\necho 'Mock uv called' \"$@\""
    fake_uv = tmp_path / name
    fake_uv.write_text(script)
    fake_uv.chmod(0o755)  # Make it executable

    # Prepend tmp_path so the shim shadows any real `uv` on the PATH.
    env = dict(os.environ)
    env["PATH"] = f"{tmp_path}{os.pathsep}{env['PATH']}"
    return env


@pytest.mark.parametrize("with_uv", [True, False])
def test_unidep_install_all_dry_run(tmp_path: Path, with_uv: bool) -> None:  # noqa: FBT001
    """End-to-end dry run of ``unidep install-all`` with and without uv."""
    example_dir = REPO_ROOT / "example"
    assert example_dir.exists(), "Requirements file does not exist"

    cmd = [
        "unidep",
        "install-all",
        "--dry-run",
        "--editable",
        "--directory",
        str(example_dir),
    ]
    if not with_uv:
        cmd.append("--no-uv")
    result = subprocess.run(  # noqa: S607
        cmd,
        check=True,
        capture_output=True,
        text=True,
        encoding="utf-8",
        env=mock_uv_env(tmp_path) if with_uv else None,
    )

    assert result.returncode == 0, "Command failed to execute successfully"
    out = result.stdout
    assert "📦 Installing conda dependencies with `" in out
    assert "📦 Installing pip dependencies with `" in out
    assert (
        "📝 Found local dependencies: {'pyproject_toml_project': ['hatch_project'], 'setup_py_project': ['hatch_project', 'setuptools_project'], 'setuptools_project': ['hatch_project']}"
        in out
    )
    assert "📦 Installing project with `" in out
    if with_uv:
        # The mock uv shim must be picked up instead of plain pip.
        assert "uv pip install --python" in out
    else:
        pkgs = " ".join(
            f"-e {example_dir / name}" for name in sorted(EXAMPLE_PROJECTS)
        )
        assert f" -m pip install --no-deps {pkgs}" in out


def test_unidep_conda() -> None:
    """``unidep conda`` should emit the project's merged conda dependencies."""
    requirements_path = REPO_ROOT / "example" / "setup_py_project"
    assert requirements_path.exists(), "Requirements file does not exist"

    result = subprocess.run(  # noqa: S607
        ["unidep", "conda", "--file", str(requirements_path)],
        check=True,
        capture_output=True,
        text=True,
        encoding="utf-8",
    )

    assert result.returncode == 0, "Command failed to execute successfully"
    assert "pandas" in result.stdout


def test_unidep_pixi_cli_respects_overrides(tmp_path: Path) -> None:
    """``unidep pixi`` honors pin/skip/override flags and platform filters."""
    req_file = tmp_path / "requirements.yaml"
    req_file.write_text(
        textwrap.dedent(
            """\
            channels:
              - conda-forge
            dependencies:
              - numpy >=1.20
              - pandas >=2.0
              - scipy <1.10
              - pyobjc  # [osx]
            platforms:
              - linux-64
              - osx-arm64
            """,
        ),
    )

    output_file = tmp_path / "pixi.toml"
    cli_args = [
        "unidep",
        "pixi",
        "--file",
        str(req_file),
        "--output",
        str(output_file),
        "--name",
        "test-project",
        "--platform",
        "linux-64",
        "--ignore-pin",
        "numpy",
        "--skip-dependency",
        "pandas",
        "--overwrite-pin",
        "scipy>=1.11",
    ]
    result = subprocess.run(  # noqa: S607
        cli_args,
        check=True,
        capture_output=True,
        text=True,
        encoding="utf-8",
    )

    assert result.returncode == 0, "Command failed to execute successfully"
    with output_file.open("rb") as f:
        data = tomllib.load(f)

    deps = data["dependencies"]
    # --ignore-pin strips the version constraint.
    assert deps["numpy"] == "*"
    # --skip-dependency drops the package entirely.
    assert "pandas" not in deps
    # --overwrite-pin replaces the declared pin.
    assert deps["scipy"] == ">=1.11"
    # --platform restricts the workspace to the selected platform only.
    assert data["workspace"]["platforms"] == ["linux-64"]
    assert "target" not in data or "osx-arm64" not in data["target"]


def test_unidep_pixi_cli_channel_override(tmp_path: Path) -> None:
    req_file = tmp_path / "requirements.yaml"
    req_file.write_text(
        textwrap.dedent(
            """\
            channels:
              - conda-forge
            dependencies:
              - numpy
            platforms:
              - linux-64
            """,
        ),
    )

    output_file = tmp_path / "pixi.toml"
    result = subprocess.run(
        [  # noqa: S607
            "unidep",
            "pixi",
            "--file",
            str(req_file),
            "--output",
            str(output_file),
            "--channel",
            "defaults",
            "--channel",
            "bioconda",
        ],
        check=True,
        capture_output=True,
        text=True,
        encoding="utf-8",
    )

    assert result.returncode == 0
    with output_file.open("rb") as f:
  
Download .txt
gitextract_6n9izm0e/

├── .github/
│   ├── release.py
│   ├── renovate.json
│   ├── use-local-unidep.py
│   └── workflows/
│       ├── documentation-links.yml
│       ├── install-example-projects.yml
│       ├── pytest.yml
│       ├── release.yml
│       ├── toc.yaml
│       └── update-readme.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pre-commit-hooks.yaml
├── .readthedocs.yaml
├── LICENSE
├── README.md
├── bootstrap.sh
├── docs/
│   ├── Makefile
│   └── source/
│       ├── .gitignore
│       └── conf.py
├── example/
│   ├── README.md
│   ├── environment.yaml
│   ├── hatch2_project/
│   │   ├── README.md
│   │   ├── hatch2_project.py
│   │   └── pyproject.toml
│   ├── hatch_project/
│   │   ├── README.md
│   │   ├── hatch_project.py
│   │   ├── pyproject.toml
│   │   └── requirements.yaml
│   ├── pyproject_toml_project/
│   │   ├── README.md
│   │   ├── pyproject.toml
│   │   └── pyproject_toml_project.py
│   ├── setup_py_project/
│   │   ├── README.md
│   │   ├── pyproject.toml
│   │   ├── requirements.yaml
│   │   ├── setup.py
│   │   └── setup_py_project.py
│   └── setuptools_project/
│       ├── README.md
│       ├── pyproject.toml
│       ├── requirements.yaml
│       └── setuptools_project.py
├── pyproject.toml
├── tests/
│   ├── __init__.py
│   ├── helpers.py
│   ├── shared_local_install_monorepo/
│   │   ├── project1/
│   │   │   ├── pyproject.toml
│   │   │   └── requirements.yaml
│   │   ├── project2/
│   │   │   ├── pyproject.toml
│   │   │   └── requirements.yaml
│   │   └── shared/
│   │       ├── pyproject.toml
│   │       └── requirements.yaml
│   ├── simple_monorepo/
│   │   ├── common-requirements.yaml
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   ├── conda-lock.yml
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       ├── conda-lock.yml
│   │       └── requirements.yaml
│   ├── test-pip-and-conda-different-name/
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       └── requirements.yaml
│   ├── test-pip-package-with-conda-dependency/
│   │   ├── conda-lock.yml
│   │   ├── project1/
│   │   │   └── requirements.yaml
│   │   └── project2/
│   │       └── requirements.yaml
│   ├── test_cli.py
│   ├── test_cli_install_conda_lock.py
│   ├── test_conda_lock.py
│   ├── test_dependencies_parsing_internal.py
│   ├── test_dependency_selection.py
│   ├── test_local_wheels_and_zip.py
│   ├── test_parse_yaml_local_dependencies.py
│   ├── test_parse_yaml_nested_local_dependencies.py
│   ├── test_pip_indices.py
│   ├── test_pip_indices_cli.py
│   ├── test_pip_indices_integration.py
│   ├── test_pixi.py
│   ├── test_project_dependency_handling.py
│   ├── test_pypi_alternatives/
│   │   ├── main_app/
│   │   │   ├── main_app/
│   │   │   │   └── __init__.py
│   │   │   └── pyproject.toml
│   │   ├── shared_lib/
│   │   │   ├── pyproject.toml
│   │   │   └── shared_lib/
│   │   │       └── __init__.py
│   │   └── test_all_scenarios.sh
│   ├── test_pypi_alternatives.py
│   ├── test_pypi_alternatives_errors.py
│   ├── test_pypi_alternatives_integration.py
│   ├── test_setuptools_integration.py
│   ├── test_unidep.py
│   ├── test_utils.py
│   └── test_version_conflicts.py
└── unidep/
    ├── __init__.py
    ├── _cli.py
    ├── _conda_env.py
    ├── _conda_lock.py
    ├── _conflicts.py
    ├── _dependencies_parsing.py
    ├── _dependency_selection.py
    ├── _hatch_integration.py
    ├── _pixi.py
    ├── _pytest_plugin.py
    ├── _setuptools_integration.py
    ├── _version.py
    ├── platform_definitions.py
    ├── py.typed
    └── utils.py
Download .txt
SYMBOL INDEX (693 symbols across 37 files)

FILE: .github/release.py
  function get_repo (line 12) | def get_repo() -> git.Repo:
  function is_already_tagged (line 17) | def is_already_tagged(repo: git.Repo) -> bool:
  function should_skip_release (line 22) | def should_skip_release(repo: git.Repo) -> bool:
  function get_new_version (line 28) | def get_new_version(repo: git.Repo) -> str:
  function set_author (line 41) | def set_author(repo: git.Repo) -> None:
  function create_tag (line 51) | def create_tag(repo: git.Repo, new_version: str, release_notes: str) -> ...
  function push_tag (line 57) | def push_tag(repo: git.Repo, new_version: str) -> None:
  function get_commit_messages_since_last_release (line 63) | def get_commit_messages_since_last_release(repo: git.Repo) -> str:
  function format_release_notes (line 69) | def format_release_notes(commit_messages: str, new_version: str) -> str:
  function main (line 87) | def main() -> None:

FILE: docs/source/conf.py
  function replace_named_emojis (line 77) | def replace_named_emojis(input_file: Path, output_file: Path) -> None:
  function _change_alerts_to_admonitions (line 89) | def _change_alerts_to_admonitions(input_text: str) -> str:
  function change_alerts_to_admonitions (line 135) | def change_alerts_to_admonitions(input_file: Path, output_file: Path) ->...
  function replace_example_links (line 154) | def replace_example_links(input_file: Path, output_file: Path) -> None:
  function fix_anchors_with_named_emojis (line 166) | def fix_anchors_with_named_emojis(input_file: Path, output_file: Path) -...
  function normalize_slug (line 188) | def normalize_slug(slug: str) -> str:
  function split_markdown_by_headers (line 193) | def split_markdown_by_headers(
  function replace_header (line 243) | def replace_header(file_path: Path, new_header: str) -> None:
  function extract_toc_links (line 263) | def extract_toc_links(md_file_path: Path) -> dict[str, str]:
  function extract_headers_from_markdown (line 299) | def extract_headers_from_markdown(md_file_path: Path) -> list[tuple[int,...
  function replace_links_in_markdown (line 325) | def replace_links_in_markdown(
  function decrease_header_levels (line 364) | def decrease_header_levels(md_file_path: Path) -> None:
  function write_index_file (line 393) | def write_index_file(docs_path: Path, toctree_entries: list[str]) -> None:
  function process_readme_for_sphinx_docs (line 421) | def process_readme_for_sphinx_docs(readme_path: Path, docs_path: Path) -...

FILE: tests/helpers.py
  function maybe_as_toml (line 22) | def maybe_as_toml(toml_or_yaml: Literal["toml", "yaml"], p: Path) -> Path:

FILE: tests/test_cli.py
  function current_env_and_prefix (line 59) | def current_env_and_prefix() -> tuple[str, Path]:
  function test_install_command (line 75) | def test_install_command(project: str, capsys: pytest.CaptureFixture) ->...
  function test_unidep_install_dry_run (line 101) | def test_unidep_install_dry_run(project: str) -> None:
  function test_install_all_command (line 130) | def test_install_all_command(capsys: pytest.CaptureFixture) -> None:
  function test_install_command_deduplicates_shared_local_dependencies (line 150) | def test_install_command_deduplicates_shared_local_dependencies(
  function mock_uv_env (line 181) | def mock_uv_env(tmp_path: Path) -> dict[str, str]:
  function test_unidep_install_all_dry_run (line 197) | def test_unidep_install_all_dry_run(tmp_path: Path, with_uv: bool) -> No...
  function test_unidep_conda (line 240) | def test_unidep_conda() -> None:
  function test_unidep_pixi_cli_respects_overrides (line 264) | def test_unidep_pixi_cli_respects_overrides(tmp_path: Path) -> None:
  function test_unidep_pixi_cli_channel_override (line 321) | def test_unidep_pixi_cli_channel_override(tmp_path: Path) -> None:
  function test_unidep_pixi_cli_ranged_build_string (line 363) | def test_unidep_pixi_cli_ranged_build_string(tmp_path: Path) -> None:
  function test_merge_uses_selector_platforms_when_no_platforms_declared (line 403) | def test_merge_uses_selector_platforms_when_no_platforms_declared(
  function test_merge_uses_selector_platforms_even_for_losing_alternatives (line 476) | def test_merge_uses_selector_platforms_even_for_losing_alternatives(
  function test_merge_command_includes_selected_optional_dependencies (line 514) | def test_merge_command_includes_selected_optional_dependencies(
  function test_merge_command_includes_all_optional_dependencies (line 556) | def test_merge_command_includes_all_optional_dependencies(
  function test_merge_command_includes_local_only_optional_dependencies (line 598) | def test_merge_command_includes_local_only_optional_dependencies(
  function test_merge_optional_dependency_extras_rejects_unknown_group (line 651) | def test_merge_optional_dependency_extras_rejects_unknown_group(
  function test_merge_optional_dependency_extras_validates_across_all_files (line 680) | def test_merge_optional_dependency_extras_validates_across_all_files(
  function test_collect_available_optional_dependency_groups_preserves_local_only_groups (line 717) | def test_collect_available_optional_dependency_groups_preserves_local_on...
  function test_merge_optional_dependency_extras_reports_when_no_groups_exist (line 738) | def test_merge_optional_dependency_extras_reports_when_no_groups_exist(
  function test_flatten_selected_dependency_entries_includes_optional_groups (line 757) | def test_flatten_selected_dependency_entries_includes_optional_groups(
  function test_collect_selected_conda_like_platforms_uses_both_source_selectors (line 793) | def test_collect_selected_conda_like_platforms_uses_both_source_selectors(
  function test_collect_selected_conda_like_platforms_preserves_selector_platforms (line 820) | def test_collect_selected_conda_like_platforms_preserves_selector_platfo...
  function test_unidep_pixi_cli_optional_monorepo_env_includes_base (line 846) | def test_unidep_pixi_cli_optional_monorepo_env_includes_base(
  function test_unidep_file_not_found_error (line 910) | def test_unidep_file_not_found_error() -> None:
  function test_doubly_nested_project_folder_installable (line 934) | def test_doubly_nested_project_folder_installable(
  function test_pip_compile_command (line 1037) | def test_pip_compile_command(tmp_path: Path, capsys: pytest.CaptureFixtu...
  function test_install_non_existing_file (line 1066) | def test_install_non_existing_file() -> None:
  function test_install_non_existing_folder (line 1080) | def test_install_non_existing_folder(tmp_path: Path) -> None:
  function test_version (line 1100) | def test_version(capsys: pytest.CaptureFixture) -> None:
  function test_conda_env_list (line 1108) | def test_conda_env_list() -> None:
  function test_conda_root_prefix_uses_conda_info_when_env_vars_are_unset (line 1113) | def test_conda_root_prefix_uses_conda_info_when_env_vars_are_unset(
  function test_get_conda_executable_uses_env_var_fallback (line 1135) | def test_get_conda_executable_uses_env_var_fallback(
  function test_unidep_version_uses_rich_when_available (line 1147) | def test_unidep_version_uses_rich_when_available(
  function test_pip_optional (line 1195) | def test_pip_optional(tmp_path: Path) -> None:
  function test_capitalize_last_dir (line 1231) | def test_capitalize_last_dir() -> None:
  function test_find_conda_windows (line 1242) | def test_find_conda_windows() -> None:
  function test_find_windows_path_returns_existing_mamba_location (line 1364) | def test_find_windows_path_returns_existing_mamba_location(
  function test_find_windows_path_returns_existing_micromamba_location (line 1375) | def test_find_windows_path_returns_existing_micromamba_location(
  function set_env_var (line 1387) | def set_env_var(key: str, value: str) -> Generator[None, None, None]:
  function test_maybe_conda_run (line 1403) | def test_maybe_conda_run() -> None:
  function test_maybe_conda_run_without_executable_returns_empty (line 1418) | def test_maybe_conda_run_without_executable_returns_empty() -> None:
  function test_maybe_conda_run_without_active_environment_returns_empty (line 1422) | def test_maybe_conda_run_without_active_environment_returns_empty(
  function test_maybe_create_conda_env_args_creates_env (line 1430) | def test_maybe_create_conda_env_args_creates_env(
  function test_install_command_with_conda_lock_skips_dependency_install (line 1503) | def test_install_command_with_conda_lock_skips_dependency_install(
  function test_unidep_merge_cli_optional_dependencies (line 1553) | def test_unidep_merge_cli_optional_dependencies(tmp_path: Path) -> None:
  function test_unidep_merge_cli_all_optional_dependencies (line 1598) | def test_unidep_merge_cli_all_optional_dependencies(tmp_path: Path) -> N...
  function test_unidep_merge_cli_rejects_unknown_optional_dependency_group (line 1645) | def test_unidep_merge_cli_rejects_unknown_optional_dependency_group(
  function test_unidep_merge_cli_rejects_mutually_exclusive_optional_flags (line 1686) | def test_unidep_merge_cli_rejects_mutually_exclusive_optional_flags(
  function test_unidep_merge_cli_optional_dependencies_across_multiple_files (line 1727) | def test_unidep_merge_cli_optional_dependencies_across_multiple_files(

FILE: tests/test_cli_install_conda_lock.py
  function mock_subprocess_run (line 17) | def mock_subprocess_run(monkeypatch: pytest.MonkeyPatch) -> Mock:
  function mock_print (line 24) | def mock_print(monkeypatch: pytest.MonkeyPatch) -> Mock:
  function test_create_env_from_lock_dry_run (line 38) | def test_create_env_from_lock_dry_run(
  function test_create_env_from_lock_no_env_specified (line 99) | def test_create_env_from_lock_no_env_specified(mock_print: Mock) -> None:
  function test_create_env_from_lock_verifies_installation_for_conda (line 118) | def test_create_env_from_lock_verifies_installation_for_conda(
  function test_verify_conda_lock_installed_not_found (line 135) | def test_verify_conda_lock_installed_not_found(
  function test_verify_conda_lock_installed_not_working (line 150) | def test_verify_conda_lock_installed_not_working(

FILE: tests/test_conda_lock.py
  function test_conda_lock_command (line 31) | def test_conda_lock_command(tmp_path: Path) -> None:
  function test_conda_lock_command_pip_package_with_conda_dependency (line 65) | def test_conda_lock_command_pip_package_with_conda_dependency(tmp_path: ...
  function test_conda_lock_global_infers_selector_platforms (line 148) | def test_conda_lock_global_infers_selector_platforms(tmp_path: Path) -> ...
  function test_conda_lock_command_pip_and_conda_different_name (line 183) | def test_conda_lock_command_pip_and_conda_different_name(
  function test_remove_top_comments (line 210) | def test_remove_top_comments(tmp_path: Path) -> None:
  function test_handle_missing_keys (line 224) | def test_handle_missing_keys(capsys: pytest.CaptureFixture) -> None:
  function test_handle_missing_keys_adds_matching_conda_package (line 261) | def test_handle_missing_keys_adds_matching_conda_package() -> None:
  function test_download_and_get_package_names_reads_site_packages (line 295) | def test_download_and_get_package_names_reads_site_packages(
  function test_download_and_get_package_names_returns_none_without_python_dirs (line 333) | def test_download_and_get_package_names_returns_none_without_python_dirs(
  function test_download_and_get_package_names_returns_none_without_lib_or_site_packages (line 368) | def test_download_and_get_package_names_returns_none_without_lib_or_site...
  function test_download_and_get_package_names_returns_none_without_site_packages (line 403) | def test_download_and_get_package_names_returns_none_without_site_packages(
  function test_conda_lock_subpackages_skips_root_requirements (line 438) | def test_conda_lock_subpackages_skips_root_requirements(
  function test_check_consistent_lock_files_reports_mismatches (line 476) | def test_check_consistent_lock_files_reports_mismatches(tmp_path: Path) ...
  function test_conda_lock_subpackage_uses_selected_same_name_pip_winner (line 514) | def test_conda_lock_subpackage_uses_selected_same_name_pip_winner(
  function test_conda_lock_subpackage_uses_selected_paired_different_name_pip_winner (line 556) | def test_conda_lock_subpackage_uses_selected_paired_different_name_pip_w...
  function test_conda_lock_subpackage_uses_selected_pip_winner_with_extras (line 598) | def test_conda_lock_subpackage_uses_selected_pip_winner_with_extras(
  function test_circular_dependency (line 651) | def test_circular_dependency() -> None:

FILE: tests/test_dependencies_parsing_internal.py
  function test_move_optional_dependencies_star_promotes_all_groups (line 19) | def test_move_optional_dependencies_star_promotes_all_groups(
  function test_parse_requirements_skips_empty_paired_dependency_after_filtering (line 41) | def test_parse_requirements_skips_empty_paired_dependency_after_filtering(
  function test_is_empty_git_submodule_false_for_non_directory (line 59) | def test_is_empty_git_submodule_false_for_non_directory(tmp_path: Path) ...

FILE: tests/test_dependency_selection.py
  function _write_requirements (line 26) | def _write_requirements(tmp_path: Path, content: str) -> Path:
  function _selected_summary (line 32) | def _selected_summary(
  function test_origin_to_text_includes_optional_group_and_local_chain (line 44) | def test_origin_to_text_includes_optional_group_and_local_chain() -> None:
  function test_origin_to_text_normalizes_windows_style_local_chain (line 56) | def test_origin_to_text_normalizes_windows_style_local_chain() -> None:
  function test_joined_pinnings_are_safely_satisfiable_for_user_shaped_pin_strings (line 74) | def test_joined_pinnings_are_safely_satisfiable_for_user_shaped_pin_stri...
  function test_select_conda_like_requirements_prefers_pinned_conda_over_unpinned_pip (line 88) | def test_select_conda_like_requirements_prefers_pinned_conda_over_unpinn...
  function test_select_conda_like_requirements_prefers_pip_extras_over_conda (line 113) | def test_select_conda_like_requirements_prefers_pip_extras_over_conda(
  function test_select_conda_like_requirements_prefers_narrower_pinned_selector_scope (line 138) | def test_select_conda_like_requirements_prefers_narrower_pinned_selector...
  function test_select_conda_like_requirements_reports_final_collisions_with_origins (line 167) | def test_select_conda_like_requirements_reports_final_collisions_with_or...
  function test_select_pip_requirements_merges_supported_wildcard_pinnings (line 198) | def test_select_pip_requirements_merges_supported_wildcard_pinnings(
  function test_select_pip_requirements_merges_compatible_compatible_release_pinnings (line 224) | def test_select_pip_requirements_merges_compatible_compatible_release_pi...
  function test_select_pip_requirements_rejects_unsafely_merged_wildcard_pinnings (line 249) | def test_select_pip_requirements_rejects_unsafely_merged_wildcard_pinnings(
  function test_select_pip_requirements_rejects_multiple_exact_pinnings (line 271) | def test_select_pip_requirements_rejects_multiple_exact_pinnings(
  function test_collapse_selected_universals_collapses_user_declared_universal_dependencies (line 296) | def test_collapse_selected_universals_collapses_user_declared_universal_...

FILE: tests/test_local_wheels_and_zip.py
  function test_local_wheel (line 15) | def test_local_wheel(tmp_path: Path, toml_or_yaml: Literal["toml", "yaml...
  function test_local_zip (line 42) | def test_local_zip(tmp_path: Path, toml_or_yaml: Literal["toml", "yaml"]...
  function test_local_wheel_and_folder (line 64) | def test_local_wheel_and_folder(
  function test_local_wheel_with_extras (line 99) | def test_local_wheel_with_extras(
  function test_local_wheel_in_dependencies (line 124) | def test_local_wheel_in_dependencies(
  function test_nested_local_dependencies_with_wheel (line 149) | def test_nested_local_dependencies_with_wheel(

FILE: tests/test_parse_yaml_local_dependencies.py
  function test_circular_local_dependencies (line 36) | def test_circular_local_dependencies(
  function test_parse_local_dependencies (line 85) | def test_parse_local_dependencies(
  function test_parse_local_dependencies_respects_use (line 128) | def test_parse_local_dependencies_respects_use(
  function test_nested_local_dependencies (line 173) | def test_nested_local_dependencies(
  function test_nonexistent_local_dependencies (line 244) | def test_nonexistent_local_dependencies(
  function test_no_local_dependencies (line 265) | def test_no_local_dependencies(
  function test_mixed_real_and_placeholder_dependencies (line 290) | def test_mixed_real_and_placeholder_dependencies(
  function test_parse_local_dependencies_pip_installable (line 317) | def test_parse_local_dependencies_pip_installable(
  function test_parse_local_dependencies_pip_installable_with_non_installable_project (line 382) | def test_parse_local_dependencies_pip_installable_with_non_installable_p...
  function test_local_non_unidep_managed_dependency (line 442) | def test_local_non_unidep_managed_dependency(tmp_path: Path) -> None:
  function test_local_non_unidep_and_non_installable_managed_dependency (line 467) | def test_local_non_unidep_and_non_installable_managed_dependency(
  function test_local_empty_git_submodule_dependency (line 487) | def test_local_empty_git_submodule_dependency(
  function test_parse_local_dependencies_missing (line 509) | def test_parse_local_dependencies_missing(
  function test_parse_local_dependencies_without_local_deps_themselves (line 535) | def test_parse_local_dependencies_without_local_deps_themselves(
  function test_parse_requirements_unmanaged_local_dependency (line 581) | def test_parse_requirements_unmanaged_local_dependency(tmp_path: Path) -...

FILE: tests/test_parse_yaml_nested_local_dependencies.py
  function test_nested_local_dependencies_multiple_levels (line 31) | def test_nested_local_dependencies_multiple_levels(
  function test_nested_local_dependencies_with_circular_reference (line 111) | def test_nested_local_dependencies_with_circular_reference(
  function test_nested_local_dependencies_with_non_unidep_managed_project (line 172) | def test_nested_local_dependencies_with_non_unidep_managed_project(
  function test_skip_propagates_to_nested_local_dependency (line 232) | def test_skip_propagates_to_nested_local_dependency(
  function test_pypi_override_propagates_to_nested_local_dependency (line 276) | def test_pypi_override_propagates_to_nested_local_dependency(
  function test_skip_propagates_when_nested_entry_is_dict (line 321) | def test_skip_propagates_when_nested_entry_is_dict(
  function test_nested_local_dependencies_with_extras (line 365) | def test_nested_local_dependencies_with_extras(

FILE: tests/test_pip_indices.py
  class TestPipIndicesParsing (line 19) | class TestPipIndicesParsing:
    method test_parse_pip_indices_from_yaml (line 22) | def test_parse_pip_indices_from_yaml(self, tmp_path: Path) -> None:
    method test_parse_pip_indices_from_toml (line 47) | def test_parse_pip_indices_from_toml(self, tmp_path: Path) -> None:
    method test_parse_empty_pip_indices (line 73) | def test_parse_empty_pip_indices(self, tmp_path: Path) -> None:
    method test_parse_pip_indices_with_env_vars (line 91) | def test_parse_pip_indices_with_env_vars(self, tmp_path: Path) -> None:
    method test_merge_pip_indices_from_multiple_files (line 113) | def test_merge_pip_indices_from_multiple_files(self, tmp_path: Path) -...
    method test_pip_indices_ordering_preserved (line 160) | def test_pip_indices_ordering_preserved(self, tmp_path: Path) -> None:
    method test_collect_pip_indices_supports_single_string (line 187) | def test_collect_pip_indices_supports_single_string(self) -> None:
    method test_collect_pip_indices_rejects_invalid_value_type (line 195) | def test_collect_pip_indices_rejects_invalid_value_type(self) -> None:
    method test_collect_pip_indices_rejects_non_string_entries (line 203) | def test_collect_pip_indices_rejects_non_string_entries(self) -> None:
  class TestEnvironmentGeneration (line 214) | class TestEnvironmentGeneration:
    method test_environment_yaml_with_pip_indices (line 217) | def test_environment_yaml_with_pip_indices(self, tmp_path: Path) -> None:
    method test_environment_yaml_without_pip_indices (line 258) | def test_environment_yaml_without_pip_indices(self, tmp_path: Path) ->...
    method test_environment_yaml_with_env_vars_in_indices (line 277) | def test_environment_yaml_with_env_vars_in_indices(self, tmp_path: Pat...
  class TestPipCommandConstruction (line 304) | class TestPipCommandConstruction:
    method test_build_pip_command_with_indices (line 307) | def test_build_pip_command_with_indices(self) -> None:
    method test_build_pip_command_without_indices (line 318) | def test_build_pip_command_without_indices(self) -> None:
    method test_build_pip_command_single_index (line 324) | def test_build_pip_command_single_index(self) -> None:
    method test_uv_compatibility (line 331) | def test_uv_compatibility(self) -> None:
  class TestEdgeCases (line 346) | class TestEdgeCases:
    method test_invalid_url_format (line 349) | def test_invalid_url_format(self, tmp_path: Path) -> None:
    method test_duplicate_indices (line 369) | def test_duplicate_indices(self, tmp_path: Path) -> None:
    method test_empty_string_in_indices (line 394) | def test_empty_string_in_indices(self, tmp_path: Path) -> None:
    method test_missing_env_var_in_url (line 414) | def test_missing_env_var_in_url(self, tmp_path: Path) -> None:

FILE: tests/test_pip_indices_cli.py
  class TestBuildPipIndexArguments (line 15) | class TestBuildPipIndexArguments:
    method test_empty_indices (line 18) | def test_empty_indices(self) -> None:
    method test_single_index (line 23) | def test_single_index(self) -> None:
    method test_multiple_indices (line 29) | def test_multiple_indices(self) -> None:
    method test_environment_variable_expansion (line 46) | def test_environment_variable_expansion(self) -> None:
    method test_missing_environment_variable (line 70) | def test_missing_environment_variable(self) -> None:
    method test_complex_environment_variables (line 81) | def test_complex_environment_variables(self) -> None:
  class TestPipInstallLocalWithIndices (line 104) | class TestPipInstallLocalWithIndices:
    method test_pip_install_with_indices (line 109) | def test_pip_install_with_indices(
    method test_uv_install_with_indices (line 141) | def test_uv_install_with_indices(
  class TestCondaEnvWithPipRepositories (line 179) | class TestCondaEnvWithPipRepositories:
    method test_write_env_with_pip_repositories (line 182) | def test_write_env_with_pip_repositories(self, tmp_path: Path) -> None:
    method test_write_env_without_pip_repositories (line 213) | def test_write_env_without_pip_repositories(self, tmp_path: Path) -> N...
  class TestCreateCondaEnvSpecificationCompatibility (line 232) | class TestCreateCondaEnvSpecificationCompatibility:
    method test_accepts_string_keyword_pip_indices (line 235) | def test_accepts_string_keyword_pip_indices(self) -> None:
    method test_accepts_legacy_positional_selector (line 247) | def test_accepts_legacy_positional_selector(self) -> None:
    method test_accepts_legacy_positional_pip_indices_and_selector (line 254) | def test_accepts_legacy_positional_pip_indices_and_selector(self) -> N...
    method test_rejects_missing_platforms_argument (line 267) | def test_rejects_missing_platforms_argument(self) -> None:
    method test_rejects_too_many_positionals_with_platforms_keyword (line 272) | def test_rejects_too_many_positionals_with_platforms_keyword(self) -> ...
    method test_rejects_duplicate_pip_indices_with_platforms_keyword (line 286) | def test_rejects_duplicate_pip_indices_with_platforms_keyword(self) ->...
    method test_rejects_duplicate_pip_indices_in_legacy_two_argument_form (line 300) | def test_rejects_duplicate_pip_indices_in_legacy_two_argument_form(sel...
    method test_rejects_duplicate_pip_indices_in_legacy_three_argument_form (line 314) | def test_rejects_duplicate_pip_indices_in_legacy_three_argument_form(
    method test_rejects_too_many_legacy_positional_arguments (line 331) | def test_rejects_too_many_legacy_positional_arguments(self) -> None:
  class TestInstallCommandWithIndices (line 347) | class TestInstallCommandWithIndices:
    method test_install_command_with_pip_indices (line 353) | def test_install_command_with_pip_indices(
    method test_install_command_with_uv_and_indices (line 414) | def test_install_command_with_uv_and_indices(
  class TestPipIndicesIntegration (line 469) | class TestPipIndicesIntegration:
    method test_full_workflow_with_indices (line 472) | def test_full_workflow_with_indices(self, tmp_path: Path) -> None:
    method test_conda_lock_with_pip_indices (line 525) | def test_conda_lock_with_pip_indices(
    method test_merge_command_with_indices (line 566) | def test_merge_command_with_indices(self, tmp_path: Path) -> None:

FILE: tests/test_pip_indices_integration.py
  class TestUnidepInstallIntegration (line 12) | class TestUnidepInstallIntegration:
    method mock_project (line 16) | def mock_project(self, tmp_path: Path) -> Path:
    method test_install_with_pip_indices (line 62) | def test_install_with_pip_indices(self, mock_run: Any, mock_project: P...
    method test_install_with_env_var_indices (line 86) | def test_install_with_env_var_indices(self, mock_run: Any, tmp_path: P...
    method test_install_with_uv_backend (line 124) | def test_install_with_uv_backend(self, mock_project: Path) -> None:  #...
    method test_install_without_pip_indices (line 147) | def test_install_without_pip_indices(self, tmp_path: Path) -> None:
  class TestUnidepCondaLockIntegration (line 174) | class TestUnidepCondaLockIntegration:
    method mock_monorepo (line 178) | def mock_monorepo(self, tmp_path: Path) -> Path:
    method test_conda_lock_generates_pip_repositories (line 223) | def test_conda_lock_generates_pip_repositories(self, mock_monorepo: Pa...
    method test_conda_lock_with_merged_indices (line 249) | def test_conda_lock_with_merged_indices(self, mock_monorepo: Path) -> ...
    method test_conda_lock_creates_valid_lockfile (line 271) | def test_conda_lock_creates_valid_lockfile(self, tmp_path: Path) -> None:
  class TestErrorHandling (line 319) | class TestErrorHandling:
    method test_install_with_unreachable_index (line 322) | def test_install_with_unreachable_index(self, tmp_path: Path) -> None:
    method test_install_with_conflicting_packages (line 349) | def test_install_with_conflicting_packages(self, tmp_path: Path) -> None:
    method test_merge_with_circular_dependencies (line 375) | def test_merge_with_circular_dependencies(self, tmp_path: Path) -> None:
  class TestCompatibility (line 421) | class TestCompatibility:
    method test_pip_indices_with_platforms (line 424) | def test_pip_indices_with_platforms(self, tmp_path: Path) -> None:
    method test_pip_indices_with_optional_dependencies (line 456) | def test_pip_indices_with_optional_dependencies(self, tmp_path: Path) ...
    method test_coexistence_with_uv_index_config (line 489) | def test_coexistence_with_uv_index_config(self, tmp_path: Path) -> None:

FILE: tests/test_pixi.py
  function _write_file (line 43) | def _write_file(path: Path, content: str) -> Path:
  function _generate_and_load (line 48) | def _generate_and_load(
  function _setup_app_lib_other (line 60) | def _setup_app_lib_other(
  function test_simple_pixi_generation (line 107) | def test_simple_pixi_generation(tmp_path: Path) -> None:
  function test_channels_resolution_behaviors (line 151) | def test_channels_resolution_behaviors(tmp_path: Path) -> None:
  function test_monorepo_pixi_generation (line 206) | def test_monorepo_pixi_generation(tmp_path: Path) -> None:
  function test_pixi_monorepo_feature_names_unique_for_same_leaf_dir (line 271) | def test_pixi_monorepo_feature_names_unique_for_same_leaf_dir(tmp_path: ...
  function test_pixi_monorepo_feature_name_not_empty_for_relative_root_file (line 322) | def test_pixi_monorepo_feature_name_not_empty_for_relative_root_file(
  function test_pixi_with_version_pins (line 363) | def test_pixi_with_version_pins(tmp_path: Path) -> None:
  function test_pixi_normalizes_single_equals_for_pip_pins (line 394) | def test_pixi_normalizes_single_equals_for_pip_pins(tmp_path: Path) -> N...
  function test_pixi_prefers_pip_pin_over_unpinned_conda (line 413) | def test_pixi_prefers_pip_pin_over_unpinned_conda(tmp_path: Path) -> None:
  function test_pixi_prefers_conda_for_unpinned_both_sources (line 435) | def test_pixi_prefers_conda_for_unpinned_both_sources(tmp_path: Path) ->...
  function test_pixi_prefers_conda_for_equally_pinned_both_sources (line 458) | def test_pixi_prefers_conda_for_equally_pinned_both_sources(tmp_path: Pa...
  function test_pixi_reconciles_single_platform_conflict (line 547) | def test_pixi_reconciles_single_platform_conflict(
  function test_pixi_reconcile_is_order_independent_for_universal_and_target_conflicts (line 591) | def test_pixi_reconcile_is_order_independent_for_universal_and_target_co...
  function test_pixi_demoted_reconciliation_is_order_independent_with_repeated_universals (line 632) | def test_pixi_demoted_reconciliation_is_order_independent_with_repeated_...
  function test_pixi_reconciles_multiplatform_conflict (line 698) | def test_pixi_reconciles_multiplatform_conflict(
  function test_pixi_with_local_package (line 727) | def test_pixi_with_local_package(tmp_path: Path) -> None:
  function test_pixi_single_file_editable_path_relative_to_output (line 770) | def test_pixi_single_file_editable_path_relative_to_output(tmp_path: Pat...
  function test_pixi_single_file_includes_local_dependency_package_as_editable (line 803) | def test_pixi_single_file_includes_local_dependency_package_as_editable(
  function test_pixi_empty_dependencies (line 850) | def test_pixi_empty_dependencies(tmp_path: Path) -> None:
  function test_pixi_with_platform_selectors (line 877) | def test_pixi_with_platform_selectors(tmp_path: Path) -> None:
  function test_pixi_selector_targets_preserved_without_explicit_platforms (line 910) | def test_pixi_selector_targets_preserved_without_explicit_platforms(
  function test_pixi_with_multiple_platform_selectors (line 936) | def test_pixi_with_multiple_platform_selectors(tmp_path: Path) -> None:
  function test_pixi_monorepo_with_platform_selectors (line 968) | def test_pixi_monorepo_with_platform_selectors(tmp_path: Path) -> None:
  function test_pixi_monorepo_preserves_selector_only_platforms_without_declared_platforms (line 1019) | def test_pixi_monorepo_preserves_selector_only_platforms_without_declare...
  function test_pixi_single_file_preserves_selector_only_platforms_without_declared_platforms (line 1062) | def test_pixi_single_file_preserves_selector_only_platforms_without_decl...
  function test_pixi_monorepo_optional_group_preserves_selector_only_platforms (line 1093) | def test_pixi_monorepo_optional_group_preserves_selector_only_platforms(
  function test_pixi_single_file_optional_group_preserves_selector_only_platforms (line 1137) | def test_pixi_single_file_optional_group_preserves_selector_only_platforms(
  function test_pixi_single_file_optional_group_keeps_platform_specific_dep_targeted (line 1168) | def test_pixi_single_file_optional_group_keeps_platform_specific_dep_tar...
  function test_pixi_rejects_contradictory_pip_constraints (line 1211) | def test_pixi_rejects_contradictory_pip_constraints(
  function test_pixi_monorepo_with_local_packages (line 1231) | def test_pixi_monorepo_with_local_packages(tmp_path: Path) -> None:
  function test_pixi_monorepo_keeps_unmanaged_local_dependency_as_editable (line 1296) | def test_pixi_monorepo_keeps_unmanaged_local_dependency_as_editable(
  function test_pixi_monorepo_optional_unmanaged_deduped_against_base (line 1347) | def test_pixi_monorepo_optional_unmanaged_deduped_against_base(
  function test_pixi_monorepo_optional_unmanaged_only_group_creates_feature (line 1408) | def test_pixi_monorepo_optional_unmanaged_only_group_creates_feature(
  function test_pixi_monorepo_editable_paths_use_project_paths (line 1471) | def test_pixi_monorepo_editable_paths_use_project_paths(tmp_path: Path) ...
  function test_pixi_monorepo_shared_local_file_becomes_single_feature (line 1532) | def test_pixi_monorepo_shared_local_file_becomes_single_feature(tmp_path...
  function test_pixi_monorepo_transitive_local_dependencies_are_composed_in_envs (line 1604) | def test_pixi_monorepo_transitive_local_dependencies_are_composed_in_envs(
  function test_pixi_monorepo_ignores_wheel_local_dependencies_in_graph (line 1678) | def test_pixi_monorepo_ignores_wheel_local_dependencies_in_graph(
  function test_pixi_single_file_local_dependency_use_modes (line 1718) | def test_pixi_single_file_local_dependency_use_modes(tmp_path: Path) -> ...
  function test_pixi_with_directory_input (line 1770) | def test_pixi_with_directory_input(tmp_path: Path) -> None:
  function test_pixi_verbose_output (line 1798) | def test_pixi_verbose_output(tmp_path: Path, capsys: object) -> None:
  function test_pixi_fallback_package_name (line 1821) | def test_pixi_fallback_package_name(tmp_path: Path) -> None:
  function test_pixi_filtering_removes_empty_targets (line 1855) | def test_pixi_filtering_removes_empty_targets(tmp_path: Path) -> None:
  function test_pixi_stdout_output (line 1882) | def test_pixi_stdout_output(tmp_path: Path, capsys: object) -> None:
  function test_pixi_monorepo_with_directory_input (line 1907) | def test_pixi_monorepo_with_directory_input(tmp_path: Path) -> None:
  function test_pixi_monorepo_filtering_removes_empty_feature_targets (line 1948) | def test_pixi_monorepo_filtering_removes_empty_feature_targets(tmp_path:...
  function test_pixi_default_cwd (line 1992) | def test_pixi_default_cwd(tmp_path: Path, monkeypatch: pytest.MonkeyPatc...
  function test_pixi_optional_dependencies_single_file (line 2019) | def test_pixi_optional_dependencies_single_file(tmp_path: Path) -> None:
  function test_pixi_optional_dependencies_single_group (line 2074) | def test_pixi_optional_dependencies_single_group(tmp_path: Path) -> None:
  function test_pixi_single_file_optional_group_named_all_keeps_unique_env (line 2106) | def test_pixi_single_file_optional_group_named_all_keeps_unique_env(
  function test_pixi_single_file_optional_local_dependency_stays_optional (line 2139) | def test_pixi_single_file_optional_local_dependency_stays_optional(
  function test_pixi_optional_dependencies_monorepo (line 2177) | def test_pixi_optional_dependencies_monorepo(tmp_path: Path) -> None:
  function test_pixi_monorepo_optional_local_dependency_is_only_in_optional_env (line 2235) | def test_pixi_monorepo_optional_local_dependency_is_only_in_optional_env(
  function test_pixi_monorepo_optional_group_with_only_local_deps_creates_env (line 2260) | def test_pixi_monorepo_optional_group_with_only_local_deps_creates_env(
  function test_pixi_monorepo_optional_feature_name_collision_does_not_overwrite_base_feature (line 2285) | def test_pixi_monorepo_optional_feature_name_collision_does_not_overwrit...
  function test_pixi_monorepo_default_env_excludes_optional_features (line 2328) | def test_pixi_monorepo_default_env_excludes_optional_features(
  function test_pixi_empty_platform_override_uses_file_platforms (line 2376) | def test_pixi_empty_platform_override_uses_file_platforms(tmp_path: Path...
  function test_pixi_monorepo_keeps_optional_groups_when_base_feature_empty (line 2400) | def test_pixi_monorepo_keeps_optional_groups_when_base_feature_empty(
  function test_pixi_monorepo_skips_empty_optional_feature_group (line 2442) | def test_pixi_monorepo_skips_empty_optional_feature_group(tmp_path: Path...
  function test_derive_feature_names_handles_commonpath_valueerror (line 2481) | def test_derive_feature_names_handles_commonpath_valueerror(
  function test_derive_feature_names_handles_relative_to_valueerror (line 2505) | def test_derive_feature_names_handles_relative_to_valueerror(
  function test_editable_dependency_path_relative_forms (line 2536) | def test_editable_dependency_path_relative_forms(tmp_path: Path) -> None:
  function test_editable_dependency_path_cross_drive (line 2548) | def test_editable_dependency_path_cross_drive(
  function test_discover_local_dependency_graph_skips_non_local_and_missing (line 2571) | def test_discover_local_dependency_graph_skips_non_local_and_missing(
  function test_parse_direct_requirements_for_node_extras (line 2636) | def test_parse_direct_requirements_for_node_extras(
  function test_collect_transitive_nodes_deduplicates_seen_nodes (line 2657) | def test_collect_transitive_nodes_deduplicates_seen_nodes(tmp_path: Path...
  function test_pixi_with_build_string (line 2666) | def test_pixi_with_build_string(tmp_path: Path) -> None:
  function test_pixi_with_pip_extras (line 2692) | def test_pixi_with_pip_extras(tmp_path: Path) -> None:
  function test_pixi_with_merged_constraints (line 2722) | def test_pixi_with_merged_constraints(tmp_path: Path) -> None:
  function test_pixi_optional_local_dep_does_not_leak_base_local_deps (line 2748) | def test_pixi_optional_local_dep_does_not_leak_base_local_deps(
  function test_pixi_demoted_universal_weak_target (line 2828) | def test_pixi_demoted_universal_weak_target(
  function test_pixi_demoted_universal_uses_latest_merged_constraint (line 2861) | def test_pixi_demoted_universal_uses_latest_merged_constraint(
  function test_pixi_demoted_universal_merges_constraints_across_demotions (line 2894) | def test_pixi_demoted_universal_merges_constraints_across_demotions(
  function test_pixi_raises_when_losing_pip_alternative_is_internally_contradictory (line 2919) | def test_pixi_raises_when_losing_pip_alternative_is_internally_contradic...
  function test_parse_version_build_whitespace_only (line 2943) | def test_parse_version_build_whitespace_only() -> None:
  function test_make_pip_version_spec_dict_with_extras (line 2947) | def test_make_pip_version_spec_dict_with_extras() -> None:
  function test_with_unique_order_paths_deduplicates (line 2952) | def test_with_unique_order_paths_deduplicates(tmp_path: Path) -> None:
  function test_unique_optional_feature_name_double_collision (line 2959) | def test_unique_optional_feature_name_double_collision() -> None:
  function test_unique_env_name_triple_collision (line 2970) | def test_unique_env_name_triple_collision() -> None:
  function test_add_single_file_optional_environments_noop_without_features (line 2975) | def test_add_single_file_optional_environments_noop_without_features() -...
  function test_feature_platforms_for_entries_prefers_override (line 2981) | def test_feature_platforms_for_entries_prefers_override() -> None:
  function test_extract_dependencies_handles_universal_pip_and_mixed_buckets (line 3001) | def test_extract_dependencies_handles_universal_pip_and_mixed_buckets() ...
  function test_filter_targets_by_platforms_removes_empty_sections (line 3030) | def test_filter_targets_by_platforms_removes_empty_sections() -> None:
  function test_pixi_single_file_optional_local_dep_transitive_dedup (line 3050) | def test_pixi_single_file_optional_local_dep_transitive_dedup(
  function test_pixi_single_file_optional_group_demoted_universal (line 3123) | def test_pixi_single_file_optional_group_demoted_universal(
  function test_pixi_monorepo_demotion (line 3203) | def test_pixi_monorepo_demotion(
  function test_pixi_monorepo_feature_subset_does_not_leak_universal_deps (line 3251) | def test_pixi_monorepo_feature_subset_does_not_leak_universal_deps(
  function test_pixi_single_file_env_name_collision (line 3294) | def test_pixi_single_file_env_name_collision(tmp_path: Path) -> None:
  function test_pixi_discover_graph_skips_non_list_optional_group (line 3322) | def test_pixi_discover_graph_skips_non_list_optional_group(
  function test_pixi_discover_graph_skips_non_local_optional_dep (line 3342) | def test_pixi_discover_graph_skips_non_local_optional_dep(
  function test_pixi_discover_graph_skips_non_installable_optional_unmanaged (line 3383) | def test_pixi_discover_graph_skips_non_installable_optional_unmanaged(
  function test_restore_demoted_skips_when_still_in_universal (line 3414) | def test_restore_demoted_skips_when_still_in_universal(tmp_path: Path) -...
  function test_pixi_monorepo_optional_local_feature_not_in_pixi_data (line 3434) | def test_pixi_monorepo_optional_local_feature_not_in_pixi_data(
  function test_pixi_single_file_installable_optional_local_dep_not_in_root (line 3487) | def test_pixi_single_file_installable_optional_local_dep_not_in_root(
  function test_pixi_monorepo_optional_aggregator_transitive_deps_in_env (line 3533) | def test_pixi_monorepo_optional_aggregator_transitive_deps_in_env(

FILE: tests/test_project_dependency_handling.py
  function test_project_dependency_handling (line 41) | def test_project_dependency_handling(
  function test_project_dependency_handling_in_pyproject_toml (line 72) | def test_project_dependency_handling_in_pyproject_toml(

FILE: tests/test_pypi_alternatives.py
  function test_parse_local_dependency_item_string (line 37) | def test_parse_local_dependency_item_string() -> None:
  function test_parse_local_dependency_item_dict (line 44) | def test_parse_local_dependency_item_dict() -> None:
  function test_parse_local_dependency_item_dict_with_use (line 51) | def test_parse_local_dependency_item_dict_with_use() -> None:
  function test_parse_local_dependency_item_dict_no_pypi (line 62) | def test_parse_local_dependency_item_dict_no_pypi() -> None:
  function test_parse_local_dependency_item_invalid_dict (line 69) | def test_parse_local_dependency_item_invalid_dict() -> None:
  function test_parse_local_dependency_item_invalid_type (line 79) | def test_parse_local_dependency_item_invalid_type() -> None:
  function test_parse_local_dependency_item_invalid_use (line 86) | def test_parse_local_dependency_item_invalid_use() -> None:
  function test_parse_local_dependency_item_use_pypi_requires_pypi (line 93) | def test_parse_local_dependency_item_use_pypi_requires_pypi() -> None:
  function test_get_local_dependencies_mixed_format (line 101) | def test_get_local_dependencies_mixed_format(
  function test_setuptools_integration_with_pypi_alternatives (line 148) | def test_setuptools_integration_with_pypi_alternatives(
  function test_local_dependency_use_pypi_injects_dependency (line 220) | def test_local_dependency_use_pypi_injects_dependency(tmp_path: Path) ->...
  function test_standard_string_format (line 244) | def test_standard_string_format(
  function test_yaml_to_toml_with_pypi_alternatives (line 273) | def test_yaml_to_toml_with_pypi_alternatives(
  function test_edge_cases (line 309) | def test_edge_cases(tmp_path: Path) -> None:  # noqa: ARG001
  function test_local_dependency_with_extras (line 334) | def test_local_dependency_with_extras(tmp_path: Path) -> None:
  function test_recursive_local_dependencies_with_pypi_alternatives (line 375) | def test_recursive_local_dependencies_with_pypi_alternatives(tmp_path: P...
  function test_empty_local_dependencies_list (line 423) | def test_empty_local_dependencies_list(tmp_path: Path) -> None:
  function test_local_dependencies_with_extras (line 448) | def test_local_dependencies_with_extras(tmp_path: Path) -> None:
  function test_complex_path_structures (line 503) | def test_complex_path_structures(tmp_path: Path) -> None:
  function test_invalid_yaml_handling (line 555) | def test_invalid_yaml_handling(tmp_path: Path) -> None:
  function test_pypi_alternatives_with_absolute_paths (line 579) | def test_pypi_alternatives_with_absolute_paths(tmp_path: Path) -> None:
  function test_pypi_alternatives_when_local_missing (line 614) | def test_pypi_alternatives_when_local_missing(tmp_path: Path) -> None:
  function test_mixed_string_and_dict_in_toml (line 649) | def test_mixed_string_and_dict_in_toml(
  function test_wheel_file_with_pypi_alternatives (line 688) | def test_wheel_file_with_pypi_alternatives(tmp_path: Path) -> None:
  function test_skip_local_deps_with_pypi_alternatives (line 743) | def test_skip_local_deps_with_pypi_alternatives(tmp_path: Path) -> None:
  function test_regular_local_deps_with_existing_paths (line 791) | def test_regular_local_deps_with_existing_paths(tmp_path: Path) -> None:
  function test_local_deps_with_extras_and_pypi_alternatives (line 873) | def test_local_deps_with_extras_and_pypi_alternatives(tmp_path: Path) ->...
  function test_local_deps_missing_with_pypi_fallback (line 934) | def test_local_deps_missing_with_pypi_fallback(tmp_path: Path) -> None:
  function test_missing_requirements_file_handling (line 971) | def test_missing_requirements_file_handling(tmp_path: Path) -> None:
  function test_package_name_extraction_edge_cases (line 989) | def test_package_name_extraction_edge_cases(tmp_path: Path) -> None:

FILE: tests/test_pypi_alternatives/main_app/main_app/__init__.py
  function main (line 4) | def main() -> str:

FILE: tests/test_pypi_alternatives/shared_lib/shared_lib/__init__.py
  function greet (line 4) | def greet() -> str:

FILE: tests/test_pypi_alternatives_errors.py
  function test_local_dependency_wheel_with_pypi_alternative (line 18) | def test_local_dependency_wheel_with_pypi_alternative(tmp_path: Path) ->...
  function test_missing_local_dependency_with_pypi_alternative (line 55) | def test_missing_local_dependency_with_pypi_alternative(tmp_path: Path) ...
  function test_empty_folder_with_pypi_alternative (line 83) | def test_empty_folder_with_pypi_alternative(tmp_path: Path) -> None:
  function test_empty_git_submodule_with_pypi_alternative (line 114) | def test_empty_git_submodule_with_pypi_alternative(tmp_path: Path) -> None:
  function test_non_pip_installable_with_pypi_alternative (line 146) | def test_non_pip_installable_with_pypi_alternative(tmp_path: Path) -> None:
  function test_circular_dependencies_with_pypi_alternatives (line 178) | def test_circular_dependencies_with_pypi_alternatives(tmp_path: Path) ->...
  function test_very_long_pypi_alternative_names (line 221) | def test_very_long_pypi_alternative_names(tmp_path: Path) -> None:
  function test_special_characters_in_paths (line 274) | def test_special_characters_in_paths(tmp_path: Path) -> None:
  function test_symlink_local_dependencies (line 313) | def test_symlink_local_dependencies(tmp_path: Path) -> None:

FILE: tests/test_pypi_alternatives_integration.py
  function test_build_with_pypi_alternatives (line 17) | def test_build_with_pypi_alternatives(
  function test_mixed_local_deps_with_and_without_pypi (line 102) | def test_mixed_local_deps_with_and_without_pypi(tmp_path: Path) -> None:
  function test_setuptools_with_skip_local_deps_env_var (line 146) | def test_setuptools_with_skip_local_deps_env_var(
  function test_use_skip_entries_are_ignored (line 197) | def test_use_skip_entries_are_ignored(tmp_path: Path) -> None:
  function test_use_pypi_entries_not_readded (line 228) | def test_use_pypi_entries_not_readded(tmp_path: Path) -> None:

FILE: tests/test_setuptools_integration.py
  function test_package_name_from_path (line 20) | def test_package_name_from_path() -> None:
  function test_package_name_from_cfg (line 61) | def test_package_name_from_cfg(tmp_path: Path) -> None:
  function test_package_name_from_setup_py_requires_literal_name (line 91) | def test_package_name_from_setup_py_requires_literal_name(tmp_path: Path...
  function test_package_name_from_path_falls_back_on_invalid_pyproject (line 110) | def test_package_name_from_path_falls_back_on_invalid_pyproject(tmp_path...
  function test_package_name_from_path_falls_back_on_invalid_setup_py (line 117) | def test_package_name_from_path_falls_back_on_invalid_setup_py(tmp_path:...
  function test_package_name_from_path_does_not_suppress_unexpected_errors (line 124) | def test_package_name_from_path_does_not_suppress_unexpected_errors(
  function test_filter_python_dependencies_rejects_resolved_dict_input (line 137) | def test_filter_python_dependencies_rejects_resolved_dict_input() -> None:

FILE: tests/test_unidep.py
  function setup_test_files (line 48) | def setup_test_files(
  function test_find_requirements_files (line 66) | def test_find_requirements_files(
  function test_find_requirements_files_depth (line 84) | def test_find_requirements_files_depth(tmp_path: Path) -> None:
  function test_parse_requirements (line 113) | def test_parse_requirements(
  function test_generate_conda_env_file (line 188) | def test_generate_conda_env_file(
  function test_generate_conda_env_stdout (line 211) | def test_generate_conda_env_stdout(
  function test_create_conda_env_specification_platforms (line 230) | def test_create_conda_env_specification_platforms(
  function test_verbose_output (line 301) | def test_verbose_output(tmp_path: Path, capsys: pytest.CaptureFixture) -...
  function test_create_conda_env_specification_rejects_resolved_dict_input (line 331) | def test_create_conda_env_specification_rejects_resolved_dict_input() ->...
  function test_pop_unused_platforms_removes_non_requested_platform (line 340) | def test_pop_unused_platforms_removes_non_requested_platform() -> None:
  function test_extract_python_requires (line 353) | def test_extract_python_requires(setup_test_files: tuple[Path, Path]) ->...
  function test_pip_install_local_dependencies (line 372) | def test_pip_install_local_dependencies(tmp_path: Path) -> None:
  function test_path_to_file_uri_handles_windows_drive (line 403) | def test_path_to_file_uri_handles_windows_drive() -> None:
  function test_channels (line 409) | def test_channels(toml_or_yaml: Literal["toml", "yaml"], tmp_path: Path)...
  function test_surrounding_comments (line 418) | def test_surrounding_comments(
  function test_filter_pip_and_conda (line 515) | def test_filter_pip_and_conda(
  function test_duplicates_with_version (line 808) | def test_duplicates_with_version(
  function test_duplicates_different_platforms (line 922) | def test_duplicates_different_platforms(
  function test_expand_none_with_different_platforms (line 1053) | def test_expand_none_with_different_platforms(
  function test_different_pins_on_conda_and_pip (line 1217) | def test_different_pins_on_conda_and_pip(
  function test_pip_pinned_conda_not (line 1286) | def test_pip_pinned_conda_not(
  function test_conda_pinned_pip_not (line 1338) | def test_conda_pinned_pip_not(
  function test_get_python_dependencies_preserves_platform_specific_pip_with_pinned_conda (line 1390) | def test_get_python_dependencies_preserves_platform_specific_pip_with_pi...
  function test_filter_python_dependencies_with_platforms (line 1419) | def test_filter_python_dependencies_with_platforms(
  function test_conda_with_comments (line 1444) | def test_conda_with_comments(
  function test_duplicate_names (line 1476) | def test_duplicate_names(toml_or_yaml: Literal["toml", "yaml"], tmp_path...
  function test_conflicts_when_selector_comment (line 1507) | def test_conflicts_when_selector_comment(
  function test_platforms_section_in_yaml (line 1584) | def test_platforms_section_in_yaml(
  function test_platforms_section_in_yaml_similar_platforms (line 1621) | def test_platforms_section_in_yaml_similar_platforms(
  function test_conda_with_non_platform_comment (line 1688) | def test_conda_with_non_platform_comment(
  function test_pip_and_conda_different_name_on_linux64 (line 1724) | def test_pip_and_conda_different_name_on_linux64(
  function test_parse_requirements_with_ignore_pin (line 1802) | def test_parse_requirements_with_ignore_pin(
  function test_parse_requirements_with_skip_dependency (line 1834) | def test_parse_requirements_with_skip_dependency(
  function test_pin_star_cuda (line 1872) | def test_pin_star_cuda(toml_or_yaml: Literal["toml", "yaml"], tmp_path: ...
  function test_parse_requirements_with_overwrite_pins (line 1906) | def test_parse_requirements_with_overwrite_pins(
  function test_duplicate_names_different_platforms (line 1953) | def test_duplicate_names_different_platforms(
  function test_with_unused_platform (line 2020) | def test_with_unused_platform(
  function test_sel_selector_merges_explicit_platform_pinnings (line 2051) | def test_sel_selector_merges_explicit_platform_pinnings(
  function test_pip_with_pinning (line 2084) | def test_pip_with_pinning(
  function test_pip_with_pinning_special_case_wildcard (line 2156) | def test_pip_with_pinning_special_case_wildcard(
  function test_pip_with_pinning_special_case_git_repo (line 2211) | def test_pip_with_pinning_special_case_git_repo(
  function test_not_equal (line 2246) | def test_not_equal(
  function test_dot_in_package_name (line 2287) | def test_dot_in_package_name(
  function test_optional_dependencies (line 2313) | def test_optional_dependencies(
  function test_optional_dependencies_multiple_sections (line 2353) | def test_optional_dependencies_multiple_sections(
  function test_optional_dependencies_get_python_dependencies (line 2383) | def test_optional_dependencies_get_python_dependencies(
  function test_pip_dep_with_extras (line 2408) | def test_pip_dep_with_extras(
  function test_explicit_conda_pip_pair_with_different_names_prefers_pinned_pip (line 2472) | def test_explicit_conda_pip_pair_with_different_names_prefers_pinned_pip(
  function test_same_source_final_collisions_merge_pip_extras (line 2506) | def test_same_source_final_collisions_merge_pip_extras(
  function test_cross_source_final_collisions_raise_for_conda_like_outputs (line 2540) | def test_cross_source_final_collisions_raise_for_conda_like_outputs(
  function test_same_name_cross_family_collisions_choose_deterministically (line 2573) | def test_same_name_cross_family_collisions_choose_deterministically(
  function test_pip_pep440_constraints_fall_back_to_explicit_joined_string (line 2608) | def test_pip_pep440_constraints_fall_back_to_explicit_joined_string(
  function test_pip_contradictory_pep440_constraints_raise (line 2649) | def test_pip_contradictory_pep440_constraints_raise(
  function test_local_dependency_in_dependencies_list (line 2682) | def test_local_dependency_in_dependencies_list(
  function test_optional_dependencies_with_local_dependencies (line 2702) | def test_optional_dependencies_with_local_dependencies(
  function test_optional_dependencies_with_local_dependencies_with_extras (line 2750) | def test_optional_dependencies_with_local_dependencies_with_extras(
  function test_optional_dependencies_with_dicts (line 2807) | def test_optional_dependencies_with_dicts(
  function test_optional_dependencies_with_version_specifier (line 2843) | def test_optional_dependencies_with_version_specifier(

FILE: tests/test_utils.py
  function test_escape_unicode (line 33) | def test_escape_unicode() -> None:
  function test_build_pep508_environment_marker (line 38) | def test_build_pep508_environment_marker() -> None:
  function test_spec_rendering_helpers (line 64) | def test_spec_rendering_helpers() -> None:
  function test_detect_platform (line 77) | def test_detect_platform() -> None:
  function test_collect_selector_platforms_with_optional_dependencies (line 142) | def test_collect_selector_platforms_with_optional_dependencies() -> None:
  function test_resolve_platforms_precedence_and_fallback (line 173) | def test_resolve_platforms_precedence_and_fallback(
  function test_parse_package_str (line 213) | def test_parse_package_str() -> None:
  function test_path_with_extras_eq_handles_non_matching_object (line 240) | def test_path_with_extras_eq_handles_non_matching_object() -> None:
  function test_get_package_version_missing_package (line 245) | def test_get_package_version_missing_package(monkeypatch: pytest.MonkeyP...
  function test_parse_package_str_with_selector (line 254) | def test_parse_package_str_with_selector() -> None:
  function test_parse_package_str_with_extras (line 294) | def test_parse_package_str_with_extras() -> None:
  function test_extract_matching_platforms (line 326) | def test_extract_matching_platforms() -> None:
  function test_split_path_and_extras (line 383) | def test_split_path_and_extras() -> None:

FILE: tests/test_version_conflicts.py
  function test_combining_versions (line 20) | def test_combining_versions() -> None:
  function test_is_valid_pinning (line 39) | def test_is_valid_pinning(operator: str, version: str) -> None:
  function test_combine_version_pinnings (line 74) | def test_combine_version_pinnings(pinnings: list[str], expected: str) ->...
  function test_invalid_pinnings (line 96) | def test_invalid_pinnings(pinnings: list[str]) -> None:
  function test_contradictory_pinnings (line 105) | def test_contradictory_pinnings(pinnings: list[str]) -> None:
  function test_exact_pinning_with_contradictory_ranges (line 114) | def test_exact_pinning_with_contradictory_ranges() -> None:
  function test_multiple_exact_pinnings (line 128) | def test_multiple_exact_pinnings() -> None:
  function test_general_contradictory_pinnings (line 136) | def test_general_contradictory_pinnings() -> None:
  function test_is_redundant (line 145) | def test_is_redundant() -> None:
  function test_invalid_parse_pinning (line 154) | def test_invalid_parse_pinning(pinning: str) -> None:
  function test_extract_version_operator_all_operators (line 163) | def test_extract_version_operator_all_operators(op: str) -> None:
  function test_extract_version_operator_no_operator (line 171) | def test_extract_version_operator_no_operator(constraint: str) -> None:
  function test_extract_version_operator_strips_whitespace (line 175) | def test_extract_version_operator_strips_whitespace() -> None:

FILE: unidep/_cli.py
  class _HelpFormatter (line 67) | class _HelpFormatter(RichHelpFormatter):
    method _get_help_string (line 68) | def _get_help_string(self, action: argparse.Action) -> str | None:
  function _flatten_selected_dependency_entries (line 80) | def _flatten_selected_dependency_entries(
  function _collect_available_optional_dependency_groups (line 90) | def _collect_available_optional_dependency_groups(
  function _merge_optional_dependency_extras (line 102) | def _merge_optional_dependency_extras(
  function _collect_selected_conda_like_platforms (line 140) | def _collect_selected_conda_like_platforms(
  function _add_common_args (line 155) | def _add_common_args(  # noqa: PLR0912, C901
  function _add_extra_flags (line 345) | def _add_extra_flags(
  function _parse_args (line 362) | def _parse_args() -> argparse.Namespace:  # noqa: PLR0915
  function _ensure_files (line 759) | def _ensure_files(files: list[Path]) -> None:
  function _get_conda_executable (line 774) | def _get_conda_executable(which: CondaExecutable) -> str | None:
  function _identify_conda_executable (line 787) | def _identify_conda_executable() -> CondaExecutable:  # pragma: no cover
  function _maybe_conda_executable (line 802) | def _maybe_conda_executable() -> CondaExecutable | None:
  function _format_inline_conda_package (line 809) | def _format_inline_conda_package(package: str) -> str:
  function _maybe_exe (line 816) | def _maybe_exe(conda_executable: CondaExecutable) -> str:
  function _capitalize_dir (line 840) | def _capitalize_dir(path: str, *, capitalize: bool = True, index: int = ...
  function _find_windows_path (line 852) | def _find_windows_path(conda_executable: CondaExecutable) -> str:
  function _conda_cli_command_json (line 898) | def _conda_cli_command_json(
  function _conda_env_list (line 920) | def _conda_env_list(conda_executable: CondaExecutable) -> list[str]:
  function _conda_info (line 926) | def _conda_info(conda_executable: CondaExecutable) -> dict:
  function _conda_root_prefix (line 930) | def _conda_root_prefix(conda_executable: CondaExecutable) -> Path:  # pr...
  function _conda_env_dirs (line 945) | def _conda_env_dirs(
  function _conda_env_name_to_prefix (line 958) | def _conda_env_name_to_prefix(
  function _maybe_create_conda_env_args (line 986) | def _maybe_create_conda_env_args(
  function _create_conda_environment (line 1010) | def _create_conda_environment(
  function _python_executable (line 1020) | def _python_executable(
  function _use_uv (line 1040) | def _use_uv(no_uv: bool) -> bool:  # noqa: FBT001
  function _build_pip_index_arguments (line 1047) | def _build_pip_index_arguments(pip_indices: Sequence[str]) -> list[str]:
  function _pip_install_local (line 1069) | def _pip_install_local(
  function _install_command (line 1122) | def _install_command(  # noqa: C901, PLR0912, PLR0915
  function _install_all_command (line 1290) | def _install_all_command(
  function _maybe_conda_run (line 1338) | def _maybe_conda_run(
  function _create_env_from_lock (line 1359) | def _create_env_from_lock(  # noqa: PLR0912
  function _verify_conda_lock_installed (line 1422) | def _verify_conda_lock_installed() -> None:
  function _merge_command (line 1456) | def _merge_command(
  function _pixi_command (line 1528) | def _pixi_command(
  function _pip_compile_command (line 1580) | def _pip_compile_command(
  function _check_conda_prefix (line 1645) | def _check_conda_prefix() -> None:  # pragma: no cover
  function _print_versions (line 1664) | def _print_versions() -> None:  # pragma: no cover
  function _print_with_rich (line 1695) | def _print_with_rich(data: list) -> None:
  function _pip_subcommand (line 1710) | def _pip_subcommand(
  function main (line 1738) | def main() -> None:  # noqa: PLR0912

FILE: unidep/_conda_env.py
  class CondaEnvironmentSpec (line 47) | class CondaEnvironmentSpec(NamedTuple):
  function _conda_sel (line 57) | def _conda_sel(sel: str) -> CondaPlatform:
  function _as_dependency_entries (line 64) | def _as_dependency_entries(
  function _normalize_pip_indices (line 77) | def _normalize_pip_indices(
  function _extract_conda_pip_dependencies (line 87) | def _extract_conda_pip_dependencies(
  function _ensure_sel_representable (line 110) | def _ensure_sel_representable(
  function _add_comment (line 140) | def _add_comment(commment_seq: CommentedSeq, platform: Platform) -> None:
  function create_conda_env_specification (line 149) | def create_conda_env_specification(  # noqa: C901, PLR0912, PLR0915
  function write_conda_environment_file (line 265) | def write_conda_environment_file(

FILE: unidep/_conda_lock.py
  function _run_conda_lock (line 41) | def _run_conda_lock(
  function _conda_lock_global (line 92) | def _conda_lock_global(
  class LockSpec (line 140) | class LockSpec(NamedTuple):
  function _parse_conda_lock_packages (line 147) | def _parse_conda_lock_packages(
  function _add_package_to_lock (line 202) | def _add_package_to_lock(
  function _strip_pip_extras (line 225) | def _strip_pip_extras(name: str) -> str:
  function _find_lock_key (line 231) | def _find_lock_key(
  function _add_package_with_dependencies_to_lock (line 252) | def _add_package_with_dependencies_to_lock(
  function _handle_missing_keys (line 294) | def _handle_missing_keys(
  function _conda_lock_subpackage (line 352) | def _conda_lock_subpackage(
  function _download_and_get_package_names (line 435) | def _download_and_get_package_names(
  function _conda_lock_subpackages (line 483) | def _conda_lock_subpackages(
  function conda_lock_command (line 515) | def conda_lock_command(
  class Mismatch (line 568) | class Mismatch(NamedTuple):
  function _check_consistent_lock_files (line 579) | def _check_consistent_lock_files(
  function _format_table_row (line 621) | def _format_table_row(
  function _mismatch_report (line 630) | def _mismatch_report(

FILE: unidep/_conflicts.py
  function extract_version_operator (line 45) | def extract_version_operator(constraint: str) -> str:
  function _prepare_specs_for_conflict_resolution (line 58) | def _prepare_specs_for_conflict_resolution(
  function _pop_unused_platforms_and_maybe_expand_none (line 84) | def _pop_unused_platforms_and_maybe_expand_none(
  function _maybe_new_spec_with_combined_pinnings (line 109) | def _maybe_new_spec_with_combined_pinnings(
  function _combine_pinning_within_platform (line 130) | def _combine_pinning_within_platform(
  class VersionConflictError (line 142) | class VersionConflictError(ValueError):
  function _add_optional_dependencies (line 146) | def _add_optional_dependencies(
  function resolve_conflicts (line 158) | def resolve_conflicts(
  function _parse_pinning (line 208) | def _parse_pinning(pinning: str) -> tuple[str, version.Version]:
  function _is_redundant (line 224) | def _is_redundant(pinning: str, other_pinnings: list[str]) -> bool:
  function _is_valid_pinning (line 251) | def _is_valid_pinning(pinning: str) -> bool:
  function _deduplicate (line 265) | def _deduplicate(pinnings: list[str]) -> list[str]:
  function _split_pinnings (line 270) | def _split_pinnings(pinnings: list[str]) -> list[str]:
  function combine_version_pinnings (line 275) | def combine_version_pinnings(pinnings: list[str], *, name: str | None = ...

FILE: unidep/_dependencies_parsing.py
  function find_requirements_files (line 47) | def find_requirements_files(
  function _extract_first_comment (line 79) | def _extract_first_comment(
  function _identifier (line 96) | def _identifier(identifier: int, selector: str | None) -> str:
  function _parse_dependency (line 104) | def _parse_dependency(
  class DependencyOrigin (line 137) | class DependencyOrigin(NamedTuple):
  class DependencyEntry (line 146) | class DependencyEntry(NamedTuple):
  class ParsedRequirements (line 156) | class ParsedRequirements(NamedTuple):
  class Requirements (line 168) | class Requirements(NamedTuple):
  class _LoadedRequirementData (line 177) | class _LoadedRequirementData(NamedTuple):
  function _parse_overwrite_pins (line 183) | def _parse_overwrite_pins(overwrite_pins: list[str]) -> dict[str, str | ...
  function _collect_pip_indices (line 192) | def _collect_pip_indices(data: dict[str, Any]) -> list[str]:
  function _load (line 215) | def _load(p: Path, yaml: YAML) -> dict[str, Any]:
  function _add_project_dependencies (line 238) | def _add_project_dependencies(
  function _parse_local_dependency_item (line 256) | def _parse_local_dependency_item(item: str | dict[str, str]) -> LocalDep...
  function _normalize_local_dependency_use (line 274) | def _normalize_local_dependency_use(use_value: str | None) -> LocalDepen...
  function get_local_dependencies (line 286) | def get_local_dependencies(data: dict[str, Any]) -> list[LocalDependency]:
  function _to_path_with_extras (line 304) | def _to_path_with_extras(
  function _update_data_structures (line 332) | def _update_data_structures(
  function _move_optional_dependencies_to_dependencies (line 411) | def _move_optional_dependencies_to_dependencies(
  function _move_local_optional_dependencies_to_local_dependencies (line 437) | def _move_local_optional_dependencies_to_local_dependencies(
  function _resolve_local_dependency_path (line 475) | def _resolve_local_dependency_path(base_dir: Path, local: str) -> Path:
  function _try_parse_local_dependency_requirement_file (line 480) | def _try_parse_local_dependency_requirement_file(
  function _apply_local_dependency_override (line 495) | def _apply_local_dependency_override(
  function _effective_local_dependencies (line 522) | def _effective_local_dependencies(
  function _append_pip_dependency_from_local (line 547) | def _append_pip_dependency_from_local(
  function _add_local_dependencies (line 558) | def _add_local_dependencies(
  function parse_requirements (line 604) | def parse_requirements(
  function _str_is_path_like (line 724) | def _str_is_path_like(s: str) -> bool:
  function _check_allowed_local_dependency (line 729) | def _check_allowed_local_dependency(name: str, is_optional: bool) -> Non...
  function _add_dependencies (line 741) | def _add_dependencies(
  function _extract_local_dependencies (line 841) | def _extract_local_dependencies(  # noqa: PLR0912
  function parse_local_dependencies (line 949) | def parse_local_dependencies(
  function yaml_to_toml (line 988) | def yaml_to_toml(yaml_path: Path) -> str:
  function _is_empty_git_submodule (line 1022) | def _is_empty_git_submodule(path: Path) -> bool:
  function _is_empty_folder (line 1035) | def _is_empty_folder(path: Path) -> bool:

FILE: unidep/_dependency_selection.py
  class SourceRequirement (line 35) | class SourceRequirement:
  class MergedSourceCandidate (line 47) | class MergedSourceCandidate:
  class PlatformCandidates (line 57) | class PlatformCandidates:
  function _operator_order_key (line 64) | def _operator_order_key(constraint: str) -> tuple[int, str]:
  function _canonicalize_joined_pinnings (line 80) | def _canonicalize_joined_pinnings(pinnings: list[str]) -> str:
  function _parse_pip_name (line 88) | def _parse_pip_name(name: str) -> tuple[str, tuple[str, ...]]:
  function _build_pip_name (line 96) | def _build_pip_name(base_name: str, extras: tuple[str, ...]) -> str:
  function _spec_is_pinned (line 102) | def _spec_is_pinned(spec: Spec) -> bool:
  function _candidate_scope_rank (line 106) | def _candidate_scope_rank(candidate: MergedSourceCandidate) -> float:
  function _candidate_has_universal_origin (line 113) | def _candidate_has_universal_origin(candidate: MergedSourceCandidate) ->...
  function _candidate_has_pip_extras (line 117) | def _candidate_has_pip_extras(candidate: MergedSourceCandidate) -> bool:
  function _candidate_display_key (line 121) | def _candidate_display_key(
  function _origin_to_text (line 131) | def _origin_to_text(origin: DependencyOrigin) -> str:
  function _candidate_to_text (line 141) | def _candidate_to_text(candidate: MergedSourceCandidate) -> str:
  function _merge_pin_strings (line 147) | def _merge_pin_strings(
  function _bump_release_prefix (line 183) | def _bump_release_prefix(release: tuple[int, ...], prefix_len: int) -> str:
  function _normalize_pinning_token_for_satisfiability (line 190) | def _normalize_pinning_token_for_satisfiability(  # noqa: PLR0911
  function _parse_supported_pinning (line 226) | def _parse_supported_pinning(pinning: str) -> tuple[str, Version]:
  function _exact_pinning_version_text (line 233) | def _exact_pinning_version_text(pinning: str) -> str | None:
  function _stricter_lower_bound (line 240) | def _stricter_lower_bound(
  function _stricter_upper_bound (line 253) | def _stricter_upper_bound(
  function _normalized_pinnings_are_satisfiable (line 266) | def _normalized_pinnings_are_satisfiable(  # noqa: PLR0911, PLR0912
  function _joined_pinnings_are_safely_satisfiable (line 314) | def _joined_pinnings_are_safely_satisfiable(pinnings: list[str]) -> bool:
  function _merge_source_requirements (line 325) | def _merge_source_requirements(
  function _entry_family_key (line 358) | def _entry_family_key(entry: DependencyEntry) -> FamilyKey:
  function _source_requirement_from_spec (line 367) | def _source_requirement_from_spec(
  function _collect_target_platforms (line 393) | def _collect_target_platforms(
  function _entry_targets (line 402) | def _entry_targets(
  function _build_platform_candidates (line 416) | def _build_platform_candidates(
  function _choose_by_precedence (line 466) | def _choose_by_precedence(
  function _select_conda_like_candidate (line 488) | def _select_conda_like_candidate(
  function _select_pip_candidate (line 497) | def _select_pip_candidate(
  function _final_identity (line 505) | def _final_identity(candidate: MergedSourceCandidate) -> str:
  function _merge_candidate_group (line 511) | def _merge_candidate_group(
  function _can_reconcile_cross_source_collision (line 522) | def _can_reconcile_cross_source_collision(
  function _raise_final_collision (line 540) | def _raise_final_collision(
  function _resolve_final_collisions (line 562) | def _resolve_final_collisions(
  function select_conda_like_requirements (line 617) | def select_conda_like_requirements(
  function select_pip_requirements (line 629) | def select_pip_requirements(
  function collapse_selected_universals (line 642) | def collapse_selected_universals(

FILE: unidep/_hatch_integration.py
  class UnidepRequirementsMetadataHook (line 21) | class UnidepRequirementsMetadataHook(MetadataHookInterface):
    method update (line 26) | def update(self, metadata: dict) -> None:
  function hatch_register_metadata_hook (line 52) | def hatch_register_metadata_hook() -> type[UnidepRequirementsMetadataHook]:

FILE: unidep/_pixi.py
  function _parse_version_build (line 69) | def _parse_version_build(pin: str | None) -> str | dict[str, str]:
  function _parse_package_extras (line 103) | def _parse_package_extras(pkg_name: str) -> tuple[str, list[str]]:
  function _make_pip_version_spec (line 120) | def _make_pip_version_spec(
  function _get_package_name (line 144) | def _get_package_name(project_dir: Path) -> str:
  function _normalize_feature_name (line 150) | def _normalize_feature_name(name: str) -> str:
  function _project_dir_from_requirement_file (line 155) | def _project_dir_from_requirement_file(req_file: Path) -> Path:
  function _derive_feature_names (line 161) | def _derive_feature_names(requirements_files: Sequence[Path]) -> list[str]:
  function _editable_dependency_path (line 220) | def _editable_dependency_path(req_dir: Path, output_file: str | Path | N...
  function _with_unique_order_paths (line 240) | def _with_unique_order_paths(items: Sequence[Path]) -> list[Path]:
  function _add_editable_local_dependencies (line 253) | def _add_editable_local_dependencies(
  function _unmanaged_installable_local_project_dir (line 288) | def _unmanaged_installable_local_project_dir(
  class LocalDependencyGraph (line 303) | class LocalDependencyGraph(NamedTuple):
  function _discover_local_dependency_graph (line 314) | def _discover_local_dependency_graph(  # noqa: PLR0912, C901, PLR0915
  function _parse_direct_requirements_for_node (line 452) | def _parse_direct_requirements_for_node(
  function _collect_transitive_nodes (line 509) | def _collect_transitive_nodes(
  function _with_unique_order (line 529) | def _with_unique_order(items: list[str]) -> list[str]:
  function _unique_optional_feature_name (line 534) | def _unique_optional_feature_name(
  function _unique_env_name (line 556) | def _unique_env_name(
  function _add_single_file_optional_environments (line 579) | def _add_single_file_optional_environments(
  function _spec_key (line 601) | def _spec_key(spec: Spec) -> tuple[str, str, str | None, str | None]:
  function _entry_key (line 606) | def _entry_key(
  function _subtract_entries (line 618) | def _subtract_entries(
  class _PixiGenerationResult (line 634) | class _PixiGenerationResult(NamedTuple):
  function _process_single_file_optional_groups (line 643) | def _process_single_file_optional_groups(
  function _generate_single_file_pixi (line 762) | def _generate_single_file_pixi(
  function _generate_multi_file_pixi (line 865) | def _generate_multi_file_pixi(  # noqa: PLR0912, C901, PLR0915
  function _selector_platforms_from_entries (line 1096) | def _selector_platforms_from_entries(
  function _feature_platforms_for_entries (line 1110) | def _feature_platforms_for_entries(
  function generate_pixi_toml (line 1126) | def generate_pixi_toml(
  function _extract_dependencies (line 1221) | def _extract_dependencies(  # noqa: PLR0912
  function _build_feature_dict (line 1308) | def _build_feature_dict(platform_deps: PlatformDeps) -> dict[str, Any]:
  function _filter_section_targets (line 1335) | def _filter_section_targets(
  function _filter_targets_by_platforms (line 1351) | def _filter_targets_by_platforms(
  function _write_pixi_toml (line 1365) | def _write_pixi_toml(

FILE: unidep/_pytest_plugin.py
  function pytest_addoption (line 27) | def pytest_addoption(parser: pytest.Parser) -> None:  # pragma: no cover
  function pytest_collection_modifyitems (line 50) | def pytest_collection_modifyitems(
  function _file_in_folder (line 96) | def _file_in_folder(file: Path, folder: Path) -> bool:  # pragma: no cover
  function _affected_packages (line 103) | def _affected_packages(

FILE: unidep/_setuptools_integration.py
  function filter_python_dependencies (line 49) | def filter_python_dependencies(
  class Dependencies (line 92) | class Dependencies(NamedTuple):
  function _path_to_file_uri (line 97) | def _path_to_file_uri(path: PurePath) -> str:
  function get_python_dependencies (line 109) | def get_python_dependencies(  # noqa: PLR0912
  function _deps (line 200) | def _deps(requirements_file: Path) -> Dependencies:  # pragma: no cover
  function _setuptools_finalizer (line 226) | def _setuptools_finalizer(dist: Distribution) -> None:  # pragma: no cover

FILE: unidep/platform_definitions.py
  function validate_selector (line 90) | def validate_selector(selector: Selector) -> None:
  function platforms_from_selector (line 98) | def platforms_from_selector(selector: str) -> list[Platform]:
  class Spec (line 114) | class Spec(NamedTuple):
    method platforms (line 124) | def platforms(self) -> list[Platform] | None:
    method pprint (line 130) | def pprint(self) -> str:
    method name_with_pin (line 139) | def name_with_pin(self, *, is_pip: bool = False) -> str:

FILE: unidep/utils.py
  function add_comment_to_file (line 36) | def add_comment_to_file(
  function remove_top_comments (line 57) | def remove_top_comments(filename: str | Path) -> None:
  function escape_unicode (line 71) | def escape_unicode(string: str) -> str:
  function is_pip_installable (line 76) | def is_pip_installable(folder: str | Path) -> bool:  # pragma: no cover
  class UnsupportedPlatformError (line 97) | class UnsupportedPlatformError(Exception):
  function identify_current_platform (line 101) | def identify_current_platform() -> Platform:
  function collect_selector_platforms (line 131) | def collect_selector_platforms(
  function resolve_platforms (line 152) | def resolve_platforms(
  function build_pep508_environment_marker (line 175) | def build_pep508_environment_marker(
  class ParsedPackageStr (line 190) | class ParsedPackageStr(NamedTuple):
  function parse_package_str (line 199) | def parse_package_str(package_str: str) -> ParsedPackageStr:
  function package_name_from_setup_cfg (line 229) | def package_name_from_setup_cfg(file_path: Path) -> str:
  function package_name_from_setup_py (line 240) | def package_name_from_setup_py(file_path: Path) -> str:
  function package_name_from_pyproject_toml (line 272) | def package_name_from_pyproject_toml(file_path: Path) -> str:
  function package_name_from_path (line 284) | def package_name_from_path(path: Path) -> str:
  function _simple_warning_format (line 321) | def _simple_warning_format(
  function warn (line 338) | def warn(
  function selector_from_comment (line 352) | def selector_from_comment(comment: str) -> str | None:
  function extract_matching_platforms (line 369) | def extract_matching_platforms(comment: str) -> list[Platform]:
  function unidep_configured_in_toml (line 377) | def unidep_configured_in_toml(path: Path) -> bool:
  function split_path_and_extras (line 384) | def split_path_and_extras(input_str: str | Path) -> tuple[Path, list[str]]:
  class PathWithExtras (line 410) | class PathWithExtras(NamedTuple):
    method path_with_extras (line 417) | def path_with_extras(self) -> Path:
    method resolved (line 423) | def resolved(self) -> PathWithExtras:
    method canonicalized (line 427) | def canonicalized(self) -> PathWithExtras:
    method __hash__ (line 431) | def __hash__(self) -> int:
    method __eq__ (line 435) | def __eq__(self, other: object) -> bool:
  class LocalDependency (line 445) | class LocalDependency(NamedTuple):
  function parse_folder_or_filename (line 453) | def parse_folder_or_filename(folder_or_file: str | Path) -> PathWithExtras:
  function defaultdict_to_dict (line 475) | def defaultdict_to_dict(d: defaultdict | Any) -> dict:
  function get_package_version (line 482) | def get_package_version(package_name: str) -> str | None:
Condensed preview — 101 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (939K chars).
[
  {
    "path": ".github/release.py",
    "chars": 3686,
    "preview": "\"\"\"Create a new release tag with CalVer format.\"\"\"\n\nimport datetime\nimport operator\nimport os\nfrom pathlib import Path\n\n"
  },
  {
    "path": ".github/renovate.json",
    "chars": 809,
    "preview": "{\n    \"$schema\": \"https://docs.renovatebot.com/renovate-schema.json\",\n    \"rebaseWhen\": \"behind-base-branch\",\n    \"depen"
  },
  {
    "path": ".github/use-local-unidep.py",
    "chars": 1140,
    "preview": "\"\"\"Update `pyproject.toml` in each example project to use local `unidep`.\"\"\"\n\nfrom pathlib import Path\n\nREPO_ROOT = Path"
  },
  {
    "path": ".github/workflows/documentation-links.yml",
    "chars": 276,
    "preview": "name: readthedocs/actions\non:\n  pull_request_target:\n    types:\n      - opened\n\npermissions:\n  pull-requests: write\n\njob"
  },
  {
    "path": ".github/workflows/install-example-projects.yml",
    "chars": 4254,
    "preview": "name: install-example-projects\n\non:\n  push:\n    branches: [main]\n  pull_request:\n\njobs:\n  pip-install:\n    strategy:\n   "
  },
  {
    "path": ".github/workflows/pytest.yml",
    "chars": 1628,
    "preview": "name: pytest\n\non:\n  push:\n    branches: [main]\n  pull_request:\n\njobs:\n  test:\n    strategy:\n      fail-fast: false\n     "
  },
  {
    "path": ".github/workflows/release.yml",
    "chars": 706,
    "preview": "name: Upload Python Package\n\non:\n  release:\n    types: [published]\n\njobs:\n  deploy:\n    runs-on: ubuntu-latest\n    envir"
  },
  {
    "path": ".github/workflows/toc.yaml",
    "chars": 250,
    "preview": "on: push\nname: TOC Generator\njobs:\n  generateTOC:\n    name: TOC Generator\n    runs-on: ubuntu-latest\n    steps:\n      - "
  },
  {
    "path": ".github/workflows/update-readme.yml",
    "chars": 1522,
    "preview": "name: Update README.md\n\non:\n  push:\n    branches:\n      - main\n  pull_request:\n\njobs:\n  update_readme:\n    runs-on: ubun"
  },
  {
    "path": ".gitignore",
    "chars": 1087,
    "preview": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packagi"
  },
  {
    "path": ".pre-commit-config.yaml",
    "chars": 538,
    "preview": "repos:\n  - repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: v5.0.0\n    hooks:\n      - id: check-added-large"
  },
  {
    "path": ".pre-commit-hooks.yaml",
    "chars": 254,
    "preview": "- id: unidep-environment-yaml\n  name: unidep environment.yaml\n  description: Generate environment.yaml from requirements"
  },
  {
    "path": ".readthedocs.yaml",
    "chars": 205,
    "preview": "version: 2\n\nbuild:\n  os: ubuntu-22.04\n  tools:\n    python: \"3.12\"\n\nsphinx:\n  configuration: docs/source/conf.py\n\npython:"
  },
  {
    "path": "LICENSE",
    "chars": 1511,
    "preview": "BSD 3-Clause License\n\nCopyright (c) 2023, Bas Nijholt\nAll rights reserved.\n\nRedistribution and use in source and binary "
  },
  {
    "path": "README.md",
    "chars": 86340,
    "preview": "# 🚀 UniDep - Unified Conda and Pip Dependency Management 🚀\n\n![UniDep logo](https://media.githubusercontent.com/media/bas"
  },
  {
    "path": "bootstrap.sh",
    "chars": 1345,
    "preview": "#!/usr/bin/env bash\n# Run this script with:\n#   \"${SHELL}\" <(curl -LsSf raw.githubusercontent.com/basnijholt/unidep/main"
  },
  {
    "path": "docs/Makefile",
    "chars": 764,
    "preview": "# Minimal makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line, and also\n# from the "
  },
  {
    "path": "docs/source/.gitignore",
    "chars": 5,
    "preview": "*.md\n"
  },
  {
    "path": "docs/source/conf.py",
    "chars": 15382,
    "preview": "\"\"\"Spinx configuration file for the unidep documentation.\n\nThe documentation is generated from the README.md file in the"
  },
  {
    "path": "example/README.md",
    "chars": 10565,
    "preview": "# Examples\n\n> [!TIP]\n> Try out `unidep` in this folder by running:\n> - `unidep install ./setup_py_project ./hatch_projec"
  },
  {
    "path": "example/environment.yaml",
    "chars": 763,
    "preview": "# This file is created and managed by `unidep` 3.2.0.\n# For details see https://github.com/basnijholt/unidep\n# File gene"
  },
  {
    "path": "example/hatch2_project/README.md",
    "chars": 1646,
    "preview": "# Hatchling Integration\n\n> [!TIP]\n> - **Standard Installation**: In this example folder, use `pip install .` to install "
  },
  {
    "path": "example/hatch2_project/hatch2_project.py",
    "chars": 6,
    "preview": "x = 1\n"
  },
  {
    "path": "example/hatch2_project/pyproject.toml",
    "chars": 857,
    "preview": "[build-system]\nrequires = [\"hatchling\", \"unidep[toml]\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"hatch2_proj"
  },
  {
    "path": "example/hatch_project/README.md",
    "chars": 1368,
    "preview": "# Hatchling Integration\n\n> [!TIP]\n> - **Standard Installation**: In this example folder, use `pip install .` to install "
  },
  {
    "path": "example/hatch_project/hatch_project.py",
    "chars": 6,
    "preview": "x = 1\n"
  },
  {
    "path": "example/hatch_project/pyproject.toml",
    "chars": 631,
    "preview": "[build-system]\nrequires = [\"hatchling\", \"unidep\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"hatch_project\"\nde"
  },
  {
    "path": "example/hatch_project/requirements.yaml",
    "chars": 279,
    "preview": "name: hatch_project\nchannels:\n  - conda-forge\ndependencies:\n  - conda: adaptive-scheduler  # [linux64]\n  - pip: unidep\n "
  },
  {
    "path": "example/pyproject_toml_project/README.md",
    "chars": 1467,
    "preview": "# Full `pyproject.toml` integration example\n\n> [!TIP]\n> - **Standard Installation**: In this example folder, use `pip in"
  },
  {
    "path": "example/pyproject_toml_project/pyproject.toml",
    "chars": 924,
    "preview": "[build-system]\nrequires = [\"setuptools\", \"unidep[toml]\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"pypr"
  },
  {
    "path": "example/pyproject_toml_project/pyproject_toml_project.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "example/setup_py_project/README.md",
    "chars": 1174,
    "preview": "# `setup.py` integration example\n\n> [!TIP]\n> - **Standard Installation**: In this example folder, use `pip install .` to"
  },
  {
    "path": "example/setup_py_project/pyproject.toml",
    "chars": 91,
    "preview": "[build-system]\nrequires = [\"setuptools\", \"unidep\"]\nbuild-backend = \"setuptools.build_meta\"\n"
  },
  {
    "path": "example/setup_py_project/requirements.yaml",
    "chars": 383,
    "preview": "name: setup_py_project\nchannels:\n  - conda-forge\ndependencies:\n  - pandas\n  - adaptive >=0.15.0, <2.0.0  # [linux64]\n  -"
  },
  {
    "path": "example/setup_py_project/setup.py",
    "chars": 329,
    "preview": "from setuptools import setup\n\nsetup(\n    name=\"setup_py_project\",\n    version=\"0.1.0\",\n    description=\"A short descript"
  },
  {
    "path": "example/setup_py_project/setup_py_project.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "example/setuptools_project/README.md",
    "chars": 1282,
    "preview": "# Setuptools `pyproject.toml` integration example\n\n> [!TIP]\n> - **Standard Installation**: In this example folder, use `"
  },
  {
    "path": "example/setuptools_project/pyproject.toml",
    "chars": 527,
    "preview": "[build-system]\nrequires = [\"setuptools\", \"unidep\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"setuptools"
  },
  {
    "path": "example/setuptools_project/requirements.yaml",
    "chars": 404,
    "preview": "name: setuptools_project\nchannels:\n  - conda-forge\ndependencies:\n  - adaptive  # [linux64]\n  - pfapack  # [linux64]\n  - "
  },
  {
    "path": "example/setuptools_project/setuptools_project.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "pyproject.toml",
    "chars": 3730,
    "preview": "[build-system]\nrequires = [\"setuptools>=42\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"unidep\""
  },
  {
    "path": "tests/__init__.py",
    "chars": 40,
    "preview": "\"\"\"Tests for the ``unidep`` package.\"\"\"\n"
  },
  {
    "path": "tests/helpers.py",
    "chars": 634,
    "preview": "\"\"\"unidep tests.\"\"\"\n\nfrom __future__ import annotations\n\nfrom pathlib import Path\nfrom typing import TYPE_CHECKING\n\nfrom"
  },
  {
    "path": "tests/shared_local_install_monorepo/project1/pyproject.toml",
    "chars": 141,
    "preview": "[build-system]\nrequires = [\"setuptools>=42\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"project"
  },
  {
    "path": "tests/shared_local_install_monorepo/project1/requirements.yaml",
    "chars": 49,
    "preview": "name: project1\nlocal_dependencies:\n  - ../shared\n"
  },
  {
    "path": "tests/shared_local_install_monorepo/project2/pyproject.toml",
    "chars": 141,
    "preview": "[build-system]\nrequires = [\"setuptools>=42\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"project"
  },
  {
    "path": "tests/shared_local_install_monorepo/project2/requirements.yaml",
    "chars": 49,
    "preview": "name: project2\nlocal_dependencies:\n  - ../shared\n"
  },
  {
    "path": "tests/shared_local_install_monorepo/shared/pyproject.toml",
    "chars": 139,
    "preview": "[build-system]\nrequires = [\"setuptools>=42\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"shared\""
  },
  {
    "path": "tests/shared_local_install_monorepo/shared/requirements.yaml",
    "chars": 30,
    "preview": "name: shared\ndependencies: []\n"
  },
  {
    "path": "tests/simple_monorepo/common-requirements.yaml",
    "chars": 211,
    "preview": "# This file is uses in the `local_dependencies:` section in `project1/requirements.yml`\n# and `project2/requirements.yml"
  },
  {
    "path": "tests/simple_monorepo/conda-lock.yml",
    "chars": 2397,
    "preview": "# This file is created and managed by `unidep` 0.41.0.\n# For details see https://github.com/basnijholt/unidep\n# File gen"
  },
  {
    "path": "tests/simple_monorepo/project1/conda-lock.yml",
    "chars": 2331,
    "preview": "# This file is created and managed by `unidep` 0.41.0.\n# For details see https://github.com/basnijholt/unidep\n# File gen"
  },
  {
    "path": "tests/simple_monorepo/project1/requirements.yaml",
    "chars": 195,
    "preview": "name: project1\nchannels:\n  - conda-forge\ndependencies:\n  - conda: bzip2\nlocal_dependencies:\n  - ../project2  # this mean"
  },
  {
    "path": "tests/simple_monorepo/project2/conda-lock.yml",
    "chars": 1665,
    "preview": "# This file is created and managed by `unidep` 0.41.0.\n# For details see https://github.com/basnijholt/unidep\n# File gen"
  },
  {
    "path": "tests/simple_monorepo/project2/requirements.yaml",
    "chars": 172,
    "preview": "name: project2\nchannels:\n  - conda-forge\ndependencies:\n  - conda: tzdata  # [arm64]\nlocal_dependencies:\n  - ../common-re"
  },
  {
    "path": "tests/test-pip-and-conda-different-name/conda-lock.yml",
    "chars": 11011,
    "preview": "# This file is created and managed by `unidep` 0.23.0.\n# For details see https://github.com/basnijholt/unidep\n# File gen"
  },
  {
    "path": "tests/test-pip-and-conda-different-name/project1/requirements.yaml",
    "chars": 222,
    "preview": "name: project2\nchannels:\n  - conda-forge\ndependencies:\n  - conda: python=3.10\n  - pip: fluent-logger  # depends on msgpa"
  },
  {
    "path": "tests/test-pip-and-conda-different-name/project2/requirements.yaml",
    "chars": 105,
    "preview": "name: project2\nchannels:\n  - conda-forge\ndependencies:\n  - conda: msgpack-python\nplatforms:\n  - linux-64\n"
  },
  {
    "path": "tests/test-pip-package-with-conda-dependency/conda-lock.yml",
    "chars": 12606,
    "preview": "# This file is created and managed by `unidep` 0.23.0.\n# For details see https://github.com/basnijholt/unidep\n# File gen"
  },
  {
    "path": "tests/test-pip-package-with-conda-dependency/project1/requirements.yaml",
    "chars": 92,
    "preview": "name: project1\nchannels:\n  - conda-forge\ndependencies:\n  - pybind11\nplatforms:\n  - linux-64\n"
  },
  {
    "path": "tests/test-pip-package-with-conda-dependency/project2/requirements.yaml",
    "chars": 223,
    "preview": "name: project2\nchannels:\n  - conda-forge\ndependencies:\n  - conda: python=3.11\n  - pip: cutde  # depends on pybind11, but"
  },
  {
    "path": "tests/test_cli.py",
    "chars": 52507,
    "preview": "\"\"\"unidep CLI tests.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport platform\nimport re\nimport shutil\nimport su"
  },
  {
    "path": "tests/test_cli_install_conda_lock.py",
    "chars": 4901,
    "preview": "\"\"\"Tests for the `unidep._cli` module (installing conda environment from lock file).\"\"\"\n\nimport subprocess\nfrom pathlib "
  },
  {
    "path": "tests/test_conda_lock.py",
    "chars": 21175,
    "preview": "\"\"\"unidep conda-lock tests.\"\"\"\n\nfrom __future__ import annotations\n\nimport shutil\nimport sys\nimport types\nfrom pathlib i"
  },
  {
    "path": "tests/test_dependencies_parsing_internal.py",
    "chars": 1674,
    "preview": "\"\"\"Focused tests for active internal dependency-parsing helpers.\"\"\"\n\nfrom __future__ import annotations\n\nfrom pathlib im"
  },
  {
    "path": "tests/test_dependency_selection.py",
    "chars": 8532,
    "preview": "\"\"\"Tests for user-shaped dependency selection behavior.\"\"\"\n\nfrom __future__ import annotations\n\nimport textwrap\nfrom pat"
  },
  {
    "path": "tests/test_local_wheels_and_zip.py",
    "chars": 5925,
    "preview": "\"\"\"Tests for parsing local dependencies from wheels and zips.\"\"\"\n\nimport textwrap\nfrom pathlib import Path\nfrom typing i"
  },
  {
    "path": "tests/test_parse_yaml_local_dependencies.py",
    "chars": 18718,
    "preview": "\"\"\"unidep's YAML parsing of the `local_dependencies` list.\"\"\"\n\nfrom __future__ import annotations\n\nimport shutil\nimport "
  },
  {
    "path": "tests/test_parse_yaml_nested_local_dependencies.py",
    "chars": 11981,
    "preview": "\"\"\"Test parsing nested local dependencies from YAML files.\"\"\"\n\nfrom __future__ import annotations\n\nimport textwrap\nfrom "
  },
  {
    "path": "tests/test_pip_indices.py",
    "chars": 14631,
    "preview": "\"\"\"Unit tests for pip_indices support in unidep.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nfrom pathlib import P"
  },
  {
    "path": "tests/test_pip_indices_cli.py",
    "chars": 19996,
    "preview": "\"\"\"Tests for pip_indices CLI functionality to achieve 100% coverage.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nf"
  },
  {
    "path": "tests/test_pip_indices_integration.py",
    "chars": 18119,
    "preview": "\"\"\"End-to-end integration tests for pip_indices support in unidep.\"\"\"\n\nimport os\nfrom pathlib import Path\nfrom textwrap "
  },
  {
    "path": "tests/test_pixi.py",
    "chars": 95955,
    "preview": "\"\"\"Tests for simple Pixi.toml generation.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport textwrap\nfrom itertoo"
  },
  {
    "path": "tests/test_project_dependency_handling.py",
    "chars": 4172,
    "preview": "\"\"\"Tests for the `project_dependency_handling` feature.\"\"\"\n\nfrom __future__ import annotations\n\nimport textwrap\nfrom typ"
  },
  {
    "path": "tests/test_pypi_alternatives/main_app/main_app/__init__.py",
    "chars": 167,
    "preview": "\"\"\"Main application module.\"\"\"\n\n\ndef main() -> str:\n    \"\"\"Run the main application logic.\"\"\"\n    from shared_lib import"
  },
  {
    "path": "tests/test_pypi_alternatives/main_app/pyproject.toml",
    "chars": 516,
    "preview": "[build-system]\nrequires = [\"hatchling\", \"unidep @ file:///Users/bas.nijholt/Work/unidep\"]\nbuild-backend = \"hatchling.bui"
  },
  {
    "path": "tests/test_pypi_alternatives/shared_lib/pyproject.toml",
    "chars": 249,
    "preview": "[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"test-shared-lib\"\nversion = "
  },
  {
    "path": "tests/test_pypi_alternatives/shared_lib/shared_lib/__init__.py",
    "chars": 134,
    "preview": "\"\"\"Shared library module.\"\"\"\n\n\ndef greet() -> str:\n    \"\"\"Return a greeting message.\"\"\"\n    return \"Hello from LOCAL sha"
  },
  {
    "path": "tests/test_pypi_alternatives/test_all_scenarios.sh",
    "chars": 2307,
    "preview": "#!/bin/bash\n# Test PyPI alternatives feature in different scenarios\n\nset -e  # Exit on error\n\necho \"=== Testing PyPI Alt"
  },
  {
    "path": "tests/test_pypi_alternatives.py",
    "chars": 34734,
    "preview": "\"\"\"Test PyPI alternatives for local dependencies.\"\"\"\n\nfrom __future__ import annotations\n\nimport sys\nimport textwrap\nfro"
  },
  {
    "path": "tests/test_pypi_alternatives_errors.py",
    "chars": 10710,
    "preview": "\"\"\"Test error cases and special scenarios for PyPI alternatives.\"\"\"\n\nfrom __future__ import annotations\n\nimport textwrap"
  },
  {
    "path": "tests/test_pypi_alternatives_integration.py",
    "chars": 8143,
    "preview": "\"\"\"Integration tests for PyPI alternatives in local dependencies.\"\"\"\n\nfrom __future__ import annotations\n\nimport shutil\n"
  },
  {
    "path": "tests/test_setuptools_integration.py",
    "chars": 4398,
    "preview": "\"\"\"Tests for setuptools integration.\"\"\"\n\nimport textwrap\nfrom pathlib import Path\nfrom unittest.mock import patch\n\nimpor"
  },
  {
    "path": "tests/test_unidep.py",
    "chars": 84372,
    "preview": "\"\"\"unidep tests.\"\"\"\n\nfrom __future__ import annotations\n\nimport textwrap\nfrom pathlib import Path, PureWindowsPath\nfrom "
  },
  {
    "path": "tests/test_utils.py",
    "chars": 14423,
    "preview": "\"\"\"Tests for the unidep.utils module.\"\"\"\n\nfrom __future__ import annotations\n\nimport importlib.metadata\nimport sys\nfrom "
  },
  {
    "path": "tests/test_version_conflicts.py",
    "chars": 5647,
    "preview": "\"\"\"Tests for the version conflict resolution logic.\"\"\"\n\nfrom __future__ import annotations\n\nimport pytest\n\nfrom unidep._"
  },
  {
    "path": "unidep/__init__.py",
    "chars": 705,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\"\"\"\n\nfrom unidep._conda_env import (\n    create_conda_env_spec"
  },
  {
    "path": "unidep/_cli.py",
    "chars": 64234,
    "preview": "#!/usr/bin/env python3\n\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module provides a command-line t"
  },
  {
    "path": "unidep/_conda_env.py",
    "chars": 11187,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nConda environment file generation functions.\n\"\"\"\n\nfrom __fut"
  },
  {
    "path": "unidep/_conda_lock.py",
    "chars": 21814,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module provides the `unidep conda-lock` CLI command, us"
  },
  {
    "path": "unidep/_conflicts.py",
    "chars": 12812,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nVerion conflict detections and resolution.\n\"\"\"\n\nfrom __futur"
  },
  {
    "path": "unidep/_dependencies_parsing.py",
    "chars": 38319,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module provides parsing of `requirements.yaml` and `pyp"
  },
  {
    "path": "unidep/_dependency_selection.py",
    "chars": 22536,
    "preview": "\"\"\"Shared conda/pip dependency selection for CLI-facing outputs.\"\"\"\n\nfrom __future__ import annotations\n\nimport math\nfro"
  },
  {
    "path": "unidep/_hatch_integration.py",
    "chars": 1804,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module contains the Hatchling integration.\n\"\"\"\n\nfrom __"
  },
  {
    "path": "unidep/_pixi.py",
    "chars": 51200,
    "preview": "\"\"\"Pixi.toml generation with version constraint merging.\"\"\"\n\nfrom __future__ import annotations\n\nimport copy\nimport os\ni"
  },
  {
    "path": "unidep/_pytest_plugin.py",
    "chars": 3799,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nPytest plugin for running only tests of changed files.\n\nWARN"
  },
  {
    "path": "unidep/_setuptools_integration.py",
    "chars": 8755,
    "preview": "#!/usr/bin/env python3\n\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module provides setuptools integ"
  },
  {
    "path": "unidep/_version.py",
    "chars": 85,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\"\"\"\n\n__version__ = \"3.2.0\"\n"
  },
  {
    "path": "unidep/platform_definitions.py",
    "chars": 5124,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nTypes and definitions for platforms, selectors, and markers."
  },
  {
    "path": "unidep/py.typed",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "unidep/utils.py",
    "chars": 16760,
    "preview": "\"\"\"unidep - Unified Conda and Pip requirements management.\n\nThis module provides utility functions used throughout the p"
  }
]

About this extraction

This page contains the full source code of the basnijholt/unidep GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 101 files (863.1 KB), approximately 215.4k tokens, and a symbol index with 693 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!