[
  {
    "path": ".config/nextest.toml",
    "content": "[profile.default]\nretries = { backoff = \"exponential\", count = 2, delay = \"2s\", jitter = true }\nslow-timeout = { period = \"1m\", terminate-after = 3 }\nfail-fast = false\n"
  },
  {
    "path": ".github/CODE_OF_CONDUCT.md",
    "content": "The Soldeer project adheres to the\n[Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct).\nThis code of conduct describes the minimum behavior expected from all contributors.\n\nInstances of violations of the Code of Conduct can contact the project maintainers on the\n[Contributors' Telegram Chat](https://t.me/+tn6gOCJseD83OTZk).\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yml",
    "content": "name: 🐛 Bug Report\ndescription: Report an issue found in Soldeer.\nlabels: ['bug']\nbody:\n  - type: markdown\n    attributes:\n      value: |\n        Thanks for taking the time to report a bug!\n        Please fill out the sections below to help us reproduce and fix the bug as quickly as possible.\n  - type: checkboxes\n    attributes:\n      label: 'I have checked the following:'\n      options:\n        - label: 'I have searched the issues of this repository and believe that this is not a duplicate.'\n          required: true\n        - label: 'I have checked that the bug is reproducible with the latest version of Soldeer.'\n          required: true\n  - type: input\n    id: version\n    attributes:\n      label: Soldeer Version\n      description: What is the result of running `soldeer version` or `forge soldeer version`\n      placeholder: soldeer x.y.z\n    validations:\n      required: true\n  - type: textarea\n    id: what-happened\n    attributes:\n      label: What Happened?\n      description: Describe the issue you are experiencing. You can run `soldeer` commands with the `-vvv` flag to see debug logs.\n      placeholder: A clear and concise description of what the bug is.\n    validations:\n      required: true\n  - type: textarea\n    id: expected-behavior\n    attributes:\n      label: Expected Behavior\n      description: Describe what you expected to happen.\n      placeholder: A clear and concise description of what you expected to happen in such a case.\n    validations:\n      required: false\n  - type: textarea\n    id: reproduction\n    attributes:\n      label: Reproduction Steps\n      description: Provide a detailed list of steps to reproduce the issue.\n      placeholder: |\n        1. Insert the \"...\" options into the config file\n        2. Run the command `...`\n        3. Observe that ... 
happens\n    validations:\n      required: false\n  - type: textarea\n    id: configuration\n    attributes:\n      label: Configuration\n      description: Provide the relevant sections of your `foundry.toml` or `soldeer.toml` file\n      render: toml\n      placeholder: |\n        [soldeer]\n        # Insert the relevant configuration options here\n    validations:\n      required: false\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "content": "blank_issues_enabled: true\ncontact_links:\n  - name: Soldeer Contributors Telegram\n    url: https://t.me/+tn6gOCJseD83OTZk\n    about: Please ask and answer questions here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.yml",
    "content": "name: 💡 Feature Request\ndescription: Suggest a feature for Soldeer\nlabels: ['enhancement']\nbody:\n  - type: markdown\n    attributes:\n      value: |\n        Thanks for taking the time to suggest a feature!\n        Please fill out the sections below to help us understand your request.\n  - type: checkboxes\n    attributes:\n      label: 'I have checked the following:'\n      options:\n        - label: 'I have searched the issues of this repository and believe that this is not a duplicate.'\n          required: true\n  - type: textarea\n    id: problem\n    attributes:\n      label: Problem\n      description: What problem are you facing that you believe this feature would solve?\n      placeholder: A clear and concise description of what the problem is.\n    validations:\n      required: true\n  - type: textarea\n    id: solution\n    attributes:\n      label: Solution\n      description: Describe the solution you'd like to see.\n      placeholder: A clear and concise description of what you want to happen.\n    validations:\n      required: true\n  - type: textarea\n    id: context\n    attributes:\n      label: Additional Context\n      description: Add any other context or screenshots about the feature request here.\n    validations:\n      required: false\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/registry_request.yml",
    "content": "name: 📦 Registry Addition\ndescription: Suggest a missing package for the Soldeer registry.\nlabels: ['add-dependency']\nassignees: ['mario-eth']\nbody:\n  - type: markdown\n    attributes:\n      value: |\n        Thanks for taking the time to suggest a package for the Soldeer registry!\n        Please fill out the sections below to help us understand your request.\n  - type: checkboxes\n    attributes:\n      label: 'I have checked the following:'\n      options:\n        - label: 'I have searched the issues of this repository and believe that this is not a duplicate.'\n          required: true\n  - type: input\n    id: package-name\n    attributes:\n      label: Package Name\n      description: What is the name of the package you would like to see added to the registry?\n      placeholder: soldeer-package-name\n    validations:\n      required: true\n  - type: input\n    id: project-url\n    attributes:\n      label: Project URL\n      description: Provide a link to the package repository or documentation.\n      placeholder: https://github.com/...\n    validations:\n      required: true\n  - type: textarea\n    id: additional-context\n    attributes:\n      label: Additional Context\n      description: Add any context to help us understand why this package should be added.\n    validations:\n      required: false\n"
  },
  {
    "path": ".github/PULL_REQUEST_TEMPLATE.md",
    "content": "<!--\nBefore submitting a PR, please read https://github.com/mario-eth/soldeer/blob/main/CONTRIBUTING.md\n\n1. Give the PR a descriptive title.\n\n  Examples of good title:\n    - fix(core): missing validation for ...\n    - docs(commands): update doc-comments for command ...\n    - feat(core): add option to ...\n\n  Examples of bad title:\n    - fix #7123\n    - update docs\n    - fix bugs\n\n2. Ensure there is a related issue and it is referenced in the PR text (\"Closes #123\").\n3. Ensure there are tests that cover the changes.\n4. Ensure `cargo nextest run` passes.\n5. Ensure code is formatted with `cargo +nightly fmt -- --check`.\n6. Ensure `cargo +nightly clippy --all --all-targets --all-features -- -D warnings` passes.\n7. Open as a draft PR if your work is still in progress.\n-->\n"
  },
  {
    "path": ".github/dependabot.yml",
    "content": "version: 2\nupdates:\n  - package-ecosystem: \"github-actions\"\n    directory: \"/\"\n    # Check for updates every Monday\n    schedule:\n      interval: \"weekly\"\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release\n\npermissions:\n  pull-requests: write\n  contents: write\n\non:\n  push:\n    branches:\n      - main\n\njobs:\n  # Release unpublished packages.\n  release-plz-release:\n    name: Release-plz release\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - name: Generate GitHub token\n        uses: actions/create-github-app-token@v2\n        id: generate-token\n        with:\n          app-id: ${{ secrets.APP_ID }}\n          private-key: ${{ secrets.APP_PRIVATE_KEY }}\n      - name: Checkout repository\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n          token: ${{ steps.generate-token.outputs.token }}\n      - name: Install Rust toolchain\n        uses: dtolnay/rust-toolchain@stable\n      - name: Run release-plz\n        uses: release-plz/action@v0.5\n        with:\n          command: release\n        env:\n          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}\n          # CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}\n\n  # Create a PR with the new versions and changelog, preparing the next release.\n  release-plz-pr:\n    name: Release-plz PR\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n      pull-requests: write\n    concurrency:\n      group: release-plz-${{ github.ref }}\n      cancel-in-progress: false\n    steps:\n      - name: Generate GitHub token\n        uses: actions/create-github-app-token@v2\n        id: generate-token\n        with:\n          app-id: ${{ secrets.APP_ID }}\n          private-key: ${{ secrets.APP_PRIVATE_KEY }}\n      - name: Checkout repository\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n          token: ${{ steps.generate-token.outputs.token }}\n      - name: Install Rust toolchain\n        uses: dtolnay/rust-toolchain@stable\n      - name: Run release-plz\n        uses: release-plz/action@v0.5\n        with:\n          command: release-pr\n        
env:\n          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}\n          # CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/rust.yml",
    "content": "name: Rust\n\non:\n  push:\n    branches: ['main']\n  pull_request:\n\nenv:\n  CARGO_TERM_COLOR: always\n\njobs:\n  build-test:\n    strategy:\n      matrix:\n        platform: [ubuntu-latest, windows-latest, macos-latest]\n    runs-on: ${{ matrix.platform }}\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n      - uses: taiki-e/install-action@nextest\n      - name: Install Foundry\n        uses: foundry-rs/foundry-toolchain@v1\n      - name: Run tests\n        run: cargo nextest run\n\n  doctests:\n    runs-on: ubuntu-latest\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n      - uses: Swatinem/rust-cache@v2\n        with:\n          cache-on-failure: true\n      - run: cargo test --workspace --doc\n\n  feature-checks:\n    runs-on: ubuntu-latest\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n      - uses: taiki-e/install-action@cargo-hack\n      - uses: Swatinem/rust-cache@v2\n        with:\n          cache-on-failure: true\n      - name: cargo hack\n        run: cargo hack check --feature-powerset --depth 2\n\n  clippy:\n    runs-on: ubuntu-latest\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n        with:\n          components: clippy\n      - uses: Swatinem/rust-cache@v2\n        with:\n          cache-on-failure: true\n      - run: cargo clippy --workspace --all-targets --all-features\n        env:\n          RUSTFLAGS: -Dwarnings\n\n  docs:\n    runs-on: ubuntu-latest\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@nightly\n      - uses: Swatinem/rust-cache@v2\n        with:\n          cache-on-failure: true\n      - run: cargo doc --workspace --all-features --no-deps --document-private-items\n        env:\n          
RUSTDOCFLAGS: '--cfg docsrs -D warnings'\n\n  fmt:\n    runs-on: ubuntu-latest\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@nightly\n        with:\n          components: rustfmt\n      - run: cargo fmt --all --check\n"
  },
  {
    "path": ".gitignore",
    "content": "/target\ndependencies/\n.dependency_reading.toml\nremappings.txt\ncrawler/target/\n*.DS_Store*\npackage-lock.json\npackage.json\nrepositories.db\ncrawler/node_modules/\ncrawler/zipped/*\ncrawler/zipped/\nsrc/soldeer.toml\n*soldeer.lock\ntest/*\n!emptyfile\n!emptyfile2\ntest_push_sensitive\ntest_push_skip_sensitive\n.soldeer/"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n  \"git.ignoreLimitWarning\": true,\n  \"editor.formatOnSave\": true,\n  \"rust-analyzer.rustfmt.extraArgs\": [\"+nightly\"],\n  \"[rust]\": {\n    \"editor.defaultFormatter\": \"rust-lang.rust-analyzer\"\n  },\n  \"rust-analyzer.cargo.features\": \"all\"\n}\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n## [Unreleased]\n\n## `soldeer` - [0.11.0](https://github.com/mario-eth/soldeer/compare/v0.10.1...v0.11.0) - 2026-04-16\n\n### Fixed\n- *(commands)* do not init logging backend in the library crate ([#350](https://github.com/mario-eth/soldeer/pull/350))\n\n## `soldeer-commands` - [0.11.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.10.1...soldeer-commands-v0.11.0) - 2026-04-16\n\n### Fixed\n- *(commands)* do not init logging backend in the library crate ([#350](https://github.com/mario-eth/soldeer/pull/350))\n\n### Other\n- *(deps)* update dependencies ([#355](https://github.com/mario-eth/soldeer/pull/355))\n\n## `soldeer-core` - [0.11.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.10.1...soldeer-core-v0.11.0) - 2026-04-16\n\n### Other\n- *(deps)* update dependencies ([#355](https://github.com/mario-eth/soldeer/pull/355))\n- *(install)* concurrent subdependencies install ([#352](https://github.com/mario-eth/soldeer/pull/352))\n\n## `soldeer-core` - [0.10.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.10.0...soldeer-core-v0.10.1) - 2026-02-16\n\n### Added\n- *(core)* support foundry.lock file ([#347](https://github.com/mario-eth/soldeer/pull/347))\n\n## `soldeer-core` - [0.10.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.9.0...soldeer-core-v0.10.0) - 2025-12-03\n\n### Added\n- *(config)* [**breaking**] allow to specify the project root path for dependencies ([#341](https://github.com/mario-eth/soldeer/pull/341))\n\n## `soldeer` - [0.9.0](https://github.com/mario-eth/soldeer/compare/v0.8.0...v0.9.0) - 2025-10-16\n\n### Other\n- update Cargo.lock dependencies\n\n## `soldeer-commands` - 
[0.9.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.8.0...soldeer-commands-v0.9.0) - 2025-10-16\n\n### Added\n- detect project root ([#333](https://github.com/mario-eth/soldeer/pull/333))\n- *(commands)* add `soldeer clean` command ([#332](https://github.com/mario-eth/soldeer/pull/332))\n\n## `soldeer-core` - [0.9.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.8.0...soldeer-core-v0.9.0) - 2025-10-16\n\n### Added\n- detect project root ([#333](https://github.com/mario-eth/soldeer/pull/333))\n\n### Other\n- *(deps)* update deps ([#336](https://github.com/mario-eth/soldeer/pull/336))\n\n## `soldeer-commands` - [0.8.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.7.1...soldeer-commands-v0.8.0) - 2025-09-29\n\n### Added\n- add support for private packages ([#327](https://github.com/mario-eth/soldeer/pull/327))\n\n## `soldeer-core` - [0.8.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.7.1...soldeer-core-v0.8.0) - 2025-09-29\n\n### Added\n- add support for private packages ([#327](https://github.com/mario-eth/soldeer/pull/327))\n\n## `soldeer-core` - [0.7.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.7.0...soldeer-core-v0.7.1) - 2025-09-19\n\n### Fixed\n- *(core)* install git submodules ([#328](https://github.com/mario-eth/soldeer/pull/328))\n\n## `soldeer` - [0.7.0](https://github.com/mario-eth/soldeer/compare/v0.6.1...v0.7.0) - 2025-09-02\n\n### Other\n- rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319))\n\n## `soldeer-commands` - [0.7.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.6.1...soldeer-commands-v0.7.0) - 2025-09-02\n\n### Added\n- *(registry)* use new API endpoints ([#318](https://github.com/mario-eth/soldeer/pull/318))\n- add support for CLI tokens ([#311](https://github.com/mario-eth/soldeer/pull/311))\n\n### Fixed\n- *(cmd)* avoid panicking if logger was already initialized 
([#312](https://github.com/mario-eth/soldeer/pull/312))\n\n### Other\n- rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319))\n\n## `soldeer-core` - [0.7.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.6.1...soldeer-core-v0.7.0) - 2025-09-02\n\n### Added\n- *(registry)* use new API endpoints ([#318](https://github.com/mario-eth/soldeer/pull/318))\n- add support for CLI tokens ([#311](https://github.com/mario-eth/soldeer/pull/311))\n\n### Fixed\n- *(cmd)* avoid panicking if logger was already initialized ([#312](https://github.com/mario-eth/soldeer/pull/312))\n\n### Other\n- rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319))\n\n## `soldeer-core` - [0.6.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.6.0...soldeer-core-v0.6.1) - 2025-07-23\n\n### Other\n- add nix flake and fix clippy ([#301](https://github.com/mario-eth/soldeer/pull/301))\n- remove bzip2 support ([#298](https://github.com/mario-eth/soldeer/pull/298))\n\n## `soldeer` - [0.6.0](https://github.com/mario-eth/soldeer/compare/v0.5.4...v0.6.0) - 2025-07-10\n\n### Other\n- update Cargo.lock dependencies\n\n## `soldeer-commands` - [0.6.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.5.4...soldeer-commands-v0.6.0) - 2025-07-10\n\n### Added\n- *(commands)* if adding a dependency which is already present, re-install all ([#289](https://github.com/mario-eth/soldeer/pull/289))\n\n### Fixed\n- *(core)* recursive subdependencies install ([#288](https://github.com/mario-eth/soldeer/pull/288))\n- *(commands)* canonicalize path in push command ([#284](https://github.com/mario-eth/soldeer/pull/284))\n\n## `soldeer-core` - [0.6.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.5.4...soldeer-core-v0.6.0) - 2025-07-10\n\n### Added\n- *(core)* remove forge requirement for recursive install ([#281](https://github.com/mario-eth/soldeer/pull/281))\n\n### Fixed\n- *(core)* recursive subdependencies install 
([#288](https://github.com/mario-eth/soldeer/pull/288))\n- *(commands)* canonicalize path in push command ([#284](https://github.com/mario-eth/soldeer/pull/284))\n\n## `soldeer` - [0.5.4](https://github.com/mario-eth/soldeer/compare/v0.5.3...v0.5.4) - 2025-04-27\n\n### Other\n- update Cargo.lock dependencies\n\n## `soldeer-core` - [0.5.4](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.5.3...soldeer-core-v0.5.4) - 2025-04-27\n\n### Fixed\n- *(registry)* version resolution when no SemVer ([#271](https://github.com/mario-eth/soldeer/pull/271))\n\n## `soldeer` - [0.5.3](https://github.com/mario-eth/soldeer/compare/v0.5.2...v0.5.3) - 2025-03-18\n\n### Changed\n\n- fix(core): remove hardcoded git domains by @puuuuh in https://github.com/mario-eth/soldeer/pull/244\n- refactor!: logging by @beeb in https://github.com/mario-eth/soldeer/pull/242\n- fix(push): ensure version is non-empty when pushing to registry by @kubkon in https://github.com/mario-eth/soldeer/pull/247\n- feat!: improve toml validation by @beeb in https://github.com/mario-eth/soldeer/pull/248\n- chore(deps): update deps by @beeb in https://github.com/mario-eth/soldeer/pull/257\n\n## `soldeer` - [0.5.2](https://github.com/mario-eth/soldeer/compare/v0.5.1...v0.5.2) - 2024-11-21\n\n### Changed\n\n- fix(core): gitignore config for integrity checksum by @beeb in #233\n\n## `soldeer` - [0.5.1](https://github.com/mario-eth/soldeer/compare/v0.5.0...v0.5.1) - 2024-11-13\n\n### Changed\n\n- fix(core): keep duplicate and orphan remappings by @beeb in #226\n\n## `soldeer` - [0.5.0](https://github.com/mario-eth/soldeer/compare/v0.4.1...v0.5.0) - 2024-11-07\n\n### Changed\n\n- 185 add cli args to skip interaction for all commands by @mario-eth in #218\n\n## `soldeer` - [0.4.1](https://github.com/mario-eth/soldeer/compare/v0.4.0...v0.4.1) - 2024-10-11\n\n### Changed\n\n- updated readme by @mario-eth in #209\n- fix(core): all commands add the `[dependencies]` table in config if m… by @mario-eth in #214\n- Add 
core version by @mario-eth in #210\n\n\n## `soldeer` - [0.4.0](https://github.com/mario-eth/soldeer/compare/v0.3.4...v0.4.0) - 2024-10-07\n\n### Changed\n\n- refactor!: v0.4.0 main rewrite by @beeb in #150\n- docs(core): document `auth` and `config` modules by @beeb in #175\n- feat: format multiline remappings array by @beeb in #174\n- docs(core): add documentation by @beeb in #177\n- docs(core): add documentation by @beeb in #178\n- docs(core): update and utils modules by @beeb in #179\n- test(commands): init integration tests by @beeb in #180\n- refactor!: minor refactor and integration tests by @beeb in #186\n- test(commands): add integration test (install/uninstall) by @beeb in #190\n- feat(core): improve remappings matching by @beeb in #191\n- fix(core): updating git dependencies by @beeb in #192\n- feat(commands): update libs in foundry config during init by @beeb in #193\n- refactor: remove all unwraps by @beeb in #194\n- ci: speed up test by using cargo-nextest by @beeb in #196\n- perf: lock-free synchronization, add rayon by @crypdoughdoteth in #198\n- feat(cli): add banner by @xyizko in #199\n- refactor: use new syntax for bon builders by @beeb in #200\n- ci: add nextest config by @beeb in #201\n- test(commands): integration tests for push by @beeb in #197\n- fix(core): `path_matches` semver comparison by @beeb in #205\n- fix(cli): respect environment and tty preference for color by @beeb in #206\n- test(commands): fix tests when run with `cargo test` by @beeb in #207\n\n## `soldeer` - [0.3.4](https://github.com/mario-eth/soldeer/compare/v0.3.3...v0.3.4) - 2024-09-04\n\n### Changed\n\n- Moving the canonicalization to respect windows slashing by @mario-eth in #172\n\n## `soldeer` - [0.3.3](https://github.com/mario-eth/soldeer/compare/v0.3.2...v0.3.3) - 2024-09-04\n\n### Changed\n\n- chore(deps): bump zip-extract to 0.2.0 by @DaniPopes in #161\n- fix(config): preserve existing remappings by @beeb in #171\n\n## `soldeer` - 
[0.3.2](https://github.com/mario-eth/soldeer/compare/v0.3.1...v0.3.2) - 2024-08-29\n\n### Changed\n\n- hotfix os independent bytes by @mario-eth in #163\n- remappings_generated -> remappings_generate typo by @0xCalibur in #164\n- fix(utils): always consider relative path in hashing by @beeb in #168\n\n## `soldeer` - [0.3.1](https://github.com/mario-eth/soldeer/compare/v0.3.0...v0.3.1) - 2024-08-27\n\n### Changed\n\n- Hotfix on OS independent bytes on hashing\n\n## `soldeer` - [0.3.0](https://github.com/mario-eth/soldeer/compare/v0.2.19...v0.3.0) - 2024-08-27\n\n### Changed\n\n- Updated readme and version by @mario-eth in #104\n- 89 add soldeer uninstall by @mario-eth in #105\n- Feat/soldeer init by @Solthodox in #56\n- style(fmt): update formatter configuration and improve consistency by @beeb in #111\n- refactor!: cleanup, more idiomatic rust by @beeb in #113\n- perf(lock): better handling of missing lockfile by @beeb in #114\n- refactor!: big rewrite by @beeb in #118\n- fix(config)!: fix remappings logic and logging by @beeb in #125\n- chore: update deps and remove serde_derive by @beeb in #129\n- Handling dependency name sanitization by @mario-eth in #127\n- fix: parallel downloads order by @beeb in #133\n- Recursive Dependencies by @mario-eth in #136\n- Removing transform git to http by @mario-eth in #137\n- Hotfixes and extra tests before 0.3.0 by @mario-eth in #139\n- Hotfixes after refactor and extra tests by @mario-eth in #141\n- feat: add integrity checksum to lockfile by @beeb in #132\n- chore: update logo by @beeb in #143\n- chore: enable some more lints by @DaniPopes in #160\n- chore(deps): replace simple-home-dir with home by @DaniPopes in #157\n- chore: remove unused dev dep env_logger by @DaniPopes in #159\n- chore(deps): replace `once_cell` with `std::sync` by @DaniPopes in #158\n- Using git branch/tag to pull dependencies by @mario-eth in #147\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "## Contributing to Soldeer\n\nThanks for your interest in improving Soldeer!\n\nThere are multiple opportunities to contribute at any level. It doesn't matter if you are just getting started with Rust\nor are the most weathered expert, we can use your help.\n\nThis document will help you get started. **Do not let the document intimidate you**.\nIt should be considered as a guide to help you navigate the process.\n\nThe [Contributors' Telegram Chat][telegram] is available for any concerns you may have that are\nnot covered in this guide.\n\n### Code of Conduct\n\nThe Soldeer project adheres to the [Rust Code of Conduct][rust-coc]. This code of conduct describes the _minimum_\nbehavior expected from all contributors.\n\nInstances of violations of the Code of Conduct can contact the project maintainers on the\n[Contributors' Telegram Chat][telegram].\n\n### Ways to contribute\n\nThere are fundamentally four ways an individual can contribute:\n\n1. **By opening an issue:** For example, if you believe that you have uncovered a bug\n   in Soldeer, creating a new issue in the issue tracker is the way to report it.\n2. **By adding context:** Providing additional context to existing issues,\n   such as screenshots and code snippets, which help resolve issues.\n3. **By resolving issues:** Typically this is done in the form of either\n   demonstrating that the issue reported is not a problem after all, or more often,\n   by opening a pull request that fixes the underlying problem, in a concrete and\n   reviewable manner.\n\n**Anybody can participate in any stage of contribution**. 
We urge you to participate in the discussion\naround bugs and participate in reviewing PRs.\n\n### Contributions Related to Spelling and Grammar\n\nAt this time, we will not be accepting contributions that only fix spelling or grammatical errors in documentation, code\nor elsewhere.\n\n### Asking for help\n\nIf you have reviewed existing documentation and still have questions, or you are having problems, you can get help in\nthe following ways:\n\n- **Asking in the support Telegram:** The [Soldeer Support Telegram][telegram] is a fast and easy way to ask questions.\n\nAs Soldeer is still in heavy development, the documentation can be a bit scattered.\n\n### Submitting a bug report\n\nWhen filing a new bug report in the issue tracker, you will be presented with a basic form to fill out.\n\nIf you believe that you have uncovered a bug, please fill out the form to the best of your ability. Do not worry if you\ncannot answer every detail; just fill in what you can. Contributors will ask follow-up questions if something is\nunclear.\n\nThe most important pieces of information we need in a bug report are:\n\n- The Soldeer version you are on (and that it is up to date)\n- The platform you are on (Windows, macOS, an M1 Mac or Linux)\n- Code snippets if this is happening in relation to testing or building code\n- Concrete steps to reproduce the bug\n\nIn order to rule out the possibility of the bug being in your project, the code snippets should be as minimal\nas possible. 
It is better if you can reproduce the bug with a small snippet as opposed to an entire project!\n\nSee [this guide][mcve] on how to create a minimal, complete, and verifiable example.\n\n### Submitting a feature request\n\nWhen adding a feature request in the issue tracker, you will be presented with a basic form to fill out.\n\nPlease include as detailed of an explanation as possible of the feature you would like, adding additional context if\nnecessary.\n\nIf you have examples of other tools that have the feature you are requesting, please include them as well.\n\n### Resolving an issue\n\nPull requests are the way concrete changes are made to the code, documentation, and dependencies of Soldeer.\n\nPlease also make sure that the following commands pass if you have changed the code:\n\n```sh\ncargo check --all\ncargo test --all --all-features\ncargo +nightly fmt -- --check\ncargo +nightly clippy --all --all-targets --all-features -- -D warnings\n```\n\nIf you are working in VSCode, we recommend you install the [rust-analyzer](https://rust-analyzer.github.io/) extension,\nand use the following VSCode user settings:\n\n```json\n\"editor.formatOnSave\": true,\n\"rust-analyzer.rustfmt.extraArgs\": [\"+nightly\"],\n\"[rust]\": {\n  \"editor.defaultFormatter\": \"rust-lang.rust-analyzer\"\n}\n```\n\n#### Adding tests\n\nIf the change being proposed alters code, it is either adding new functionality to Soldeer, or fixing existing, broken\nfunctionality.\nIn both of these cases, the pull request should include one or more tests to ensure that Soldeer does not regress\nin the future.\n\nTypes of tests include:\n\n- **Unit tests**: Functions which have very specific tasks should be unit tested.\n- **Integration tests**: For general purpose, far reaching functionality, integration tests should be added.\n  The best way to add a new integration test is to look at existing ones and follow the style.\n\n#### Commits\n\nIt is a recommended best practice to keep your changes as 
logically grouped as possible within individual commits. There\nis no limit to the number of commits any single pull request may have, and many contributors find it easier to review\nchanges that are split across multiple commits.\n\nThat said, if you have a number of commits that are \"checkpoints\" and don't represent a single logical change, please\nsquash those together.\n\nPlease adhere to the [Conventional Commits][conventional-commits] format for commit messages\nand PR titles. Prefer all-lowercase descriptions when possible.\n\nThe following types should be used:\n\n- **build**: changes that affect the build system or external dependencies (example scope: cargo)\n- **chore**: tool configuration, metadata, manifest changes, dependencies updates, miscellaneous changes (anything that doesn't fit the other types)\n- **ci**: changes to the CI configuration files and scripts (GitHub Actions)\n- **docs**: documentation-only changes (doc comments, mdbook)\n- **feat**: a new feature\n- **fix**: a bug fix\n- **perf**: a code change that improves performance\n- **refactor**: a code change that neither fixes a bug nor adds a feature\n- **revert**: reverting an older commit or change\n- **style**: changes that do not affect the meaning of the code (whitespace, formatting, etc.)\n- **test**: adding or modifying tests (no change to lib/binary source code allowed)\n\n#### Opening the pull request\n\nFrom within GitHub, opening a new pull request will present you with a template that should be filled out. Please try\nyour best at filling out the details, but feel free to skip parts if you're not sure what to put.\n\nMake sure to use the [Conventional Commits][conventional-commits] format described above for\nyour PR title.\n\n#### Discuss and update\n\nYou will probably get feedback or requests for changes to your pull request.\nThis is a big part of the submission process, so don't be discouraged! 
Some contributors may sign off on the pull\nrequest right away, others may have more detailed comments or feedback.\nThis is a necessary part of the process in order to evaluate whether the changes are correct and necessary.\n\n**Any community member can review a PR, so you might get conflicting feedback**.\nKeep an eye out for comments from code owners to provide guidance on conflicting feedback.\n\n#### Reviewing pull requests\n\n**Any Soldeer community member is welcome to review any pull request**.\n\nAll contributors who choose to review and provide feedback on pull requests have a responsibility to both the project\nand individual making the contribution. Reviews and feedback must be helpful, insightful, and geared towards improving\nthe contribution as opposed to simply blocking it. If there are reasons why you feel the PR should not be merged,\nexplain what those are. Do not expect to be able to block a PR from advancing simply because you say \"no\" without\ngiving an explanation. Be open to having your mind changed. Be open to working _with_ the contributor to make the pull\nrequest better.\n\nReviews that are dismissive or disrespectful of the contributor or any other reviewers are strictly counter to the Code\nof Conduct.\n\nWhen reviewing a pull request, the primary goals are for the codebase to improve and for the person submitting the\nrequest to succeed.\n**Even if a pull request is not merged, the submitter should come away from the experience feeling like their effort was not unappreciated**.\nEvery PR from a new contributor is an opportunity to grow the community.\n\n##### Review a bit at a time\n\nDo not overwhelm new contributors.\n\nIt is tempting to micro-optimize and make everything about relative performance, perfect grammar, or exact style\nmatches. Do not succumb to that temptation..\n\nFocus first on the most significant aspects of the change:\n\n1. Does this change make sense for Soldeer?\n2. 
Does this change make Soldeer better, even if only incrementally?\n3. Are there clear bugs or larger scale issues that need attending?\n4. Are the commit messages readable and correct? If it contains a breaking change, is it clear enough?\n\nNote that only **incremental** improvement is needed to land a PR. This means that the PR does not need to be perfect,\nonly better than the status quo. Follow-up PRs may be opened to continue iterating.\n\nWhen changes are necessary, _request_ them, do not _demand_ them, and\n**do not assume that the submitter already knows how to add a test or run a benchmark**.\n\nSpecific performance optimization techniques, coding styles and conventions change over time. The first impression you\ngive to a new contributor never does.\n\nNits (requests for small changes that are not essential) are fine, but try to avoid stalling the pull request. Most nits\ncan typically be fixed by the Soldeer maintainers merging the pull request, but they can also be an opportunity for the\ncontributor to learn a bit more about the project.\n\nIt is always good to clearly indicate nits when you comment, e.g.:\n`Nit: change foo() to bar(). But this is not blocking`.\n\nIf your comments were addressed but were not folded after new commits, or if they proved to be mistaken, please,\n[hide them][hiding-a-comment] with the appropriate reason to keep the conversation flow concise and relevant.\n\n##### Be aware of the person behind the code\n\nBe aware that _how_ you communicate requests and reviews in your feedback can have a significant impact on the success\nof the pull request. Yes, we may merge a particular change that makes Soldeer better, but the individual might just not\nwant to have anything to do with Soldeer ever again. 
The goal is not just having good code.\n\n##### Abandoned or stale pull requests\n\nIf a pull request appears to be abandoned or stalled, it is polite to first check with the contributor to see if they\nintend to continue the work before checking if they would mind if you took it over (especially if it just has nits\nleft). When doing so, it is courteous to give the original contributor credit for the work they started, either by\npreserving their name and e-mail address in the commit log, or by using the `Author: ` or `Co-authored-by: ` metadata\ntag in the commits.\n\n_Adapted from the [ethers-rs contributing guide](https://github.com/gakonst/ethers-rs/blob/master/CONTRIBUTING.md)_.\n\n[telegram]: https://t.me/+tn6gOCJseD83OTZk\n[rust-coc]: https://www.rust-lang.org/policies/code-of-conduct\n[mcve]: https://stackoverflow.com/help/mcve\n[hiding-a-comment]: https://help.github.com/articles/managing-disruptive-comments/#hiding-a-comment\n[conventional-commits]: https://www.conventionalcommits.org/en/v1.0.0\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[workspace]\nmembers = [\"crates/cli\", \"crates/core\", \"crates/commands\"]\nresolver = \"2\"\n\n[workspace.package]\nauthors = [\"m4rio\"]\ncategories = [\"development-tools\"]\ndescription = \"A minimal Solidity package manager written in Rust, best used with Foundry\"\nedition = \"2024\"\nexclude = [\"tests/\"]\nhomepage = \"https://soldeer.xyz\"\nkeywords = [\"solidity\", \"package-manager\", \"foundry\"]\nlicense = \"MIT\"\nreadme = \"./README.md\"\nrepository = \"https://github.com/mario-eth/soldeer\"\nrust-version = \"1.88\"\nversion = \"0.11.0\"\n\n[workspace.lints.clippy]\ndbg-macro = \"warn\"\nmanual-string-new = \"warn\"\nuninlined-format-args = \"warn\"\nuse-self = \"warn\"\nredundant-clone = \"warn\"\nunwrap_used = \"warn\"\n\n[workspace.lints.rust]\nrust-2018-idioms = \"warn\"\nunreachable-pub = \"warn\"\nunused-must-use = \"warn\"\nredundant-lifetimes = \"warn\"\n\n[workspace.dependencies]\nbon = \"3.0.0\"\nclap = { version = \"4.5.9\", features = [\"derive\"] }\ncliclack = \"0.5.4\"\nderive_more = { version = \"2.0.1\", features = [\"from\", \"display\", \"from_str\"] }\nlog = { version = \"0.4.25\", features = [\"kv\"] }\nmockito = \"1.5.0\"\npath-slash = \"0.2.1\"\nrayon = \"1.10.0\"\nreqwest = \"0.13.2\"\ntemp-env = { version = \"0.3.6\", features = [\"async_closure\"] }\ntestdir = \"0.10.0\"\nthiserror = \"2.0.3\"\ntokio = { version = \"1.38.0\", features = [\n    \"io-util\",\n    \"macros\",\n    \"process\",\n    \"rt-multi-thread\",\n] }\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2023 mario-eth\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# Soldeer ![Rust][rust-badge] [![License: MIT][license-badge]][license]\n\n[rust-badge]: https://img.shields.io/badge/Built%20with%20-Rust-e43716.svg\n[license]: https://opensource.org/licenses/MIT\n[license-badge]: https://img.shields.io/badge/License-MIT-blue.svg\n\n<p align=\"center\">\n  <img src=\"https://github.com/mario-eth/soldeer/raw/main/logo/soldeer_logo_outline_512.png\" />\n</p>\n\nSoldeer is a package manager for Solidity built in Rust and integrated into Foundry.\n\nSolidity development started to become more and more complex. The need for a package manager was evident.\nThis project was started to solve the following issues:\n\n- git submodules in Foundry are not a good solution for managing dependencies\n- npmjs was built for the JS ecosystem, not for Solidity\n- github versioning of the releases is a pain and not all the projects are using it correctly\n\n## Installation (Foundry)\n\nSoldeer is already integrated into Foundry. You can use it by running the following command:\n\n```bash\nforge soldeer [COMMAND]\n```\n\nTo check which version of Soldeer is packaged with your Foundry install, run `forge soldeer version`.\n\n## Installation (standalone)\n\nSoldeer is available on [crates.io](https://crates.io/crates/soldeer) and can be installed with:\n\n```bash\ncargo install soldeer\n```\n\n### Verify installation\n\n```bash\nsoldeer help\n```\n\n## Compile from Source\n\nClone this repository, then run `cargo build --release` inside the root.\n\nThe `soldeer` binary will be located inside the `target/release/` folder.\n\n## Usage\n\nCheck out the [usage guide](https://github.com/mario-eth/soldeer/blob/main/USAGE.md) or\n[Foundry Book](https://book.getfoundry.sh/projects/soldeer).\n\n## Changelog\n\nPlease see the [changelog](https://github.com/mario-eth/soldeer/blob/main/CHANGES.md) for more information about each release.\n\n## Contributing\n\nSee the [contribution 
guide](https://github.com/mario-eth/soldeer/blob/main/CONTRIBUTING.md) for more information.\n"
  },
  {
    "path": "USAGE.md",
    "content": "# Usage Guide\n\n`Soldeer` is straightforward to use. It can either be invoked from the `forge` tool provided by Foundry, or installed as\na standalone executable named `soldeer`.\n\nDependencies and configuration options can be specified inside Foundry's `foundry.toml` config file, or inside a\ndedicated `soldeer.toml` file.\n\nIn the following sections, commands can be prefixed with `forge` to use the built-in version packaged with Foundry.\n\n## Initializing a New Project\n\n```bash\n[forge] soldeer init [--clean]\n```\n\nThe `init` command can be used to set up a project for use with Soldeer. The command will generate or modify the\nproject's config file (`foundry.toml` or `soldeer.toml`) and perform optional removal of Foundry-style submodule\ndependencies with the `--clean` flag.\n\nThis command automatically adds the latest `forge-std` dependency to your project.\n\nNote that Soldeer installs dependencies into a folder named `dependencies`. There is currently no way to customize this\npath.\n\n## Adding Dependencies\n\n### From the Soldeer Registry\n\n```bash\n[forge] soldeer install <NAME>~<VERSION>\n```\n\nThis command searches the Soldeer registry at [https://soldeer.xyz](https://soldeer.xyz) for the specified dependency by\nname and version. 
If a match is found, a ZIP file containing the package source will be downloaded and unzipped into the\n`dependencies` directory.\n\nThe command also adds the dependency to the project's config file and creates the necessary\n[remappings](https://book.getfoundry.sh/projects/dependencies#remapping-dependencies) if configured to do so.\n\n#### Version Requirement\n\nThe `VERSION` argument is a version requirement string and can use operators and wildcards to match a range of versions.\nBy default, if no operator is provided, it defaults to `=` which means \"exactly this version\".\n\nExamples:\n\n```\n1.2.3         // exactly 1.2.3, equivalent to `=1.2.3`\n>=1.2.3       // any version greater than or equal to 1.2.3, including any 2.x version or more\n^1.2.3        // the patch and minor version can increase, but not the major\n1             // any version >=1.0.0 but <2.0.0\n1.2           // any version >=1.2.0 but <2.0.0\n~1.2.3        // only the patch number can increase\n>1.2.3,<1.4.0 // multiple requirements can be separated by a comma\n```\n\nNote that this only makes sense when used with the Soldeer registry, as it provides a list of available versions to\nselect from. Dependencies specified with a custom URL do not use the version requirement string in this way.\n\n### With a Custom URL\n\n#### ZIP file\n\n```bash\n[forge] soldeer install <NAME>~<VERSION> --url <ZIP_URL>\n```\n\nIf the URL to a ZIP file is provided, the registry is not used and the file is downloaded from the URL directly. Note\nthat a version must still be provided, but it can be freely chosen.\n\n#### Git Repository\n\n```bash\n[forge] soldeer install <NAME>~<VERSION> --git <GIT_URL>\n```\n\nIf the URL to a git repository is provided, then the repository will be cloned into the `dependencies` folder with the\n`git` CLI available on the system. 
HTTPS and SSH-style URLs are supported (see examples below).\n\nCloning a specific identifier can be done with the `--rev <COMMIT>`, `--branch <BRANCH>` or `--tag <TAG>` arguments. If\nomitted, then the default branch is checked out.\n\nSome examples:\n\n```bash\n[forge] soldeer install test-project~v1 --git git@github.com:test/test.git\n[forge] soldeer install test-project~v1 --git git@gitlab.com:test/test.git\n```\n\n```bash\n[forge] soldeer install test-project~v1 --git https://github.com/test/test.git\n[forge] soldeer install test-project~v1 --git https://gitlab.com/test/test.git\n```\n\n```bash\n[forge] soldeer install test-project~v1 --git git@github.com:test/test.git --rev 345e611cd84bfb4e62c583fa1886c1928bc1a464\n[forge] soldeer install test-project~v1 --git git@github.com:test/test.git --branch dev\n[forge] soldeer install test-project~v1 --git git@github.com:test/test.git --tag v1\n```\n\nNote that a version must still be provided, but it can be freely chosen.\n\n## Installing Existing Dependencies\n\n```bash\n[forge] soldeer install\n```\n\nWhen invoked without arguments, the `install` command installs the project's existing dependencies by looking at the\nconfiguration file (`soldeer.toml`/`foundry.toml`) and lockfile `soldeer.lock` if present.\n\nDependencies which are already present inside the `dependencies` folder are not downloaded again. For dependencies with\na version range specified in the config file, the exact version that is written in the lockfile is used, even if a newer\nversion exists on the registry. To update the lockfile to use the latest supported version, use `soldeer update`.\n\n### Recursive Installation\n\nWith the `--recursive-deps` flag, Soldeer will install the dependencies of each installed dependency, recursively. This\nis done internally by running `git submodule update --init --recursive` and/or installing Soldeer dependencies inside of\nthe dependency's folder. 
This behavior can also be enabled permanently via the config file.\n\n#### Specifying the Project Root for a Dependency\n\nIf recursive installation is enabled, Soldeer must find a `foundry.toml` or `soldeer.toml` config file within the\ndependency's directory to know which subdependencies to install.\n\nIn case that config file is not located at the root of the dependency's directory (meaning at the root of a git\nrepository or at the root of the zip file), then the path to the folder containing that file must be specified with\n`project_root`:\n\n```toml\n# foundry.toml\n[dependencies]\nmydep = { version = \"1.0.0\", project_root = \"contracts\" }\n\n[soldeer]\nrecursive_deps = true\n```\n\nThe path is a relative path, starting from the root of the dependency, to the folder containing the config file. You\nshould use forward slashes (`/`) as separator on all platforms.\n\n#### Note on Sub-Dependencies\n\nSince each dependency is free to use its own remappings, their resolution might become tricky in case of conflicting\nversions.\n\nFor example:\n\nWe have a project called `my-project` with the following dependencies:\n\n- `dependency~1`\n- `openzeppelin~5.0.2` with remapping `@openzeppelin/contracts/=dependencies/openzeppelin-5.0.2/`\n\nA contract inside `my-project` has the following import:\n\n```solidity\n@openzeppelin/contracts/token/ERC20/ERC20.sol\n```\n\nHowever, `dependency~1` also depends on `openzeppelin`, but it uses version `4.9.2` (with remapping\n`@openzeppelin/contracts/=dependencies/openzeppelin-4.9.2/`). The contract inside `dependency-1` has the same import\npath because they chose to use the same remappings path as `my-project`:\n\n```solidity\n@openzeppelin/contracts/token/ERC20/ERC20.sol\n```\n\nThis situation creates ambiguity. 
Furthermore, if `dependency~1` were to import a file that is no longer present in\n`v5`, the compiler would give an error.\n\nAs such, we recommend always including the version requirement string as part of the remappings path. The version\nrequirement string does not need to target a specific version, but could e.g. target a major version:\n\n```toml\n[profile.default]\nremappings = [\"@openzeppelin-contracts-5/=dependencies/@openzeppelin-contracts-5.0.2/contracts/\"]\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5\"\n```\n\n```solidity\nimport '@openzeppelin-contracts-5/token/ERC20/ERC20.sol';\n```\n\nThis approach should ensure that the correct version (or at least a compatible version) of the included file is used.\n\n## Updating Dependencies\n\n```bash\n[forge] soldeer update\n```\n\nFor dependencies from the online registry which specify a version range, the `update` command can be used to retrieve\nthe latest version that matches the requirements. The `soldeer.lock` lockfile is then updated accordingly. Remappings\nare automatically updated to the new version if Soldeer is configured to generate remappings.\n\nFor git dependencies which specify no identifier or a branch identifier, the `update` command checks out the latest\ncommit on the default or specified branch.\n\n## Removing a Dependency\n\n```bash\n[forge] soldeer uninstall <NAME>\n```\n\nThe `uninstall` command removes the dependency files and the corresponding entries in the config file, lockfile and remappings.\n\n## Publishing a Package to the Repository\n\n```bash\n[forge] soldeer push <NAME>~<VERSION>\n```\n\nIn order to push a new dependency to the repository, an account must first be created at\n[https://soldeer.xyz](https://soldeer.xyz). Then, a project with the dependency name must be created through the\nwebsite.\n\nFinally, the `[forge] soldeer login` command must be used to retrieve or provide an access token for the API. 
CLI tokens\ncan be generated on soldeer.xyz and should be preferred over using the email and password in the CLI, because email\nlogin will be removed in a future version of Soldeer. Alternatively, you can provide a valid CLI token via the\n`SOLDEER_API_TOKEN` environment variable.\n\nExample:\n\nCreate a project called `my-project` and then use the `[forge] soldeer push my-project~1.0.0`. This will push the\nproject to the repository as version `1.0.0` and make it available for anyone to use.\n\n### Specifying a Path\n\n```bash\n[forge] soldeer push <NAME>~<VERSION> [PATH]\n```\n\nIf the files to push are not located in the current directory, a path to the files can be provided.\n\n### Ignoring Files\n\nIf you want to ignore certain files from the published package, you need to create one or more `.soldeerignore` files\nthat must contain the patterns that you want to ignore. These files can be at any level of your directory structure.\nThey use the `.gitignore` syntax.\n\nAny file that matches a pattern present in `.gitignore` and `.ignore` files is also automatically excluded from the\npublished package.\n\n### Dry Run\n\n```bash\n[forge] soldeer push <NAME>~<VERSION> --dry-run\n```\n\nWith the `--dry-run` flag, the `push` command only creates a ZIP file containing the published package's content, but\ndoes not upload it to the registry. The file can then be inspected to check that the contents are suitable.\n\nWe recommend that everyone runs a dry-run before pushing a new dependency to avoid publishing unwanted files.\n\n**Warning** ⚠️\n\nYou risk pushing sensitive files to the central repository, where they can be seen by everyone. Make sure to\nexclude sensitive files in the `.soldeerignore` or `.gitignore` file.\n\nFurthermore, we've implemented a warning that gets triggered if the package contains any dotfile (a file with a name\nstarting with `.`). 
This warning can be ignored with `--skip-warnings`.\n\n## Configuration\n\nThe `foundry.toml`/`soldeer.toml` file can have a `[soldeer]` section to configure the tool's behavior.\n\nSee the default configuration below:\n\n```toml\n[soldeer]\n# whether Soldeer manages remappings\nremappings_generate = true\n\n# whether Soldeer re-generates all remappings when installing, updating or uninstalling deps\nremappings_regenerate = false\n\n# whether to suffix the remapping with the version requirement string: `name-a.b.c`\nremappings_version = true\n\n# a prefix to add to the remappings (\"@\" would give `@name`)\nremappings_prefix = \"\"\n\n# where to store the remappings (\"txt\" for `remappings.txt` or \"config\" for `foundry.toml`)\n# ignored when `soldeer.toml` is used as config (uses `remappings.txt`)\nremappings_location = \"txt\"\n\n# whether to install sub-dependencies or not. If true this will install the dependencies of dependencies recursively.\nrecursive_deps = false\n```\n\n## List of Available Commands\n\nFor more commands and their usage, see `[forge] soldeer --help` and `[forge] soldeer <COMMAND> --help`.\n\n## Remappings Caveats\n\nIf you use other dependency managers, such as git submodules or npm, ensure you don't duplicate dependencies between\nsoldeer and the other manager.\n\nRemappings targeting dependencies installed without Soldeer are not modified or removed when using Soldeer commands,\nunless the `--regenerate-remappings` flag is specified or the `remappings_regenerate = true` option is set.\n\n## Dependencies Maintenance\n\nThe vision for Soldeer is that major projects such as OpenZeppelin, Solady, Uniswap would start publishing their own\npackages to the Soldeer registry so that the community can easily include them and get timely updates.\n\nUntil this happens, the Soldeer maintenance team (currently m4rio.eth) will push the most popular dependencies to the\nrepository by relying on their npmjs or GitHub versions. 
We are using\n[an open-source crawler tool](https://github.com/mario-eth/soldeer-crawler) to crawl and push the dependencies under the\n`soldeer` organization.\n\nFor those who want an extra layer of security, the `soldeer.lock` file saves a `SHA-256` hash for each downloaded ZIP\nfile and the corresponding unzipped folder (see `soldeer_core::utils::hash_folder` to see how it gets generated). These\ncan be compared with the official releases to ensure the files were not manipulated.\n\n**For Project Maintainers**\n\nIf you want to move your project from the Soldeer organization and take care of pushing the versions to Soldeer\nyourself, please open an issue on GitHub or contact m4rio.eth on [X (formerly Twitter)](https://twitter.com/m4rio_eth).\n"
  },
  {
    "path": "clippy.toml",
    "content": "allow-unwrap-in-tests = true\n"
  },
  {
    "path": "crates/cli/Cargo.toml",
    "content": "[package]\nname = \"soldeer\"\ndescription.workspace = true\nauthors.workspace = true\ncategories.workspace = true\nedition.workspace = true\nexclude.workspace = true\nhomepage.workspace = true\nkeywords.workspace = true\nlicense.workspace = true\nreadme.workspace = true\nrepository.workspace = true\nrust-version.workspace = true\nversion.workspace = true\n\n[lints]\nworkspace = true\n\n[[bin]]\nname = \"soldeer\"\npath = \"src/main.rs\"\n\n[dependencies]\nenv_logger = { version = \"0.11.9\", features = [\"unstable-kv\"] }\nlog.workspace = true\nsoldeer-commands = { path = \"../commands\", version = \"0.11.0\" }\ntokio.workspace = true\nyansi = { version = \"1.0.1\", features = [\"detect-tty\", \"detect-env\"] }\n"
  },
  {
    "path": "crates/cli/src/main.rs",
    "content": "//! Soldeer is a package manager for Solidity projects\nuse std::env;\n\nuse log::Level;\nuse soldeer_commands::{Args, commands::Parser as _, run};\nuse yansi::{Condition, Paint as _};\n\nconst HAVE_COLOR: Condition = Condition(|| {\n    std::env::var_os(\"NO_COLOR\").is_none() &&\n        (Condition::CLICOLOR_LIVE)() &&\n        Condition::stdouterr_are_tty_live()\n});\n\n#[tokio::main]\nasync fn main() {\n    // disable colors if unsupported\n    yansi::whenever(HAVE_COLOR);\n    let args = Args::parse();\n    // setup logging\n    if env::var(\"RUST_LOG\").is_ok() {\n        env_logger::builder().init();\n    } else if let Some(level) = args.verbose.log_level() &&\n        level > Level::Error\n    {\n        // the user requested structured logging (-v[v*])\n        // init logger\n        env_logger::Builder::new().filter_level(args.verbose.log_level_filter()).init();\n    }\n    if !args.verbose.is_present() {\n        banner();\n    }\n    if let Err(err) = run(args.command, args.verbose).await {\n        eprintln!(\"{}\", err.to_string().red())\n    }\n}\n\n/// Generate and print a banner\nfn banner() {\n    println!(\n        \"{}\",\n        format!(\n            \"\n+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    ╔═╗╔═╗╦  ╔╦╗╔═╗╔═╗╦═╗       Solidity Package Manager\n    ╚═╗║ ║║   ║║║╣ ║╣ ╠╦╝\n    ╚═╝╚═╝╩═╝═╩╝╚═╝╚═╝╩╚═     github.com/mario-eth/soldeer\n           v{}                       soldeer.xyz\n+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\",\n            env!(\"CARGO_PKG_VERSION\")\n        )\n        .bright_cyan()\n    );\n}\n"
  },
  {
    "path": "crates/commands/Cargo.toml",
    "content": "[package]\nname = \"soldeer-commands\"\ndescription = \"High-level commands for the Soldeer CLI\"\nauthors.workspace = true\ncategories.workspace = true\nedition.workspace = true\nexclude.workspace = true\nhomepage.workspace = true\nkeywords.workspace = true\nlicense.workspace = true\nreadme.workspace = true\nrepository.workspace = true\nrust-version.workspace = true\nversion.workspace = true\n\n[lints]\nworkspace = true\n\n[dependencies]\nbon.workspace = true\nclap.workspace = true\nclap-verbosity-flag = \"3.0.2\"\ncliclack.workspace = true\nderive_more.workspace = true\nemail-address-parser = \"2.0.0\"\npath-slash.workspace = true\nrayon.workspace = true\nsoldeer-core = { path = \"../core\", version = \"0.11.0\" }\ntokio.workspace = true\n\n[dev-dependencies]\nmockito.workspace = true\nreqwest.workspace = true\ntemp-env.workspace = true\ntestdir.workspace = true\n\n[features]\nserde = [\"soldeer-core/serde\"]\n"
  },
  {
    "path": "crates/commands/src/commands/clean.rs",
    "content": "use crate::utils::success;\nuse clap::Parser;\nuse soldeer_core::{Result, config::Paths};\nuse std::fs;\n\n/// Clean downloaded dependencies and generated artifacts\n#[derive(Debug, Clone, Default, Parser, bon::Builder)]\n#[builder(on(String, into))]\n#[clap(after_help = \"For more information, read the README.md\")]\n#[non_exhaustive]\npub struct Clean {\n    // No options for basic implementation\n}\n\npub(crate) fn clean_command(paths: &Paths, _cmd: &Clean) -> Result<()> {\n    // Remove dependencies folder if it exists\n    if paths.dependencies.exists() {\n        fs::remove_dir_all(&paths.dependencies)?;\n        success!(\"Dependencies folder removed\");\n    }\n\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/init.rs",
    "content": "use crate::{\n    ConfigLocation,\n    utils::{Progress, remark, success},\n};\nuse clap::Parser;\nuse soldeer_core::{\n    Result,\n    config::{Paths, add_to_config, read_soldeer_config, update_config_libs},\n    install::{InstallProgress, ensure_dependencies_dir, install_dependency},\n    lock::add_to_lockfile,\n    registry::get_latest_version,\n    remappings::{RemappingsAction, edit_remappings},\n    utils::remove_forge_lib,\n};\nuse std::fs;\n\n/// Convert a Foundry project to use Soldeer\n#[derive(Debug, Clone, Default, Parser, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(ConfigLocation, into))]\n#[clap(after_help = \"For more information, read the README.md\")]\n#[non_exhaustive]\npub struct Init {\n    /// Clean the Foundry project by removing .gitmodules and the lib directory\n    #[arg(long, default_value_t = false)]\n    #[builder(default)]\n    pub clean: bool,\n\n    /// Specify the config location.\n    ///\n    /// This prevents prompting the user if the automatic detection can't determine the config\n    /// location.\n    #[arg(long, value_enum)]\n    pub config_location: Option<ConfigLocation>,\n}\n\npub(crate) async fn init_command(paths: &Paths, cmd: Init) -> Result<()> {\n    if cmd.clean {\n        remark!(\"Flag `--clean` was set, removing `lib` dir and submodules\");\n        remove_forge_lib(&paths.root).await?;\n    }\n    let config = read_soldeer_config(&paths.config)?;\n    success!(\"Done reading config\");\n    ensure_dependencies_dir(&paths.dependencies)?;\n    let dependency = get_latest_version(\"forge-std\").await?;\n    let (progress, monitor) = InstallProgress::new();\n    let bars = Progress::new(format!(\"Installing {dependency}\"), 1, monitor);\n    bars.start_all();\n    let lock = install_dependency(&dependency, None, &paths.dependencies, None, false, progress)\n        .await\n        .inspect_err(|e| {\n            bars.set_error(e);\n        })?;\n    
bars.stop_all();\n    add_to_config(&dependency, &paths.config)?;\n    let foundry_config = paths.root.join(\"foundry.toml\");\n    if foundry_config.exists() {\n        update_config_libs(foundry_config)?;\n    }\n    success!(\"Dependency added to config\");\n    add_to_lockfile(lock, &paths.lock)?;\n    success!(\"Dependency added to lockfile\");\n    edit_remappings(&RemappingsAction::Add(dependency), &config, paths)?;\n    success!(\"Dependency added to remappings\");\n\n    let gitignore_path = paths.root.join(\".gitignore\");\n    if gitignore_path.exists() {\n        let mut gitignore = fs::read_to_string(&gitignore_path)?;\n        if !gitignore.contains(\"dependencies\") {\n            gitignore.push_str(\"\\n\\n# Soldeer\\n/dependencies\\n\");\n            fs::write(&gitignore_path, gitignore)?;\n        }\n    }\n    success!(\"Added `dependencies` to .gitignore\");\n\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/install.rs",
    "content": "use super::validate_dependency;\nuse crate::{\n    ConfigLocation,\n    utils::{Progress, remark, success, warning},\n};\nuse clap::Parser;\nuse soldeer_core::{\n    Result,\n    config::{\n        Dependency, GitIdentifier, Paths, UrlType, add_to_config, read_config_deps,\n        read_soldeer_config,\n    },\n    errors::{InstallError, LockError},\n    install::{InstallProgress, ensure_dependencies_dir, install_dependencies, install_dependency},\n    lock::{add_to_lockfile, generate_lockfile_contents, read_lockfile},\n    remappings::{RemappingsAction, edit_remappings},\n};\nuse std::fs;\n\n/// Install a dependency\n#[derive(Debug, Clone, Default, Parser, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(ConfigLocation, into))]\n#[clap(\n    long_about = \"Install a dependency\n\nIf used with arguments, a dependency will be added to the configuration. When used without argument, installs all dependencies that are missing.\n\nExamples:\n- Install all: soldeer install\n- Add from registry: soldeer install lib_name~2.3.0\n- Add with custom URL: soldeer install lib_name~2.3.0 --url https://foo.bar/lib.zip\n- Add with git: soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git\n- Add with git (commit): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --rev 05f218fb6617932e56bf5388c3b389c3028a7b73\n- Add with git (tag): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --tag v2.3.0\n- Add with git (branch): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --branch feature/baz\",\n    after_help = \"For more information, read the README.md\"\n)]\n#[non_exhaustive]\npub struct Install {\n    /// The dependency name and version, separated by a tilde. 
The version is always required.\n    ///\n    /// If not present, this command will install all dependencies which are missing.\n    #[arg(value_parser = validate_dependency, value_name = \"DEPENDENCY~VERSION\")]\n    pub dependency: Option<String>,\n\n    /// The URL to the dependency zip file.\n    ///\n    /// Example: https://my-domain/dep.zip\n    #[arg(long = \"url\", requires = \"dependency\", conflicts_with = \"git_url\")]\n    pub zip_url: Option<String>,\n\n    /// The URL to the dependency repository.\n    ///\n    /// Example: git@github.com:foo/bar.git\n    #[arg(long = \"git\", requires = \"dependency\", conflicts_with = \"zip_url\")]\n    pub git_url: Option<String>,\n\n    /// A Git commit hash\n    #[arg(long, group = \"identifier\", requires = \"git_url\")]\n    pub rev: Option<String>,\n\n    /// A Git tag\n    #[arg(long, group = \"identifier\", requires = \"git_url\")]\n    pub tag: Option<String>,\n\n    /// A Git branch\n    #[arg(long, group = \"identifier\", requires = \"git_url\")]\n    pub branch: Option<String>,\n\n    /// If set, this command will delete the existing remappings and re-create them\n    #[arg(short = 'g', long, default_value_t = false)]\n    #[builder(default)]\n    pub regenerate_remappings: bool,\n\n    /// If set, this command will install dependencies recursively (via git submodules or via\n    /// soldeer)\n    #[arg(short = 'd', long, default_value_t = false)]\n    #[builder(default)]\n    pub recursive_deps: bool,\n\n    /// Perform a clean install by re-installing all dependencies\n    #[arg(long, default_value_t = false)]\n    #[builder(default)]\n    pub clean: bool,\n\n    /// Specify the config location without prompting.\n    ///\n    /// This prevents prompting the user if the automatic detection can't determine the config\n    /// location.\n    #[arg(long, value_enum)]\n    pub config_location: Option<ConfigLocation>,\n}\n\npub(crate) async fn install_command(paths: &Paths, cmd: Install) -> Result<()> {\n  
  let mut config = read_soldeer_config(&paths.config)?;\n    if cmd.regenerate_remappings {\n        config.remappings_regenerate = true;\n    }\n    if cmd.recursive_deps {\n        config.recursive_deps = true;\n    }\n    success!(\"Done reading config\");\n    ensure_dependencies_dir(&paths.dependencies)?;\n    let (dependencies, warnings) = read_config_deps(&paths.config)?;\n    for w in warnings {\n        warning!(format!(\"Config warning: {w}\"));\n    }\n\n    match &cmd.dependency {\n        None => {\n            let lockfile = read_lockfile(&paths.lock)?;\n            success!(\"Done reading lockfile\");\n            if cmd.clean {\n                remark!(\"Flag `--clean` was set, re-installing all dependencies\");\n                fs::remove_dir_all(&paths.dependencies).map_err(|e| InstallError::IOError {\n                    path: paths.dependencies.clone(),\n                    source: e,\n                })?;\n                ensure_dependencies_dir(&paths.dependencies)?;\n            }\n\n            let (progress, monitor) = InstallProgress::new();\n            let bars = Progress::new(\"Installing dependencies\", dependencies.len(), monitor);\n            bars.start_all();\n            let new_locks = install_dependencies(\n                &dependencies,\n                &lockfile.entries,\n                &paths.dependencies,\n                config.recursive_deps,\n                progress,\n            )\n            .await?;\n            bars.stop_all();\n            let new_lockfile_content = generate_lockfile_contents(new_locks);\n            if !lockfile.raw.is_empty() && new_lockfile_content != lockfile.raw {\n                warning!(\n                    \"Warning: the lock file is out of sync with the dependencies. 
Consider running `soldeer update` to re-generate the lockfile.\"\n                );\n            } else if lockfile.raw.is_empty() {\n                fs::write(&paths.lock, new_lockfile_content).map_err(LockError::IOError)?;\n            }\n            edit_remappings(&RemappingsAction::Update, &config, paths)?;\n            success!(\"Updated remappings\");\n        }\n        Some(dependency) => {\n            let identifier = match (&cmd.rev, &cmd.branch, &cmd.tag) {\n                (Some(rev), None, None) => Some(GitIdentifier::from_rev(rev)),\n                (None, Some(branch), None) => Some(GitIdentifier::from_branch(branch)),\n                (None, None, Some(tag)) => Some(GitIdentifier::from_tag(tag)),\n                (None, None, None) => None,\n                _ => unreachable!(\"clap should prevent this\"),\n            };\n            let url =\n                cmd.zip_url.as_ref().map(UrlType::http).or(cmd.git_url.as_ref().map(UrlType::git));\n            let mut dep = Dependency::from_name_version(dependency, url, identifier)?;\n            if dependencies\n                .iter()\n                .any(|d| d.name() == dep.name() && d.version_req() == dep.version_req())\n            {\n                remark!(format!(\"{dep} is already installed, running `install` instead\"));\n                Box::pin(install_command(\n                    paths,\n                    Install::builder()\n                        .regenerate_remappings(cmd.regenerate_remappings)\n                        .recursive_deps(cmd.recursive_deps)\n                        .clean(cmd.clean)\n                        .maybe_config_location(cmd.config_location)\n                        .build(),\n                ))\n                .await?;\n                return Ok(());\n            }\n            let (progress, monitor) = InstallProgress::new();\n            let bars = Progress::new(format!(\"Installing {dep}\"), 1, monitor);\n            bars.start_all();\n            let 
lock = install_dependency(\n                &dep,\n                None,\n                &paths.dependencies,\n                None,\n                config.recursive_deps,\n                progress,\n            )\n            .await?;\n            bars.stop_all();\n            // for git deps, we need to add the commit hash before adding them to the\n            // config, unless a branch/tag was specified\n            if let Some(git_dep) = dep.as_git_mut() &&\n                git_dep.identifier.is_none()\n            {\n                git_dep.identifier = Some(GitIdentifier::from_rev(\n                    &lock.as_git().expect(\"lock entry should be of type git\").rev,\n                ));\n            }\n            add_to_config(&dep, &paths.config)?;\n            success!(\"Dependency added to config\");\n            add_to_lockfile(lock, &paths.lock)?;\n            success!(\"Dependency added to lockfile\");\n            edit_remappings(&RemappingsAction::Add(dep), &config, paths)?;\n            success!(\"Dependency added to remappings\");\n        }\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/login.rs",
    "content": "use crate::utils::{info, remark, step, success, warning};\nuse clap::Parser;\nuse email_address_parser::{EmailAddress, ParsingOptions};\nuse path_slash::PathBufExt as _;\nuse soldeer_core::{\n    Result,\n    auth::{Credentials, check_token, execute_login, save_token},\n    errors::AuthError,\n};\nuse std::path::PathBuf;\n\n/// Log into the central repository to push packages\n///\n/// The credentials are saved by default into ~/.soldeer.\n/// If you want to overwrite that location, use the SOLDEER_LOGIN_FILE env var.\n#[derive(Debug, Clone, Default, Parser, bon::Builder)]\n#[builder(on(String, into))]\n#[clap(after_help = \"For more information, read the README.md\")]\n#[non_exhaustive]\npub struct Login {\n    /// Specify the email without prompting.\n    #[arg(long, conflicts_with = \"token\")]\n    pub email: Option<String>,\n\n    /// Specify the password without prompting.\n    #[arg(long, conflicts_with = \"token\")]\n    pub password: Option<String>,\n\n    /// Login with a token created via soldeer.xyz.\n    #[arg(long)]\n    pub token: Option<String>,\n}\n\npub(crate) async fn login_command(cmd: Login) -> Result<()> {\n    remark!(\"If you do not have an account, please visit soldeer.xyz to create one.\");\n\n    if let Some(token) = cmd.token {\n        let token = token.trim();\n        let username = check_token(token).await?;\n        let token_path = save_token(token)?;\n        info!(format!(\n            \"Token is valid for user {username} and was saved in: {}\",\n            PathBuf::from_slash_lossy(&token_path).to_string_lossy() /* normalize separators */\n        ));\n        return Ok(());\n    }\n\n    warning!(\n        \"The option to login via email and password will be removed in a future version of Soldeer. 
Please update your usage by either using `soldeer login --token [YOUR CLI TOKEN]` or passing the `SOLDEER_API_TOKEN` environment variable to the `push` command.\"\n    );\n\n    let email: String = match cmd.email {\n        Some(email) => {\n            if EmailAddress::parse(&email, Some(ParsingOptions::default())).is_none() {\n                return Err(AuthError::InvalidCredentials.into());\n            }\n            step!(format!(\"Email: {email}\"));\n            email\n        }\n        None => {\n            if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {\n                return Err(AuthError::TuiDisabled.into());\n            }\n            cliclack::input(\"Email address\")\n                .validate(|input: &String| {\n                    if input.is_empty() {\n                        Err(\"Email is required\")\n                    } else {\n                        match EmailAddress::parse(input, Some(ParsingOptions::default())) {\n                            None => Err(\"Invalid email address\"),\n                            Some(_) => Ok(()),\n                        }\n                    }\n                })\n                .interact()?\n        }\n    };\n\n    let password = match cmd.password {\n        Some(pw) => pw,\n        None => {\n            if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {\n                return Err(AuthError::TuiDisabled.into());\n            }\n            cliclack::password(\"Password\").mask('▪').interact()?\n        }\n    };\n\n    let token_path = execute_login(&Credentials { email, password }).await?;\n    success!(\"Login successful\");\n    info!(format!(\n        \"Token saved in: {}\",\n        PathBuf::from_slash_lossy(&token_path).to_string_lossy() /* normalize separators */\n    ));\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/mod.rs",
    "content": "pub use clap::{Parser, Subcommand};\nuse clap_verbosity_flag::{LogLevel, VerbosityFilter};\nuse derive_more::derive::From;\n\npub mod clean;\npub mod init;\npub mod install;\npub mod login;\npub mod push;\npub mod uninstall;\npub mod update;\n\n#[derive(Copy, Clone, Debug, Default)]\npub struct CustomLevel;\n\nimpl LogLevel for CustomLevel {\n    fn default_filter() -> VerbosityFilter {\n        VerbosityFilter::Error\n    }\n\n    fn verbose_help() -> Option<&'static str> {\n        Some(\"Use structured logging and increase verbosity\")\n    }\n\n    fn verbose_long_help() -> Option<&'static str> {\n        Some(\n            r#\"Use structured logging and increase verbosity\n\nPass multiple times to increase the logging level (e.g. -v, -vv, -vvv).\nIf omitted, then a pretty TUI output will be used.\nOtherwise:\n- 1 (-v): print logs with level error and warning\n- 2 (-vv): print logs with level info\n- 3 (-vvv): print logs with level debug\n- 4 (-vvvv): print logs with level trace\n\"#,\n        )\n    }\n\n    fn quiet_help() -> Option<&'static str> {\n        Some(\"Disable logs and output, or reduce verbosity\")\n    }\n}\n\n/// A minimal Solidity dependency manager\n#[derive(Parser, Debug, bon::Builder)]\n#[clap(name = \"soldeer\", author = \"m4rio.eth\", version)]\n#[non_exhaustive]\npub struct Args {\n    #[clap(subcommand)]\n    pub command: Command,\n\n    /// Test\n    #[command(flatten)]\n    pub verbose: clap_verbosity_flag::Verbosity<CustomLevel>,\n}\n\n/// The available commands for Soldeer\n#[derive(Debug, Clone, Subcommand, From)]\n#[non_exhaustive]\npub enum Command {\n    Init(init::Init),\n    Install(install::Install),\n    Update(update::Update),\n    Login(login::Login),\n    Push(push::Push),\n    Uninstall(uninstall::Uninstall),\n    Clean(clean::Clean),\n    Version(Version),\n}\n\n/// Display the version of Soldeer\n#[derive(Debug, Clone, Default, Parser)]\n#[non_exhaustive]\npub struct Version {}\n\nfn 
validate_dependency(dep: &str) -> std::result::Result<String, String> {\n    if dep.split('~').count() != 2 {\n        return Err(\"The dependency should be in the format <DEPENDENCY>~<VERSION>\".to_string());\n    }\n    Ok(dep.to_string())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/push.rs",
    "content": "use super::validate_dependency;\nuse crate::utils::{info, remark, success, warning};\nuse clap::Parser;\nuse soldeer_core::{\n    Result,\n    errors::PublishError,\n    push::{filter_ignored_files, push_version, validate_name, validate_version},\n    utils::{canonicalize_sync, check_dotfiles},\n};\nuse std::{env, path::PathBuf, sync::atomic::Ordering};\n\n/// Push a dependency to the repository\n#[derive(Debug, Clone, Parser, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(PathBuf, into))]\n#[clap(\n    long_about = \"Push a dependency to the soldeer.xyz repository.\n\nYou need to be logged in first (soldeer login) or provide the `SOLDEER_API_TOKEN` environment variable with a valid\nCLI token generated on soldeer.xyz.\n\nExamples:\n- Current directory: soldeer push mypkg~0.1.0\n- Custom directory: soldeer push mypkg~0.1.0 /path/to/dep\n- Dry run: soldeer push mypkg~0.1.0 --dry-run\n\nTo ignore certain files, create a `.soldeerignore` file in the root of the project and add the files you want to ignore. 
The `.soldeerignore` uses the same syntax as `.gitignore`.\",\n    after_help = \"For more information, read the README.md\"\n)]\n#[non_exhaustive]\npub struct Push {\n    /// The dependency name and version, separated by a tilde.\n    ///\n    /// This should always be used when you want to push a dependency to the central repository: `<https://soldeer.xyz>`.\n    #[arg(value_parser = validate_dependency, value_name = \"DEPENDENCY>~<VERSION\")]\n    pub dependency: String,\n\n    /// Use this if the package you want to push is not in the current directory.\n    ///\n    /// Example: `soldeer push mypkg~0.1.0 /path/to/dep`.\n    pub path: Option<PathBuf>,\n\n    /// If set, does not publish the package but generates a zip file that can be inspected.\n    #[arg(short, long, default_value_t = false)]\n    #[builder(default)]\n    pub dry_run: bool,\n\n    /// Use this if you want to skip the warnings that can be triggered when trying to push\n    /// dotfiles like .env.\n    #[arg(long, default_value_t = false)]\n    #[builder(default)]\n    pub skip_warnings: bool,\n}\n\npub(crate) async fn push_command(cmd: Push) -> Result<()> {\n    let path = cmd.path.unwrap_or(env::current_dir()?);\n    let path = canonicalize_sync(&path)?;\n\n    let files_to_copy: Vec<PathBuf> = filter_ignored_files(&path);\n\n    // Check for sensitive files or directories\n    if !cmd.dry_run &&\n        !cmd.skip_warnings &&\n        check_dotfiles(&files_to_copy) &&\n        !prompt_user_for_confirmation()?\n    {\n        return Err(PublishError::UserAborted.into());\n    }\n\n    if cmd.dry_run {\n        remark!(\"Running in dry-run mode, a zip file will be created for inspection\");\n    }\n\n    if cmd.skip_warnings {\n        warning!(\"Sensitive file warnings are being ignored as requested\");\n    }\n\n    let (dependency_name, dependency_version) =\n        cmd.dependency.split_once('~').expect(\"dependency string should have name and version\");\n\n    
validate_name(dependency_name)?;\n    validate_version(dependency_version)?;\n\n    if let Some(zip_path) =\n        push_version(dependency_name, dependency_version, path, &files_to_copy, cmd.dry_run).await?\n    {\n        info!(format!(\"Zip file created at {}\", zip_path.to_string_lossy()));\n    } else {\n        success!(\"Pushed to repository!\");\n    }\n    Ok(())\n}\n\n// Function to prompt the user for confirmation\nfn prompt_user_for_confirmation() -> Result<bool> {\n    remark!(\"You are about to include some sensitive files in this version\");\n    info!(\n        \"If you are not sure which files will be included, you can run the command with `--dry-run` and inspect the generated zip file.\"\n    );\n\n    if crate::TUI_ENABLED.load(Ordering::Relaxed) {\n        cliclack::confirm(\"Do you want to continue?\")\n            .interact()\n            .map_err(|e| PublishError::IOError { path: PathBuf::new(), source: e }.into())\n    } else {\n        Ok(true)\n    }\n}\n"
  },
  {
    "path": "crates/commands/src/commands/uninstall.rs",
    "content": "use crate::utils::success;\nuse clap::Parser;\nuse soldeer_core::{\n    Result, SoldeerError,\n    config::{Paths, delete_from_config, read_soldeer_config},\n    download::delete_dependency_files_sync,\n    lock::remove_lock,\n    remappings::{RemappingsAction, edit_remappings},\n};\n\n/// Uninstall a dependency\n#[derive(Debug, Clone, Parser, bon::Builder)]\n#[builder(on(String, into))]\n#[clap(after_help = \"For more information, read the README.md\")]\n#[non_exhaustive]\npub struct Uninstall {\n    /// The dependency name. Specifying a version is not necessary.\n    pub dependency: String,\n}\n\npub(crate) fn uninstall_command(paths: &Paths, cmd: &Uninstall) -> Result<()> {\n    let config = read_soldeer_config(&paths.config)?;\n    success!(\"Done reading config\");\n\n    // delete from the config file and return the dependency\n    let dependency = delete_from_config(&cmd.dependency, &paths.config)?;\n    success!(\"Dependency removed from config file\");\n\n    edit_remappings(&RemappingsAction::Remove(dependency.clone()), &config, paths)?;\n    success!(\"Dependency removed from remappings\");\n\n    // deleting the files\n    delete_dependency_files_sync(&dependency, &paths.dependencies)\n        .map_err(|e| SoldeerError::DownloadError { dep: dependency.to_string(), source: e })?;\n    success!(\"Dependency removed from disk\");\n\n    remove_lock(&dependency, &paths.lock)?;\n    success!(\"Dependency removed from lockfile\");\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/commands/update.rs",
    "content": "use crate::{\n    ConfigLocation,\n    utils::{Progress, success, warning},\n};\nuse clap::Parser;\nuse soldeer_core::{\n    Result,\n    config::{Paths, read_config_deps, read_soldeer_config},\n    errors::LockError,\n    install::{InstallProgress, ensure_dependencies_dir},\n    lock::{generate_lockfile_contents, read_lockfile},\n    remappings::{RemappingsAction, edit_remappings},\n    update::update_dependencies,\n};\nuse std::fs;\n\n/// Update dependencies by reading the config file\n#[derive(Debug, Clone, Default, Parser, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(ConfigLocation, into))]\n#[clap(after_help = \"For more information, read the README.md\")]\n#[non_exhaustive]\npub struct Update {\n    /// If set, this command will delete the existing remappings and re-create them\n    #[arg(short = 'g', long, default_value_t = false)]\n    #[builder(default)]\n    pub regenerate_remappings: bool,\n\n    /// If set, this command will install the dependencies recursively (via submodules or via\n    /// soldeer)\n    #[arg(short = 'd', long, default_value_t = false)]\n    #[builder(default)]\n    pub recursive_deps: bool,\n\n    /// Specify the config location without prompting.\n    ///\n    /// This prevents prompting the user if the automatic detection can't determine the config\n    /// location.\n    #[arg(long, value_enum)]\n    pub config_location: Option<ConfigLocation>,\n}\n\n// TODO: add a parameter for a dependency name, where we would only update that particular\n// dependency\n\npub(crate) async fn update_command(paths: &Paths, cmd: Update) -> Result<()> {\n    let mut config = read_soldeer_config(&paths.config)?;\n    if cmd.regenerate_remappings {\n        config.remappings_regenerate = true;\n    }\n    if cmd.recursive_deps {\n        config.recursive_deps = true;\n    }\n    success!(\"Done reading config\");\n    ensure_dependencies_dir(&paths.dependencies)?;\n    let (dependencies, 
warnings) = read_config_deps(&paths.config)?;\n    for w in warnings {\n        warning!(format!(\"Config warning: {w}\"));\n    }\n\n    let lockfile = read_lockfile(&paths.lock)?;\n    success!(\"Done reading lockfile\");\n    let (progress, monitor) = InstallProgress::new();\n    let bars = Progress::new(\"Updating dependencies\", dependencies.len(), monitor);\n    bars.start_all();\n    let new_locks = update_dependencies(\n        &dependencies,\n        &lockfile.entries,\n        &paths.dependencies,\n        config.recursive_deps,\n        progress,\n    )\n    .await?;\n    bars.stop_all();\n\n    let new_lockfile_content = generate_lockfile_contents(new_locks);\n    fs::write(&paths.lock, new_lockfile_content).map_err(LockError::IOError)?;\n    success!(\"Updated lockfile\");\n\n    edit_remappings(&RemappingsAction::Update, &config, paths)?;\n    success!(\"Updated remappings\");\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/lib.rs",
    "content": "//! High-level commands for the Soldeer CLI\n#![cfg_attr(docsrs, feature(doc_cfg))]\npub use crate::commands::{Args, Command};\nuse clap::builder::PossibleValue;\npub use clap_verbosity_flag::Verbosity;\nuse clap_verbosity_flag::log::Level;\nuse commands::CustomLevel;\nuse derive_more::derive::FromStr;\nuse soldeer_core::{Result, config::Paths};\nuse std::{\n    env,\n    path::PathBuf,\n    sync::atomic::{AtomicBool, Ordering},\n};\nuse utils::{get_config_location, intro, outro, outro_cancel, step};\n\npub mod commands;\npub mod utils;\n\nstatic TUI_ENABLED: AtomicBool = AtomicBool::new(true);\n\n/// The location where the Soldeer config should be stored.\n///\n/// This is a new type so we can implement the `ValueEnum` trait for it.\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, FromStr)]\npub struct ConfigLocation(soldeer_core::config::ConfigLocation);\n\nimpl clap::ValueEnum for ConfigLocation {\n    fn value_variants<'a>() -> &'a [Self] {\n        &[\n            Self(soldeer_core::config::ConfigLocation::Foundry),\n            Self(soldeer_core::config::ConfigLocation::Soldeer),\n        ]\n    }\n\n    fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {\n        Some(match self.0 {\n            soldeer_core::config::ConfigLocation::Foundry => PossibleValue::new(\"foundry\"),\n            soldeer_core::config::ConfigLocation::Soldeer => PossibleValue::new(\"soldeer\"),\n        })\n    }\n}\n\nimpl From<ConfigLocation> for soldeer_core::config::ConfigLocation {\n    fn from(value: ConfigLocation) -> Self {\n        value.0\n    }\n}\n\nimpl From<soldeer_core::config::ConfigLocation> for ConfigLocation {\n    fn from(value: soldeer_core::config::ConfigLocation) -> Self {\n        Self(value)\n    }\n}\n\npub async fn run(command: Command, verbosity: Verbosity<CustomLevel>) -> Result<()> {\n    if let Some(level) = verbosity.log_level() &&\n        level <= Level::Error &&\n        env::var(\"RUST_LOG\").is_err()\n    {\n    
    // enable TUI if no `-v` flag and no RUST_LOG is provided\n        TUI_ENABLED.store(true, Ordering::Relaxed);\n    } else {\n        TUI_ENABLED.store(false, Ordering::Relaxed);\n    }\n    match command {\n        Command::Init(cmd) => {\n            intro!(\"🦌 Soldeer Init 🦌\");\n            step!(\"Initialize Foundry project to use Soldeer\");\n            // for init, we always use the current dir as root, unless specified by env\n            let root = env::var(\"SOLDEER_PROJECT_ROOT\")\n                .ok()\n                .filter(|p| !p.is_empty())\n                .map_or(env::current_dir()?, PathBuf::from);\n            let paths = Paths::with_root_and_config(\n                &root,\n                Some(get_config_location(&root, cmd.config_location)?),\n            )?;\n            commands::init::init_command(&paths, cmd).await.inspect_err(|_| {\n                outro_cancel!(\"An error occurred during initialization\");\n            })?;\n            outro!(\"Done initializing!\");\n        }\n        Command::Install(cmd) => {\n            intro!(\"🦌 Soldeer Install 🦌\");\n            let root = Paths::get_root_path();\n            let paths = Paths::with_root_and_config(\n                &root,\n                Some(get_config_location(&root, cmd.config_location)?),\n            )?;\n            commands::install::install_command(&paths, cmd).await.inspect_err(|_| {\n                outro_cancel!(\"An error occurred during install\");\n            })?;\n            outro!(\"Done installing!\");\n        }\n        Command::Update(cmd) => {\n            intro!(\"🦌 Soldeer Update 🦌\");\n            let root = Paths::get_root_path();\n            let paths = Paths::with_root_and_config(\n                &root,\n                Some(get_config_location(&root, cmd.config_location)?),\n            )?;\n            commands::update::update_command(&paths, cmd).await.inspect_err(|_| {\n                outro_cancel!(\"An error occurred during the 
update\");\n            })?;\n            outro!(\"Done updating!\");\n        }\n        Command::Uninstall(cmd) => {\n            intro!(\"🦌 Soldeer Uninstall 🦌\");\n            let root = Paths::get_root_path();\n            let paths =\n                Paths::with_root_and_config(&root, Some(get_config_location(&root, None)?))?;\n            commands::uninstall::uninstall_command(&paths, &cmd).inspect_err(|_| {\n                outro_cancel!(\"An error occurred during uninstall\");\n            })?;\n            outro!(\"Done uninstalling!\");\n        }\n        Command::Clean(cmd) => {\n            intro!(\"🦌 Soldeer Clean 🦌\");\n            let root = Paths::get_root_path();\n            let paths =\n                Paths::with_root_and_config(&root, Some(get_config_location(&root, None)?))?;\n            commands::clean::clean_command(&paths, &cmd).inspect_err(|_| {\n                outro_cancel!(\"An error occurred during clean\");\n            })?;\n            outro!(\"Done cleaning!\");\n        }\n        Command::Login(cmd) => {\n            intro!(\"🦌 Soldeer Login 🦌\");\n            commands::login::login_command(cmd).await.inspect_err(|_| {\n                outro_cancel!(\"An error occurred during login\");\n            })?;\n            outro!(\"Done logging in!\");\n        }\n        Command::Push(cmd) => {\n            intro!(\"🦌 Soldeer Push 🦌\");\n            commands::push::push_command(cmd).await.inspect_err(|_| {\n                outro_cancel!(\"An error occurred during push\");\n            })?;\n            outro!(\"Done!\");\n        }\n        Command::Version(_) => {\n            const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n            println!(\"soldeer {VERSION}\");\n        }\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "crates/commands/src/utils.rs",
    "content": "#![allow(unused_macros)]\n//! Utils for the commands crate\nuse std::{fmt, path::Path};\n\nuse crate::ConfigLocation;\nuse cliclack::{MultiProgress, ProgressBar, multi_progress, progress_bar, select};\nuse soldeer_core::{Result, config::detect_config_location, install::InstallMonitoring};\n\n/// Template for the progress bars.\npub const PROGRESS_TEMPLATE: &str = \"[{elapsed_precise}] {bar:30.magenta} ({pos}/{len}) {msg}\";\n\n/// A collection of progress bars for the installation/update process.\n#[derive(Clone, Default)]\npub struct Progress {\n    multi: Option<MultiProgress>,\n    versions: Option<ProgressBar>,\n    downloads: Option<ProgressBar>,\n    unzip: Option<ProgressBar>,\n    subdependencies: Option<ProgressBar>,\n    integrity: Option<ProgressBar>,\n}\n\nimpl Progress {\n    /// Create a new progress bar object.\n    ///\n    /// A title and the total number of dependencies to install must be passed as an argument.\n    pub fn new(title: impl fmt::Display, total: usize, mut monitor: InstallMonitoring) -> Self {\n        if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {\n            tokio::task::spawn(async move { while (monitor.logs.recv().await).is_some() {} });\n            tokio::task::spawn(async move { while (monitor.versions.recv().await).is_some() {} });\n            tokio::task::spawn(async move { while (monitor.downloads.recv().await).is_some() {} });\n            tokio::task::spawn(async move { while (monitor.unzip.recv().await).is_some() {} });\n            tokio::task::spawn(\n                async move { while (monitor.subdependencies.recv().await).is_some() {} },\n            );\n            tokio::task::spawn(async move { while (monitor.integrity.recv().await).is_some() {} });\n            return Self::default();\n        }\n        let multi = multi_progress(title);\n        let versions = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));\n        let downloads = 
multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));\n        let unzip = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));\n        let subdependencies =\n            multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));\n        let integrity = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));\n        tokio::task::spawn({\n            let multi = multi.clone();\n            async move {\n                while let Some(log) = monitor.logs.recv().await {\n                    multi.println(log);\n                }\n            }\n        });\n        tokio::task::spawn({\n            let versions = versions.clone();\n            async move {\n                while let Some(dep) = monitor.versions.recv().await {\n                    versions.inc(1);\n                    versions.set_message(format!(\"Got version for {dep}\"));\n                }\n            }\n        });\n        tokio::task::spawn({\n            let downloads = downloads.clone();\n            async move {\n                while let Some(dep) = monitor.downloads.recv().await {\n                    downloads.inc(1);\n                    downloads.set_message(format!(\"Downloaded {dep}\"));\n                }\n            }\n        });\n        tokio::task::spawn({\n            let unzip = unzip.clone();\n            async move {\n                while let Some(dep) = monitor.unzip.recv().await {\n                    unzip.inc(1);\n                    unzip.set_message(format!(\"Unzipped {dep}\"));\n                }\n            }\n        });\n        tokio::task::spawn({\n            let subdependencies = subdependencies.clone();\n            async move {\n                while let Some(dep) = monitor.subdependencies.recv().await {\n                    subdependencies.inc(1);\n                    subdependencies.set_message(format!(\"Installed subdeps for {dep}\"));\n                }\n            }\n        });\n   
     tokio::task::spawn({\n            let integrity = integrity.clone();\n            async move {\n                while let Some(dep) = monitor.integrity.recv().await {\n                    integrity.inc(1);\n                    integrity.set_message(format!(\"Checked integrity of {dep}\"));\n                }\n            }\n        });\n        Self {\n            multi: Some(multi),\n            versions: Some(versions),\n            downloads: Some(downloads),\n            unzip: Some(unzip),\n            subdependencies: Some(subdependencies),\n            integrity: Some(integrity),\n        }\n    }\n\n    /// Start all progress bars.\n    pub fn start_all(&self) {\n        self.versions.as_ref().inspect(|p| p.start(\"Retrieving versions...\"));\n        self.downloads.as_ref().inspect(|p| p.start(\"Downloading dependencies...\"));\n        self.unzip.as_ref().inspect(|p| p.start(\"Unzipping dependencies...\"));\n        self.subdependencies.as_ref().inspect(|p| p.start(\"Installing subdependencies...\"));\n        self.integrity.as_ref().inspect(|p| p.start(\"Checking integrity...\"));\n    }\n\n    /// Stop all progress bars.\n    pub fn stop_all(&self) {\n        self.versions.as_ref().inspect(|p| p.stop(\"Done retrieving versions\"));\n        self.downloads.as_ref().inspect(|p| p.stop(\"Done downloading dependencies\"));\n        self.unzip.as_ref().inspect(|p| p.stop(\"Done unzipping dependencies\"));\n        self.subdependencies.as_ref().inspect(|p| p.stop(\"Done installing subdependencies\"));\n        self.integrity.as_ref().inspect(|p| p.stop(\"Done checking integrity\"));\n        self.multi.as_ref().inspect(|p| p.stop());\n    }\n\n    pub fn set_error(&self, error: impl fmt::Display) {\n        self.multi.as_ref().inspect(|m| m.error(error));\n    }\n}\n\n/// Auto-detect config location or prompt the user for preference.\npub fn get_config_location(\n    root: impl AsRef<Path>,\n    arg: Option<ConfigLocation>,\n) -> 
Result<soldeer_core::config::ConfigLocation> {\n    Ok(match arg {\n        Some(loc) => loc.into(),\n        None => match detect_config_location(root) {\n            Some(loc) => loc,\n            None => prompt_config_location()?.into(),\n        },\n    })\n}\n\n/// Prompt the user for their desired config location in case it cannot be auto-detected.\npub fn prompt_config_location() -> Result<ConfigLocation> {\n    Ok(select(\"Select how you want to configure Soldeer\")\n        .initial_value(\"foundry\")\n        .item(\"foundry\", \"Using foundry.toml\", \"recommended\")\n        .item(\"soldeer\", \"Using soldeer.toml\", \"for non-foundry projects\")\n        .interact()?\n        .parse()\n        .expect(\"all options should be valid variants of the ConfigLocation enum\"))\n}\n\nmacro_rules! define_cliclack_macro {\n    ($name:ident, $path:path) => {\n        macro_rules! $name {\n            ($expression:expr) => {\n                if $crate::TUI_ENABLED.load(::std::sync::atomic::Ordering::Relaxed) {\n                    $path($expression).ok();\n                }\n            };\n        }\n    };\n}\n\ndefine_cliclack_macro!(intro, ::cliclack::intro);\ndefine_cliclack_macro!(note, ::cliclack::note);\ndefine_cliclack_macro!(outro, ::cliclack::outro);\ndefine_cliclack_macro!(outro_cancel, ::cliclack::outro_cancel);\ndefine_cliclack_macro!(outro_note, ::cliclack::outro_note);\ndefine_cliclack_macro!(error, ::cliclack::log::error);\ndefine_cliclack_macro!(info, ::cliclack::log::info);\ndefine_cliclack_macro!(remark, ::cliclack::log::remark);\ndefine_cliclack_macro!(step, ::cliclack::log::step);\ndefine_cliclack_macro!(success, ::cliclack::log::success);\ndefine_cliclack_macro!(warning, ::cliclack::log::warning);\n\n#[allow(unused_imports)]\npub(crate) use error;\npub(crate) use info;\npub(crate) use intro;\n#[allow(unused_imports)]\npub(crate) use note;\npub(crate) use outro;\npub(crate) use outro_cancel;\n#[allow(unused_imports)]\npub(crate) use 
outro_note;\npub(crate) use remark;\npub(crate) use step;\npub(crate) use success;\npub(crate) use warning;\n"
  },
  {
    "path": "crates/commands/tests/tests-clean.rs",
    "content": "use soldeer_commands::{\n    Command, Verbosity,\n    commands::{clean::Clean, install::Install},\n    run,\n};\nuse soldeer_core::{\n    config::read_config_deps,\n    lock::{SOLDEER_LOCK, read_lockfile},\n};\n#[cfg(unix)]\nuse std::os::unix::fs::PermissionsExt;\nuse std::{\n    fs,\n    path::{Path, PathBuf},\n};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\n#[allow(clippy::unwrap_used)]\nfn check_clean_success(dir: &Path, config_filename: &str) {\n    assert!(!dir.join(\"dependencies\").exists(), \"Dependencies folder should be removed\");\n\n    let config_path = dir.join(config_filename);\n    assert!(config_path.exists(), \"Config file should be preserved\");\n\n    let (deps, _) = read_config_deps(&config_path).unwrap();\n    assert_eq!(deps.len(), 2, \"Config should still have 2 dependencies\");\n    assert_eq!(deps[0].name(), \"@openzeppelin-contracts\");\n    assert_eq!(deps[1].name(), \"solady\");\n}\n\n#[allow(clippy::unwrap_used)]\nfn check_artifacts_exist(dir: &Path) {\n    assert!(dir.join(\"dependencies\").exists(), \"Dependencies folder should exist\");\n    assert!(dir.join(SOLDEER_LOCK).exists(), \"Lock file should exist\");\n\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(lock.entries.len(), 2, \"Lock file should have 2 entries\");\n    let deps_dir = dir.join(\"dependencies\");\n    let entries: Vec<_> = fs::read_dir(&deps_dir).unwrap().collect::<Result<Vec<_>, _>>().unwrap();\n    assert!(!entries.is_empty(), \"Dependencies directory should have content\");\n}\n\n#[allow(clippy::unwrap_used)]\nasync fn setup_project_with_dependencies(config_filename: &str) -> PathBuf {\n    let dir = testdir!();\n    let mut contents = r#\"[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\nsolady = \"0.0.238\"\n\"#\n    .to_string();\n    if config_filename == \"foundry.toml\" {\n        contents = format!(\n            r#\"[profile.default]\nlibs = [\"dependencies\"]\n\n{contents}\"#\n        
);\n    }\n    fs::write(dir.join(config_filename), contents).unwrap();\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    dir\n}\n\n#[tokio::test]\nasync fn test_clean_basic() {\n    let dir = setup_project_with_dependencies(\"soldeer.toml\").await;\n\n    assert!(dir.join(\"dependencies\").exists());\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    check_clean_success(&dir, \"soldeer.toml\");\n}\n\n#[tokio::test]\nasync fn test_clean_foundry_config() {\n    let dir = setup_project_with_dependencies(\"foundry.toml\").await;\n    check_artifacts_exist(&dir);\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    check_clean_success(&dir, \"foundry.toml\");\n}\n\n#[tokio::test]\nasync fn test_clean_no_artifacts() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n\n    // Run clean on empty project (no dependencies folder or lock file)\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n\n    // Should succeed silently\n    assert!(res.is_ok(), \"{res:?}\");\n}\n\n#[tokio::test]\nasync fn test_clean_restores_with_install() {\n    let dir = 
setup_project_with_dependencies(\"soldeer.toml\").await;\n\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(!dir.join(\"dependencies\").exists());\n    assert!(dir.join(SOLDEER_LOCK).exists(), \"Lock file should remain after clean\");\n\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(dir.join(\"dependencies\").exists());\n\n    let dependencies_dir = dir.join(\"dependencies\");\n    let entries: Vec<_> =\n        fs::read_dir(dependencies_dir).unwrap().collect::<Result<Vec<_>, _>>().unwrap();\n    assert!(!entries.is_empty(), \"Dependencies should be installed\");\n}\n\n#[tokio::test]\nasync fn test_clean_with_complex_file_structure() {\n    let dir = setup_project_with_dependencies(\"soldeer.toml\").await;\n\n    let complex_path = dir.join(\"dependencies\").join(\"nested\").join(\"deep\").join(\"structure\");\n    fs::create_dir_all(&complex_path).unwrap();\n    fs::write(complex_path.join(\"test.txt\"), \"nested content\").unwrap();\n\n    // Create symlink (Unix only)\n    #[cfg(unix)]\n    {\n        use std::os::unix::fs::symlink;\n        let _ = symlink(dir.join(\"soldeer.toml\"), dir.join(\"dependencies\").join(\"config_link\"));\n    }\n\n    // Create large file to test performance\n    let large_content = \"x\".repeat(1024 * 1024); // 1MB\n    fs::write(dir.join(\"dependencies\").join(\"large_file.txt\"), large_content).unwrap();\n\n    let cmd: Command = Clean::builder().build().into();\n    let res: Result<(), soldeer_core::SoldeerError> = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", 
Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n\n    assert!(res.is_ok(), \"{res:?}\");\n    check_clean_success(&dir, \"soldeer.toml\");\n}\n\n#[tokio::test]\nasync fn test_clean_permission_error() {\n    let dir = setup_project_with_dependencies(\"soldeer.toml\").await;\n\n    #[cfg(unix)]\n    {\n        let deps_path = dir.join(\"dependencies\");\n        let mut perms = fs::metadata(&deps_path).unwrap().permissions();\n        perms.set_mode(0o444); // Read-only\n        fs::set_permissions(&deps_path, perms).unwrap();\n\n        let cmd: Command = Clean::builder().build().into();\n        let res: Result<(), soldeer_core::SoldeerError> = async_with_vars(\n            [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n            run(cmd, Verbosity::default()),\n        )\n        .await;\n\n        // Should fail due to permission error\n        assert!(res.is_err(), \"Clean should fail with permission error\");\n\n        let mut perms = fs::metadata(&deps_path).unwrap().permissions();\n        perms.set_mode(0o755);\n        fs::set_permissions(&deps_path, perms).unwrap();\n    }\n\n    #[cfg(not(unix))]\n    {\n        // On non-Unix systems, just run a successful clean\n        let cmd: Command = Clean::builder().build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n            run(cmd, Verbosity::default()),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n    }\n}\n\n#[tokio::test]\nasync fn test_clean_with_soldeer_config_variations() {\n    let dir = testdir!();\n\n    let contents = r#\"[soldeer]\nremappings_generate = false\nremappings_regenerate = true\nremappings_location = \"config\"\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\nsolady = \"0.0.238\"\n\"#;\n\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n\n    let cmd: Command = 
Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_artifacts_exist(&dir);\n\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n\n    assert!(res.is_ok(), \"{res:?}\");\n    check_clean_success(&dir, \"soldeer.toml\");\n\n    // Verify custom config is preserved\n    let config_content = fs::read_to_string(dir.join(\"soldeer.toml\")).unwrap();\n    assert!(config_content.contains(\"remappings_generate = false\"));\n    assert!(config_content.contains(\"remappings_location = \\\"config\\\"\"));\n}\n\n#[tokio::test]\nasync fn test_clean_multiple_times() {\n    let dir = setup_project_with_dependencies(\"soldeer.toml\").await;\n\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let cmd: Command = Clean::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // Verify final state\n    check_clean_success(&dir, \"soldeer.toml\");\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-init.rs",
    "content": "use soldeer_commands::{Command, Verbosity, commands::init::Init, run};\nuse soldeer_core::{\n    config::{ConfigLocation, read_config_deps},\n    lock::{SOLDEER_LOCK, read_lockfile},\n    registry::get_latest_version,\n    utils::run_git_command,\n};\nuse std::fs;\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\n#[tokio::test]\nasync fn test_init_clean() {\n    let dir = testdir!();\n    run_git_command(\n        [\"clone\", \"--recursive\", \"https://github.com/foundry-rs/forge-template.git\", \".\"],\n        Some(&dir),\n    )\n    .await\n    .unwrap();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command =\n        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(!dir.join(\"lib\").exists());\n    assert!(!dir.join(\".gitmodules\").exists());\n    assert!(dir.join(\"dependencies\").exists());\n    let (deps, _) = read_config_deps(dir.join(\"soldeer.toml\")).unwrap();\n    assert_eq!(deps.first().unwrap().name(), \"forge-std\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(lock.entries.first().unwrap().name(), \"forge-std\");\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert!(remappings.contains(\"forge-std\"));\n    let gitignore = fs::read_to_string(dir.join(\".gitignore\")).unwrap();\n    assert!(gitignore.contains(\"/dependencies\"));\n    let foundry_config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    assert!(foundry_config.contains(\"libs = [\\\"dependencies\\\"]\"));\n}\n\n#[tokio::test]\nasync fn test_init_no_clean() {\n    let dir = testdir!();\n    run_git_command(\n        [\"clone\", \"--recursive\", 
\"https://github.com/foundry-rs/forge-template.git\", \".\"],\n        Some(&dir),\n    )\n    .await\n    .unwrap();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Init::builder().config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(dir.join(\"lib\").exists());\n    assert!(dir.join(\".gitmodules\").exists());\n    assert!(dir.join(\"dependencies\").exists());\n    let (deps, _) = read_config_deps(dir.join(\"soldeer.toml\")).unwrap();\n    assert_eq!(deps.first().unwrap().name(), \"forge-std\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(lock.entries.first().unwrap().name(), \"forge-std\");\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert!(remappings.contains(\"forge-std\"));\n    let gitignore = fs::read_to_string(dir.join(\".gitignore\")).unwrap();\n    assert!(gitignore.contains(\"/dependencies\"));\n    let foundry_config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    assert!(foundry_config.contains(\"libs = [\\\"dependencies\\\"]\"));\n}\n\n#[tokio::test]\nasync fn test_init_no_remappings() {\n    let dir = testdir!();\n    run_git_command(\n        [\"clone\", \"--recursive\", \"https://github.com/foundry-rs/forge-template.git\", \".\"],\n        Some(&dir),\n    )\n    .await\n    .unwrap();\n    let contents = r\"[soldeer]\nremappings_generate = false\n\n[dependencies]\n\";\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command =\n        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, 
Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(!dir.join(\"remappings.txt\").exists());\n}\n\n#[tokio::test]\nasync fn test_init_no_gitignore() {\n    let dir = testdir!();\n    run_git_command(\n        [\"clone\", \"--recursive\", \"https://github.com/foundry-rs/forge-template.git\", \".\"],\n        Some(&dir),\n    )\n    .await\n    .unwrap();\n    fs::remove_file(dir.join(\".gitignore\")).unwrap();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command =\n        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(!dir.join(\".gitignore\").exists());\n}\n\n#[tokio::test]\nasync fn test_init_select_foundry_location() {\n    let dir = testdir!();\n\n    let cmd: Command =\n        Init::builder().clean(true).config_location(ConfigLocation::Foundry).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let forge_std = get_latest_version(\"forge-std\").await.unwrap();\n    let config_path = dir.join(\"foundry.toml\");\n    assert!(config_path.exists());\n\n    let contents = format!(\n        r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"dependencies\"]\n\n[dependencies]\nforge-std = \"{}\"\n\n# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options\n\"#,\n        forge_std.version_req()\n    );\n    assert_eq!(fs::read_to_string(config_path).unwrap(), contents);\n}\n\n#[tokio::test]\nasync fn test_init_select_soldeer_location() {\n    let dir = testdir!();\n\n    let cmd: 
Command =\n        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let forge_std = get_latest_version(\"forge-std\").await.unwrap();\n    let config_path = dir.join(\"soldeer.toml\");\n    assert!(config_path.exists());\n\n    let contents = format!(\n        r#\"[dependencies]\nforge-std = \"{}\"\n\"#,\n        forge_std.version_req()\n    );\n    assert_eq!(fs::read_to_string(config_path).unwrap(), contents);\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-install.rs",
    "content": "#![allow(clippy::unwrap_used)]\nuse mockito::Matcher;\nuse soldeer_commands::{Command, Verbosity, commands::install::Install, run};\nuse soldeer_core::{\n    SoldeerError,\n    config::{ConfigLocation, read_config_deps},\n    download::download_file,\n    errors::InstallError,\n    lock::{SOLDEER_LOCK, read_lockfile},\n    push::zip_file,\n    utils::hash_file,\n};\nuse std::{\n    fs::{self},\n    path::{Path, PathBuf},\n};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\nfn check_install(dir: &Path, name: &str, version_req: &str) {\n    assert!(dir.join(\"dependencies\").exists());\n    let mut config_path = dir.join(\"soldeer.toml\");\n    if !config_path.exists() {\n        config_path = dir.join(\"foundry.toml\");\n    }\n    let (deps, _) = read_config_deps(config_path).unwrap();\n    assert_eq!(deps.first().unwrap().name(), name);\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert!(remappings.contains(name));\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(lock.entries.first().unwrap().name(), name);\n    let version = lock.entries.first().unwrap().version();\n    assert!(version.starts_with(version_req));\n    assert!(dir.join(\"dependencies\").join(format!(\"{name}-{version}\")).exists());\n}\n\nfn create_zip_monorepo(testdir: &Path) -> PathBuf {\n    let root = testdir.join(\"monorepo\");\n    fs::create_dir(&root).unwrap();\n    let contracts = root.join(\"contracts\");\n    fs::create_dir(&contracts).unwrap();\n    let mut files = Vec::new();\n    files.push(root.join(\"README.md\"));\n    fs::write(\n        files.last().unwrap(),\n        \"Root of the repo is here, foundry project is under `contracts`\",\n    )\n    .unwrap();\n    files.push(contracts.join(\"foundry.toml\"));\n    fs::write(\n        files.last().unwrap(),\n        r#\"[profile.default]\nlibs = [\"dependencies\"]\nremappings = 
[\"forge-std/=dependencies/forge-std-1.11.0/src/\"]\n\n[dependencies]\nforge-std = \"1.11.0\"\n\n[soldeer]\nremappings_location = \"config\"\nrecursive_deps = true\"#,\n    )\n    .unwrap();\n\n    zip_file(&root, &files, \"test\").unwrap() // zip is inside the `monorepo` folder\n}\n\nfn create_zip_with_foundry_lock(testdir: &Path, branch: Option<&str>) -> PathBuf {\n    let root = testdir.join(\"foundry_lock_project\");\n    fs::create_dir(&root).unwrap();\n    let lib = root.join(\"lib\");\n    fs::create_dir(&lib).unwrap();\n    let mut files = Vec::new();\n    files.push(root.join(\"foundry.toml\"));\n    fs::write(\n        files.last().unwrap(),\n        r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"lib\"]\n\"#,\n    )\n    .unwrap();\n    files.push(root.join(\".gitmodules\"));\n    let gitmodules_content = if let Some(branch) = branch {\n        format!(\n            r#\"[submodule \"lib/forge-std\"]\n\tpath = lib/forge-std\n\turl = https://github.com/foundry-rs/forge-std\n\tbranch = {branch}\n\"#\n        )\n    } else {\n        r#\"[submodule \"lib/forge-std\"]\n\tpath = lib/forge-std\n\turl = https://github.com/foundry-rs/forge-std\n\"#\n        .to_string()\n    };\n    fs::write(files.last().unwrap(), gitmodules_content).unwrap();\n    files.push(root.join(\"foundry.lock\"));\n    let foundry_lock_content = if let Some(branch) = branch {\n        format!(\n            r#\"{{\n    \"lib/forge-std\": {{\n        \"branch\": {{\n            \"name\": \"{branch}\",\n            \"rev\": \"c29afdd40a82db50a3d3709d324416be50050e5e\"\n        }}\n    }}\n}}\"#\n        )\n    } else {\n        r#\"{\n    \"lib/forge-std\": {\n        \"rev\": \"c29afdd40a82db50a3d3709d324416be50050e5e\"\n    }\n}\"#\n        .to_string()\n    };\n    fs::write(files.last().unwrap(), foundry_lock_content).unwrap();\n    zip_file(&root, &files, \"test\").unwrap()\n}\n\n#[tokio::test]\nasync fn test_install_registry_any_version() {\n    let dir = testdir!();\n  
  fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder().dependency(\"@openzeppelin-contracts~5\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"@openzeppelin-contracts\", \"5\");\n}\n\n#[tokio::test]\nasync fn test_install_registry_wildcard() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder().dependency(\"solady~*\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"solady\", \"\");\n}\n\n#[tokio::test]\nasync fn test_install_registry_specific_version() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command =\n        Install::builder().dependency(\"@openzeppelin-contracts~4.9.5\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"@openzeppelin-contracts\", \"4.9.5\");\n}\n\n#[tokio::test]\nasync fn test_install_custom_http() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder().dependency(\"mylib~1.0.0\")\n        .zip_url(\"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\")\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", 
Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"1.0.0\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lock.entries.first().unwrap().as_http().unwrap().url,\n        \"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\"\n    );\n    assert!(&dir.join(\"dependencies\").join(\"mylib-1.0.0\").join(\"README.md\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_git_main() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"mylib~0.1.0\")\n        .git_url(\"https://github.com/beeb/test-repo.git\")\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"0.1.0\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lock.entries.first().unwrap().as_git().unwrap().rev,\n        \"d5d72fa135d28b2e8307650b3ea79115183f2406\"\n    );\n    assert!(&dir.join(\"dependencies\").join(\"mylib-0.1.0\").join(\"foo.txt\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_git_commit() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"mylib~0.1.0\")\n        .git_url(\"https://github.com/beeb/test-repo.git\")\n        .rev(\"78c2f6a1a54db26bab6c3f501854a1564eb3707f\")\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    
.await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"0.1.0\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lock.entries.first().unwrap().as_git().unwrap().rev,\n        \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\"\n    );\n    assert!(!&dir.join(\"dependencies\").join(\"mylib-0.1.0\").join(\"foo.txt\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_git_tag() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"mylib~0.1.0\")\n        .git_url(\"https://github.com/beeb/test-repo.git\")\n        .tag(\"v0.1.0\")\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"0.1.0\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lock.entries.first().unwrap().as_git().unwrap().rev,\n        \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\"\n    );\n    assert!(!&dir.join(\"dependencies\").join(\"mylib-0.1.0\").join(\"foo.txt\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_git_branch() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"mylib~dev\")\n        .git_url(\"https://github.com/beeb/test-repo.git\")\n        .branch(\"dev\")\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"dev\");\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n  
  assert_eq!(\n        lock.entries.first().unwrap().as_git().unwrap().rev,\n        \"8d903e557e8f1b6e62bde768aa456d4ddfca72c4\"\n    );\n    assert!(!&dir.join(\"dependencies\").join(\"mylib-dev\").join(\"test.txt\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_foundry_config() {\n    let dir = testdir!();\n    fs::write(dir.join(\"foundry.toml\"), \"[dependencies]\\n\").unwrap();\n    let cmd: Command = Install::builder().dependency(\"@openzeppelin-contracts~5\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"@openzeppelin-contracts\", \"5\");\n}\n\n#[tokio::test]\nasync fn test_install_foundry_remappings() {\n    let dir = testdir!();\n    let contents = r#\"[profile.default]\n\n[soldeer]\nremappings_location = \"config\"\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5.1.0\"\n\"#;\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    assert!(config.contains(\n        \"remappings = [\\\"@openzeppelin-contracts-5.1.0/=dependencies/@openzeppelin-contracts-5.1.0/\\\"]\"\n    ));\n}\n\n#[tokio::test]\nasync fn test_install_missing_no_lock() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        
run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"@openzeppelin-contracts\", \"5.0.2\");\n}\n\n#[tokio::test]\nasync fn test_install_missing_with_lock() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\nmylib = \"1.1\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let lock = r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.1.0\"\nurl = \"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\"\nchecksum = \"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\nintegrity = \"f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2\"\n\"#;\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    check_install(&dir, \"mylib\", \"1.1\");\n}\n\n#[tokio::test]\nasync fn test_install_second_time() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\nmylib = \"1.1\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n\n    // get zip file locally for mock\n    let zip_file = download_file(\n        \"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\",\n        &dir,\n        \"tmp\",\n    )\n    .await\n    .unwrap();\n\n    // serve the file with mock server\n    let mut server = mockito::Server::new_async().await;\n    let mock = server.mock(\"GET\", \"/file.zip\").with_body_from_file(zip_file).create_async().await;\n    let mock = mock.expect(1); // download link should be called exactly once\n\n    let lock = format!(\n        r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.1.0\"\nurl = \"{}/file.zip\"\nchecksum = 
\"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\nintegrity = \"f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2\"\n\"#,\n        server.url()\n    );\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.assert(); // download link was called\n\n    // second install\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.assert(); // download link was not called a second time\n}\n\n#[tokio::test]\nasync fn test_install_private_second_time() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\ntest-private = \"0.1.0\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n\n    // get zip file locally for mock\n    let zip_file = download_file(\n        \"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\",\n        &dir,\n        \"tmp\",\n    )\n    .await\n    .unwrap();\n\n    // serve the file with mock server\n    let mut server = mockito::Server::new_async().await;\n    let data = format!(\n        r#\"{{\"data\":[{{\"created_at\":\"2025-09-28T12:36:09.526660Z\",\"deleted\":false,\"id\":\"0440c261-8cdf-4738-9139-c4dc7b0c7f3e\",\"internal_name\":\"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip\",\"private\":true,\"project_id\":\"14f419e7-2d64-49e4-86b9-b44b36627786\",\"url\":\"{}/file.zip\",\"version\":\"0.1.0\"}}],\"status\":\"success\"}}\"#,\n        server.url()\n    );\n    server.mock(\"GET\", \"/file.zip\").with_body_from_file(zip_file).create_async().await;\n    server\n        .mock(\"GET\", 
\"/api/v1/revision-cli\")\n        .match_query(Matcher::Any)\n        .with_header(\"content-type\", \"application/json\")\n        .with_body(data)\n        .create_async()\n        .await;\n\n    let lock = r#\"[[dependencies]]\nname = \"test-private\"\nversion = \"0.1.0\"\nchecksum = \"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\nintegrity = \"f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2\"\n\"#;\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_API_URL\", Some(server.url().as_str())),\n            (\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref())),\n        ],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // second install\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_API_URL\", Some(server.url().as_str())),\n            (\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref())),\n        ],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n}\n\n#[tokio::test]\nasync fn test_install_add_existing_reinstall() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok());\n\n    // remove dependencies folder and lockfile\n    fs::remove_dir_all(dir.join(\"dependencies\")).unwrap();\n    fs::remove_file(dir.join(SOLDEER_LOCK)).unwrap();\n\n    // re-add the same dep, should re-install it\n    let cmd: Command =\n        
Install::builder().dependency(\"@openzeppelin-contracts~5.0.2\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok());\n    let dep_path = dir.join(\"dependencies\").join(\"@openzeppelin-contracts-5.0.2\");\n    assert!(dep_path.exists());\n}\n\n#[tokio::test]\nasync fn test_install_clean() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let test_path = dir.join(\"dependencies\").join(\"foo\");\n    fs::create_dir_all(&test_path).unwrap();\n    fs::write(test_path.join(\"foo.txt\"), \"test\").unwrap();\n    let cmd: Command = Install::builder().clean(true).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    assert!(!test_path.exists());\n}\n\n#[tokio::test]\nasync fn test_install_recursive_deps() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\nfoo = { version = \"0.1.0\", git = \"https://github.com/foundry-rs/forge-template.git\" }\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().recursive_deps(true).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let dep_path = dir.join(\"dependencies\").join(\"foo-0.1.0\");\n    assert!(dep_path.exists());\n    let sub_dirs_path = dep_path.join(\"lib\");\n    assert!(sub_dirs_path.exists());\n    assert!(sub_dirs_path.join(\"forge-std\").join(\"src\").exists());\n}\n\n#[tokio::test]\nasync fn 
test_install_recursive_deps_soldeer() {\n    let dir = testdir!();\n    // this template uses soldeer to install forge-std\n    let contents = r#\"[dependencies]\nfoo = { version = \"0.1.0\", git = \"https://github.com/beeb/forge-template.git\" }\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().recursive_deps(true).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let dep_path = dir.join(\"dependencies\").join(\"foo-0.1.0\");\n    assert!(dep_path.exists());\n    let sub_dirs_path = dep_path.join(\"dependencies\");\n    assert!(sub_dirs_path.exists());\n    assert!(sub_dirs_path.join(\"forge-std-1.9.7\").join(\"src\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_recursive_deps_nested() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\n\"@uniswap-permit2\" = { version = \"1.0.0\", url = \"https://github.com/Uniswap/permit2/archive/cc56ad0f3439c502c246fc5cfcc3db92bb8b7219.zip\" }\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().recursive_deps(true).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let paths = [\n        \"@uniswap-permit2-1.0.0/lib/forge-std/src\",\n        \"@uniswap-permit2-1.0.0/lib/forge-gas-snapshot/dependencies/forge-std-1.9.2/src\",\n        \"@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/erc4626-tests/ERC4626.test.sol\",\n        \"@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/forge-std/src\",\n        \"@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/halmos-cheatcodes/src\",\n        
\"@uniswap-permit2-1.0.0/lib/solmate/lib/ds-test/src\",\n    ];\n    for path in paths {\n        let dep_path = dir.join(\"dependencies\").join(path);\n        assert!(dep_path.exists());\n    }\n}\n\n#[tokio::test]\nasync fn test_install_recursive_project_root() {\n    let dir = testdir!();\n    let zip_path = create_zip_monorepo(&dir);\n    let checksum = hash_file(&zip_path).unwrap();\n\n    let contents = r#\"[dependencies]\nmylib = { version = \"1.0.0\", project_root = \"contracts\" }\n\n[soldeer]\nrecursive_deps = true\n\"#;\n\n    // serve the dependency which uses foundry in a `contracts` subfolder\n    let mut server = mockito::Server::new_async().await;\n    server.mock(\"GET\", \"/file.zip\").with_body_from_file(zip_path).create_async().await;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let lock = format!(\n        r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.0.0\"\nurl = \"{}/file.zip\"\nchecksum = \"{checksum}\"\nintegrity = \"e629088e5b74df78f116a24c328a64fd002b4e42449607b6ca78f9afb799374d\"\n\"#,\n        server.url()\n    );\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // check that we recursively installed all deps\n    assert!(dir.join(\"dependencies/mylib-1.0.0/contracts/dependencies/forge-std-1.11.0\").is_dir());\n}\n\n#[tokio::test]\nasync fn test_install_recursive_project_root_invalid_path() {\n    let dir = testdir!();\n    let zip_path = create_zip_monorepo(&dir);\n    let checksum = hash_file(&zip_path).unwrap();\n\n    // directory traversal is forbidden\n    let contents = r#\"[dependencies]\nmylib = { version = \"1.0.0\", project_root = \"../../../contracts\" }\n\n[soldeer]\nrecursive_deps = true\n\"#;\n\n    // 
serve the dependency which uses foundry in a `contracts` subfolder\n    let mut server = mockito::Server::new_async().await;\n    server.mock(\"GET\", \"/file.zip\").with_body_from_file(zip_path).create_async().await;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let lock = format!(\n        r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.0.0\"\nurl = \"{}/file.zip\"\nchecksum = \"{checksum}\"\nintegrity = \"e629088e5b74df78f116a24c328a64fd002b4e42449607b6ca78f9afb799374d\"\n\"#,\n        server.url()\n    );\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(matches!(\n        res.unwrap_err(),\n        SoldeerError::InstallError(InstallError::ConfigError(\n            soldeer_core::errors::ConfigError::InvalidProjectRoot { .. 
}\n        ))\n    ));\n}\n\n#[tokio::test]\nasync fn test_install_regenerate_remappings() {\n    let dir = testdir!();\n    fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n    fs::write(dir.join(\"remappings.txt\"), \"foo=bar\").unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"@openzeppelin-contracts~5\")\n        .regenerate_remappings(true)\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert!(!remappings.contains(\"foo=bar\"));\n    assert!(remappings.contains(\"@openzeppelin-contracts\"));\n}\n\n#[tokio::test]\nasync fn test_add_remappings() {\n    let dir = testdir!();\n\n    let contents = r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"dependencies\"]\n\n# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options\n\n[soldeer]\nremappings_generate = true\nremappings_prefix = \"@custom-f@\"\nremappings_location = \"config\"\nremappings_regenerate = true\n\n[dependencies]\n\"#;\n\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().dependency(\"forge-std~1.8.1\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let updated_contents = r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"dependencies\"]\nremappings = [\"@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/\"]\n\n# See more config options 
https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options\n\n[soldeer]\nremappings_generate = true\nremappings_prefix = \"@custom-f@\"\nremappings_location = \"config\"\nremappings_regenerate = true\n\n[dependencies]\nforge-std = \"1.8.1\"\n\"#;\n    assert_eq!(updated_contents, fs::read_to_string(dir.join(\"foundry.toml\")).unwrap());\n}\n\n#[tokio::test]\nasync fn test_modifying_remappings_prefix_config() {\n    let dir = testdir!();\n\n    let contents = r#\"[profile.default]\nlibs = [\"dependencies\"]\nremappings = [\"@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/\"]\n\n[soldeer]\nremappings_prefix = \"!custom-f!\"\nremappings_regenerate = true\nremappings_location = \"config\"\n\n[dependencies]\n\"#;\n\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder().dependency(\"forge-std~1.8.1\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let expected = r#\"[profile.default]\nlibs = [\"dependencies\"]\nremappings = [\"!custom-f!forge-std-1.8.1/=dependencies/forge-std-1.8.1/\"]\n\n[soldeer]\nremappings_prefix = \"!custom-f!\"\nremappings_regenerate = true\nremappings_location = \"config\"\n\n[dependencies]\nforge-std = \"1.8.1\"\n\"#;\n\n    assert_eq!(expected, fs::read_to_string(dir.join(\"foundry.toml\")).unwrap());\n}\n\n#[tokio::test]\nasync fn test_modifying_remappings_prefix_txt() {\n    let dir = testdir!();\n\n    let contents = r#\"[profile.default]\n\n[soldeer]\nremappings_prefix = \"!custom-f!\"\nremappings_regenerate = true\n\n[dependencies]\n\"#;\n    fs::write(\n        dir.join(\"remappings.txt\"),\n        \"@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/\",\n    )\n    .unwrap();\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n    let cmd: 
Command = Install::builder().dependency(\"forge-std~1.8.1\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let updated_contents = r#\"!custom-f!forge-std-1.8.1/=dependencies/forge-std-1.8.1/\n\"#;\n\n    assert_eq!(updated_contents, fs::read_to_string(dir.join(\"remappings.txt\")).unwrap());\n}\n\n#[tokio::test]\nasync fn test_install_new_foundry_no_dependency_tag() {\n    let dir = testdir!();\n    let contents = r#\"[profile.default]\nlibs = [\"lib\"]\n\"#;\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"@openzeppelin-contracts~5\")\n        .config_location(ConfigLocation::Foundry)\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    let content = r#\"[profile.default]\nlibs = [\"lib\", \"dependencies\"]\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5\"\n\"#;\n    assert_eq!(config, content);\n}\n\n#[tokio::test]\nasync fn test_install_new_soldeer_no_soldeer_toml() {\n    let dir = testdir!();\n\n    let cmd: Command = Install::builder()\n        .dependency(\"@openzeppelin-contracts~5\")\n        .config_location(ConfigLocation::Soldeer)\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"soldeer.toml\")).unwrap();\n    let content = r#\"[dependencies]\n\"@openzeppelin-contracts\" 
= \"5\"\n\"#;\n    assert_eq!(config, content);\n}\n\n#[tokio::test]\nasync fn test_install_new_soldeer_no_dependency_tag() {\n    let dir = testdir!();\n    let contents = r#\"[soldeer]\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Install::builder()\n        .dependency(\"@openzeppelin-contracts~5\")\n        .config_location(ConfigLocation::Soldeer)\n        .build()\n        .into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"soldeer.toml\")).unwrap();\n    let content = r#\"[soldeer]\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5\"\n\"#;\n    assert_eq!(config, content);\n}\n\n#[tokio::test]\nasync fn test_install_recursive_deps_with_foundry_lock() {\n    let dir = testdir!();\n    let zip_path = create_zip_with_foundry_lock(&dir, None);\n    let checksum = hash_file(&zip_path).unwrap();\n\n    let contents = r#\"[dependencies]\nmylib = \"1.0.0\"\n\n[soldeer]\nrecursive_deps = true\n\"#;\n\n    // Serve the dependency via mock server\n    let mut server = mockito::Server::new_async().await;\n    server.mock(\"GET\", \"/file.zip\").with_body_from_file(&zip_path).create_async().await;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n\n    let lock = format!(\n        r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.0.0\"\nurl = \"{}/file.zip\"\nchecksum = \"{checksum}\"\nintegrity = \"placeholder\"\n\"#,\n        server.url()\n    );\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // Verify 
the submodule exists\n    let forge_std_path = dir.join(\"dependencies/mylib-1.0.0/lib/forge-std\");\n    assert!(forge_std_path.exists());\n\n    // Verify it's checked out at the specific revision from foundry.lock\n    let output = std::process::Command::new(\"git\")\n        .args([\"rev-parse\", \"HEAD\"])\n        .current_dir(&forge_std_path)\n        .output()\n        .expect(\"failed to run git rev-parse\");\n\n    let current_rev = String::from_utf8_lossy(&output.stdout).trim().to_string();\n    assert_eq!(current_rev, \"c29afdd40a82db50a3d3709d324416be50050e5e\");\n}\n\n#[tokio::test]\nasync fn test_install_recursive_deps_with_foundry_lock_branch() {\n    let dir = testdir!();\n    let zip_path = create_zip_with_foundry_lock(&dir, Some(\"master\"));\n    let checksum = hash_file(&zip_path).unwrap();\n\n    let contents = r#\"[dependencies]\nmylib = \"1.0.0\"\n\n[soldeer]\nrecursive_deps = true\n\"#;\n\n    // Serve the dependency via mock server\n    let mut server = mockito::Server::new_async().await;\n    server.mock(\"GET\", \"/file.zip\").with_body_from_file(&zip_path).create_async().await;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n\n    let lock = format!(\n        r#\"[[dependencies]]\nname = \"mylib\"\nversion = \"1.0.0\"\nurl = \"{}/file.zip\"\nchecksum = \"{checksum}\"\nintegrity = \"placeholder\"\n\"#,\n        server.url()\n    );\n    fs::write(dir.join(SOLDEER_LOCK), lock).unwrap();\n\n    let cmd: Command = Install::builder().build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd.clone(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // Verify the submodule exists\n    let forge_std_path = dir.join(\"dependencies/mylib-1.0.0/lib/forge-std\");\n    assert!(forge_std_path.exists());\n\n    // Verify it's checked out at the specific revision from foundry.lock\n    let output = 
std::process::Command::new(\"git\")\n        .args([\"rev-parse\", \"HEAD\"])\n        .current_dir(&forge_std_path)\n        .output()\n        .expect(\"failed to run git rev-parse\");\n\n    let current_rev = String::from_utf8_lossy(&output.stdout).trim().to_string();\n    assert_eq!(current_rev, \"c29afdd40a82db50a3d3709d324416be50050e5e\",);\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-login.rs",
    "content": "use std::{fs, path::PathBuf};\n\nuse mockito::{Matcher, Mock, ServerGuard};\nuse soldeer_commands::{Command, Verbosity, commands::login::Login, run};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\nasync fn mock_api_server() -> (ServerGuard, Mock) {\n    let mut server = mockito::Server::new_async().await;\n    let body = r#\"{\"status\":\"success\",\"token\": \"example_token_jwt\"}\"#;\n    let mock = server\n        .mock(\"POST\", \"/api/v1/auth/login\")\n        .match_query(Matcher::Any)\n        .with_header(\"content-type\", \"application/json\")\n        .with_body(body)\n        .create_async()\n        .await;\n    (server, mock)\n}\n\nasync fn mock_api_server_token() -> (ServerGuard, Mock) {\n    let mut server = mockito::Server::new_async().await;\n    let body = r#\"{\"status\":\"success\",\"data\":{\"created_at\": \"2024-08-04T14:21:31.622589Z\",\"email\": \"test@test.net\",\"id\": \"b6d56bf0-00a5-474f-b732-f416bef53e92\",\"organization\": \"test\",\"role\": \"owner\",\"updated_at\": \"2024-08-04T14:21:31.622589Z\",\"username\": \"test\",\"verified\": true}}\"#;\n    let mock = server\n        .mock(\"GET\", \"/api/v1/auth/validate-cli-token\")\n        .match_query(Matcher::Any)\n        .with_header(\"content-type\", \"application/json\")\n        .with_body(body)\n        .create_async()\n        .await;\n    (server, mock)\n}\n\n#[tokio::test]\nasync fn test_login_without_prompt_err_400() {\n    let cmd: Command = Login::builder().email(\"test@test.com\").password(\"111111\").build().into();\n    let res = run(cmd, Verbosity::default()).await;\n    assert_eq!(\n        res.unwrap_err().to_string(),\n        \"error during login: http error during login: HTTP status client error (400 Bad Request) for url (https://api.soldeer.xyz/api/v1/auth/login)\"\n    );\n}\n\n#[tokio::test]\nasync fn test_login_without_prompt_success() {\n    let (server, mock) = mock_api_server().await;\n    let dir = testdir!();\n    let login_file: 
PathBuf = dir.join(\"test_save_jwt\");\n\n    let cmd: Command = Login::builder().email(\"test@test.com\").password(\"111111\").build().into();\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok());\n    assert!(login_file.exists());\n    assert_eq!(fs::read_to_string(login_file).unwrap(), \"example_token_jwt\");\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_login_token_success() {\n    let (server, mock) = mock_api_server_token().await;\n    let dir = testdir!();\n    let login_file: PathBuf = dir.join(\"test_save_jwt\");\n    let cmd: Command = Login::builder().token(\"example_token_jwt\").build().into();\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok());\n    assert!(login_file.exists());\n    assert_eq!(fs::read_to_string(login_file).unwrap(), \"example_token_jwt\");\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_login_token_failure() {\n    let cmd: Command = Login::builder().token(\"asdf\").build().into();\n    let res = run(cmd, Verbosity::default()).await;\n    assert_eq!(res.unwrap_err().to_string(), \"error during login: login error: invalid token\");\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-push.rs",
    "content": "use mockito::{Matcher, Mock, ServerGuard};\nuse reqwest::StatusCode;\nuse soldeer_commands::{Verbosity, commands::push::Push, run};\nuse soldeer_core::{SoldeerError, errors::PublishError};\nuse std::{env, fs, path::PathBuf};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\n#[allow(clippy::unwrap_used)]\nfn setup_project(dotfile: bool) -> (PathBuf, PathBuf) {\n    let dir = testdir!();\n    let login_file: PathBuf = dir.join(\"test_save_jwt\");\n    fs::write(&login_file, \"jwt_token_example\").unwrap();\n    let project_path = dir.join(\"mypkg\");\n    fs::create_dir(&project_path).unwrap();\n    fs::write(project_path.join(\"foundry.toml\"), \"[dependencies]\\n\").unwrap();\n    if dotfile {\n        fs::write(project_path.join(\".env\"), \"super-secret-stuff\").unwrap();\n    }\n    (login_file, project_path)\n}\n\nasync fn mock_api_server(status_code: Option<StatusCode>) -> (ServerGuard, Mock) {\n    let mut server = mockito::Server::new_async().await;\n    let body = r#\"{\"data\":[{\"created_at\":\"2024-02-27T19:19:23.938837Z\",\"created_by\":\"96228bb5-f777-4c19-ba72-363d14b8beed\",\"deleted\":false,\"deprecated\":false,\"description\":\"\",\"downloads\":648041,\"github_url\":\"\",\"id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"image\":\"\",\"latest_version\":\"1.10.0\",\"long_description\":\"\",\"name\":\"mock\",\"organization_id\":\"ff9c0d8e-9275-4f6f-a1b7-2e822450a7ba\",\"organization_name\":\"\",\"organization_verified\":true,\"updated_at\":\"2024-02-27T19:19:23.938837Z\"}],\"status\":\"success\"}\"#;\n    server\n        .mock(\"GET\", \"/api/v2/project\")\n        .match_query(Matcher::Any)\n        .with_header(\"content-type\", \"application/json\")\n        .with_body(body)\n        .create_async()\n        .await;\n    let mock = match status_code {\n        Some(status_code) => {\n            server\n                .mock(\"POST\", \"/api/v1/revision/upload\")\n                .with_header(\"content-type\", 
\"application/json\")\n                .with_status(status_code.as_u16() as usize)\n                .with_body(r#\"{\"status\":\"fail\",\"message\": \"failure\"}\"#)\n                .create_async()\n                .await\n        }\n        None => {\n            server\n                .mock(\"POST\", \"/api/v1/revision/upload\")\n                .with_header(\"content-type\", \"application/json\")\n                .with_body(r#\"{\"status\":\"success\",\"data\":{\"data\":{\"project_id\":\"mock\"}}}\"#)\n                .create_async()\n                .await\n        }\n    };\n\n    (server, mock)\n}\n\n#[tokio::test]\nasync fn test_push_success() {\n    let (login_file, project_path) = setup_project(false);\n\n    let (server, mock) = mock_api_server(None).await;\n\n    env::set_current_dir(&project_path).unwrap();\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(Push::builder().dependency(\"mypkg~0.1.0\").build().into(), Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_other_dir_success() {\n    let dir = testdir!();\n    fs::write(dir.join(\"foundry.toml\"), \"[dependencies]\\n\").unwrap();\n    let login_file = dir.join(\"test_save_jwt\");\n    fs::write(&login_file, \"jwt_token_example\").unwrap();\n    let project_path = dir.join(\"mypkg\");\n    fs::create_dir(&project_path).unwrap();\n    fs::write(project_path.join(\"test.sol\"), \"contract Foo {}\\n\").unwrap();\n\n    let (server, mock) = mock_api_server(None).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n          
  (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_not_found() {\n    let (login_file, project_path) = setup_project(false);\n\n    let (server, mock) = mock_api_server(Some(StatusCode::NO_CONTENT)).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::ProjectNotFound))));\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_already_exists() {\n    let (login_file, project_path) = setup_project(false);\n\n    let (server, mock) = mock_api_server(Some(StatusCode::ALREADY_REPORTED)).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::AlreadyExists))));\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_unauthorized() {\n    let (login_file, project_path) = 
setup_project(false);\n\n    let (server, mock) = mock_api_server(Some(StatusCode::UNAUTHORIZED)).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::AuthError(_)))));\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_payload_too_large() {\n    let (login_file, project_path) = setup_project(false);\n\n    let (server, mock) = mock_api_server(Some(StatusCode::PAYLOAD_TOO_LARGE)).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::PayloadTooLarge))));\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_other_error() {\n    let (login_file, project_path) = setup_project(false);\n\n    let (server, mock) = mock_api_server(Some(StatusCode::INTERNAL_SERVER_ERROR)).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            
Push::builder().dependency(\"mypkg~0.1.0\").path(project_path).build().into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::HttpError(_)))));\n    mock.expect(1);\n}\n\n#[tokio::test]\nasync fn test_push_dry_run() {\n    let (login_file, project_path) = setup_project(true); // insert a .env file\n\n    let (server, mock) = mock_api_server(None).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder()\n                .dependency(\"mypkg~0.1.0\")\n                .path(&project_path)\n                .dry_run(true)\n                .build()\n                .into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.expect(0);\n    assert!(project_path.join(\"mypkg.zip\").exists());\n}\n\n#[tokio::test]\nasync fn test_push_skip_warnings() {\n    let (login_file, project_path) = setup_project(true); // insert a .env file\n\n    let (server, mock) = mock_api_server(None).await;\n\n    let res = async_with_vars(\n        [\n            (\"SOLDEER_PROJECT_ROOT\", Some(project_path.to_string_lossy().to_string())),\n            (\"SOLDEER_API_URL\", Some(server.url())),\n            (\"SOLDEER_LOGIN_FILE\", Some(login_file.to_string_lossy().to_string())),\n        ],\n        run(\n            Push::builder()\n                .dependency(\"mypkg~0.1.0\")\n                .path(&project_path)\n                .skip_warnings(true)\n                .build()\n                .into(),\n            Verbosity::default(),\n        ),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    mock.expect(1);\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-uninstall.rs",
    "content": "use soldeer_commands::{\n    Command, Verbosity,\n    commands::{install::Install, uninstall::Uninstall},\n    run,\n};\nuse soldeer_core::{\n    config::read_config_deps,\n    lock::{SOLDEER_LOCK, read_lockfile},\n};\nuse std::{fs, path::PathBuf};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\n#[allow(clippy::unwrap_used)]\nasync fn setup(config_filename: &str) -> PathBuf {\n    let dir = testdir!();\n    let mut contents = r#\"[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\nsolady = \"0.0.238\"\n\"#\n    .to_string();\n    if config_filename == \"foundry.toml\" {\n        contents = format!(\n            r#\"[profile.default]\n\n[soldeer]\nremappings_location = \"config\"\n\n{contents}\"#\n        );\n    }\n    fs::write(dir.join(config_filename), contents).unwrap();\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    dir\n}\n\n#[tokio::test]\nasync fn test_uninstall_one() {\n    let dir = setup(\"soldeer.toml\").await;\n    let cmd: Command = Uninstall::builder().dependency(\"solady\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let (deps, _) = read_config_deps(dir.join(\"soldeer.toml\")).unwrap();\n    assert!(!deps.iter().any(|d| d.name() == \"solady\"));\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert!(!remappings.contains(\"solady\"));\n    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert!(!lock.entries.iter().any(|d| d.name() == \"solady\"));\n    assert!(!dir.join(\"dependencies\").join(\"solady-0.0.238\").exists());\n}\n\n#[tokio::test]\nasync fn 
test_uninstall_all() {\n    let dir = setup(\"soldeer.toml\").await;\n    let cmd: Command = Uninstall::builder().dependency(\"solady\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let cmd: Command = Uninstall::builder().dependency(\"@openzeppelin-contracts\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    let (deps, _) = read_config_deps(dir.join(\"soldeer.toml\")).unwrap();\n    assert!(deps.is_empty());\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert_eq!(remappings, \"\");\n    assert!(!dir.join(SOLDEER_LOCK).exists());\n}\n\n#[tokio::test]\nasync fn test_uninstall_foundry_config() {\n    let dir = setup(\"foundry.toml\").await;\n    let cmd: Command = Uninstall::builder().dependency(\"solady\").build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let (deps, _) = read_config_deps(dir.join(\"foundry.toml\")).unwrap();\n    assert!(!deps.iter().any(|d| d.name() == \"solady\"));\n    let config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    assert!(!config.contains(\"solady\"));\n}\n"
  },
  {
    "path": "crates/commands/tests/tests-update.rs",
    "content": "use soldeer_commands::{\n    Command, Verbosity,\n    commands::{install::Install, update::Update},\n    run,\n};\nuse soldeer_core::{\n    config::ConfigLocation,\n    lock::{SOLDEER_LOCK, read_lockfile},\n};\nuse std::{fs, path::PathBuf};\nuse temp_env::async_with_vars;\nuse testdir::testdir;\n\n#[allow(clippy::unwrap_used)]\nasync fn setup(config_filename: &str) -> PathBuf {\n    // install v1.9.0 of forge-std (faking an old install)\n    let dir = testdir!();\n    let mut contents = r#\"[dependencies]\nforge-std = \"1.9.0\"\n\"#\n    .to_string();\n    if config_filename == \"foundry.toml\" {\n        contents = format!(\n            r#\"[profile.default]\n\n[soldeer]\nremappings_location = \"config\"\n\n{contents}\"#\n        );\n    }\n    fs::write(dir.join(config_filename), &contents).unwrap();\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    // change install requirement to forge-std ^1.0.0 (making the current install outdated)\n    contents = contents.replace(\"1.9.0\", \"1\");\n    fs::write(dir.join(config_filename), &contents).unwrap();\n    // update remappings accordingly\n    fs::write(dir.join(\"remappings.txt\"), \"forge-std-1/=dependencies/forge-std-1.9.0/\\n\").unwrap();\n    dir\n}\n\n#[tokio::test]\nasync fn test_update_existing() {\n    let dir = setup(\"soldeer.toml\").await;\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    let version = lockfile.entries.first().unwrap().version();\n    assert_ne!(version, \"1.9.0\");\n    let 
remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert_eq!(remappings, format!(\"forge-std-1/=dependencies/forge-std-{version}/\\n\"));\n    assert!(dir.join(\"dependencies\").join(format!(\"forge-std-{version}\")).exists());\n}\n\n#[tokio::test]\nasync fn test_update_foundry_config() {\n    let dir = setup(\"foundry.toml\").await;\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    let version = lockfile.entries.first().unwrap().version();\n    assert_ne!(version, \"1.9.0\");\n    assert!(dir.join(\"dependencies\").join(format!(\"forge-std-{version}\")).exists());\n}\n\n#[tokio::test]\nasync fn test_update_missing() {\n    let dir = testdir!();\n    let contents = r#\"[dependencies]\nforge-std = \"1\"\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    let version = lockfile.entries.first().unwrap().version();\n    assert!(dir.join(\"dependencies\").join(format!(\"forge-std-{version}\")).exists());\n}\n\n#[tokio::test]\nasync fn test_update_custom_remappings() {\n    let dir = setup(\"soldeer.toml\").await;\n    // customize remappings before update\n    fs::write(dir.join(\"remappings.txt\"), \"forge-std/=dependencies/forge-std-1.9.0/src/\\n\")\n        .unwrap();\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", 
Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    let version = lockfile.entries.first().unwrap().version();\n    let remappings = fs::read_to_string(dir.join(\"remappings.txt\")).unwrap();\n    assert_eq!(remappings, format!(\"forge-std/=dependencies/forge-std-{version}/src/\\n\"));\n}\n\n#[tokio::test]\nasync fn test_update_git_main() {\n    let dir = testdir!();\n    // install older commit in \"main\" branch\n    let contents = r#\"[dependencies]\nmy-lib = { version = \"branch-main\", git = \"https://github.com/beeb/test-repo.git\" }\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let lockfile = r#\"[[dependencies]]\nname = \"my-lib\"\nversion = \"branch-main\"\ngit = \"https://github.com/beeb/test-repo.git\"\nrev = \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\"\n\"#;\n    fs::write(dir.join(SOLDEER_LOCK), lockfile).unwrap();\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // update to latest commit in \"main\" branch\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lockfile.entries.first().unwrap().as_git().unwrap().rev,\n        \"d5d72fa135d28b2e8307650b3ea79115183f2406\"\n    );\n}\n\n#[tokio::test]\nasync fn test_update_git_branch() {\n    let dir = testdir!();\n    // install older commit in \"dev\" branch\n    let contents = r#\"[dependencies]\nmy-lib = { 
version = \"branch-dev\", git = \"https://github.com/beeb/test-repo.git\", branch = \"dev\" }\n\"#;\n    fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n    let lockfile = r#\"[[dependencies]]\nname = \"my-lib\"\nversion = \"branch-dev\"\ngit = \"https://github.com/beeb/test-repo.git\"\nrev = \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\"\n\"#;\n    fs::write(dir.join(SOLDEER_LOCK), lockfile).unwrap();\n    let cmd: Command = Install::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n\n    // update to latest commit in \"dev\" branch\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();\n    assert_eq!(\n        lockfile.entries.first().unwrap().as_git().unwrap().rev,\n        \"8d903e557e8f1b6e62bde768aa456d4ddfca72c4\"\n    );\n}\n\n#[tokio::test]\nasync fn test_update_foundry_config_multi_dep() {\n    let dir = testdir!();\n\n    let contents = r#\"[profile.default]\n\n[dependencies]\n\"@tt\" = {version = \"1.6.1\", url = \"https://soldeer-revisions.s3.amazonaws.com/@openzeppelin-contracts/3_3_0-rc_2_22-01-2024_13:12:57_contracts.zip\"}\nforge-std = { version = \"1.8.1\" }\nsolmate = \"6.7.0\"\nmario = { version = \"1.0\", git = \"https://gitlab.com/mario4582928/Mario.git\", rev = \"22868f426bd4dd0e682b5ec5f9bd55507664240c\" }\nmario-custom-tag = { version = \"1.0\", git = \"https://gitlab.com/mario4582928/Mario.git\", tag = \"custom-tag\" }\nmario-custom-branch = { version = \"1.0\", git = \"https://gitlab.com/mario4582928/Mario.git\", tag = \"custom-branch\" }\n\n[soldeer]\nremappings_location = 
\"config\"\n\"#;\n\n    fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n\n    let cmd: Command = Update::default().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let deps = dir.join(\"dependencies\");\n    assert!(deps.join(\"@tt-1.6.1\").exists());\n    assert!(deps.join(\"forge-std-1.8.1\").exists());\n    assert!(deps.join(\"solmate-6.7.0\").exists());\n    assert!(deps.join(\"mario-1.0\").exists());\n    assert!(deps.join(\"mario-custom-tag-1.0\").exists());\n    assert!(deps.join(\"mario-custom-branch-1.0\").exists());\n}\n\n#[tokio::test]\nasync fn test_install_new_foundry_no_foundry_toml() {\n    let dir = testdir!();\n\n    let cmd: Command = Update::builder().config_location(ConfigLocation::Foundry).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"foundry.toml\")).unwrap();\n    let expected = r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"dependencies\"]\n\n[dependencies]\n\n# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options\n\"#;\n    assert_eq!(config, expected);\n}\n\n#[tokio::test]\nasync fn test_install_new_soldeer_no_soldeer_toml() {\n    let dir = testdir!();\n\n    let cmd: Command = Update::builder().config_location(ConfigLocation::Soldeer).build().into();\n    let res = async_with_vars(\n        [(\"SOLDEER_PROJECT_ROOT\", Some(dir.to_string_lossy().as_ref()))],\n        run(cmd, Verbosity::default()),\n    )\n    .await;\n    assert!(res.is_ok(), \"{res:?}\");\n    let config = fs::read_to_string(dir.join(\"soldeer.toml\")).unwrap();\n    let content = 
\"[dependencies]\\n\";\n    assert_eq!(config, content);\n}\n"
  },
  {
    "path": "crates/core/Cargo.toml",
    "content": "[package]\nname = \"soldeer-core\"\ndescription = \"Core functionality for Soldeer\"\nauthors.workspace = true\ncategories.workspace = true\nedition.workspace = true\nexclude.workspace = true\nhomepage.workspace = true\nkeywords.workspace = true\nlicense.workspace = true\nreadme.workspace = true\nrepository.workspace = true\nrust-version.workspace = true\nversion.workspace = true\n\n\n[lints]\nworkspace = true\n\n[dependencies]\nbon.workspace = true\nchrono = { version = \"0.4.38\", default-features = false, features = [\n    \"serde\",\n    \"std\",\n] }\nconst-hex = \"1.12.0\"\nderive_more.workspace = true\ndunce = \"1.0.5\"\nhome = \"0.5.9\"\nignore = { version = \"0.4.24\", features = [\"simd-accel\"] }\nlog = { workspace = true, features = [\"kv_std\"] }\npath-slash.workspace = true\nrayon.workspace = true\nregex = \"1.10.5\"\nreqwest = { workspace = true, features = [\"json\", \"multipart\", \"stream\"] }\nsanitize-filename = \"0.6.0\"\nsemver = \"1.0.23\"\nserde = { version = \"1.0.204\", features = [\"derive\"] }\nserde_json = \"1.0.120\"\nsha2 = \"0.10.8\"\nthiserror.workspace = true\ntokio.workspace = true\ntoml_edit = { version = \"0.25.11\", features = [\"serde\"] }\nuuid = { version = \"1.10.0\", features = [\"serde\", \"v4\"] }\nzip = { version = \"4.0.0\", default-features = false, features = [\"deflate\"] }\nzip-extract = { version = \"0.4.0\", default-features = false, features = [\n    \"deflate\",\n] }\n\n[dev-dependencies]\nmockito.workspace = true\ntemp-env.workspace = true\ntestdir.workspace = true\n\n[features]\nserde = []\n"
  },
  {
    "path": "crates/core/src/auth.rs",
    "content": "//! Registry authentication\nuse crate::{errors::AuthError, registry::api_url, utils::login_file_path};\nuse log::{debug, info, warn};\nuse reqwest::{\n    Client, StatusCode,\n    header::{AUTHORIZATION, HeaderMap, HeaderValue},\n};\nuse serde::{Deserialize, Serialize};\nuse std::{env, fs, path::PathBuf};\n\npub type Result<T> = std::result::Result<T, AuthError>;\n\n/// Credentials to be used for login\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\npub struct Credentials {\n    pub email: String,\n    pub password: String,\n}\n\n/// Response from the login endpoint\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\npub struct LoginResponse {\n    pub status: String,\n    /// JWT token\n    pub token: String,\n}\n\n/// Get the JWT token from the environment or from the login file\n///\n/// Precedence is given to the `SOLDEER_API_TOKEN` environment variable.\npub fn get_token() -> Result<String> {\n    if let Ok(token) = env::var(\"SOLDEER_API_TOKEN\") &&\n        !token.is_empty()\n    {\n        return Ok(token)\n    }\n    let token_path = login_file_path()?;\n    let jwt =\n        fs::read_to_string(&token_path).map_err(|_| AuthError::MissingToken)?.trim().to_string();\n    if jwt.is_empty() {\n        debug!(token_path:?; \"token file exists but is empty\");\n        return Err(AuthError::MissingToken);\n    }\n    debug!(token_path:?; \"token retrieved from file\");\n    Ok(jwt)\n}\n\n/// Get a header map with the bearer token set up if it exists\npub fn get_auth_headers() -> Result<HeaderMap> {\n    let mut headers: HeaderMap = HeaderMap::new();\n    let Ok(token) = get_token() else {\n        return Ok(headers);\n    };\n    let header_value =\n        HeaderValue::from_str(&format!(\"Bearer {token}\")).map_err(|_| AuthError::InvalidToken)?;\n    headers.insert(AUTHORIZATION, header_value);\n    Ok(headers)\n}\n\n/// Save an access token in the login file\npub fn save_token(token: &str) -> 
Result<PathBuf> {\n    let token_path = login_file_path()?;\n    fs::write(&token_path, token)?;\n    Ok(token_path)\n}\n\n/// Retrieve user profile for the token to check its validity, returning the username\npub async fn check_token(token: &str) -> Result<String> {\n    let client = Client::new();\n    let url = api_url(\"v1\", \"auth/validate-cli-token\", &[]);\n    let mut headers: HeaderMap = HeaderMap::new();\n    let header_value =\n        HeaderValue::from_str(&format!(\"Bearer {token}\")).map_err(|_| AuthError::InvalidToken)?;\n    headers.insert(AUTHORIZATION, header_value);\n    let response = client.get(url).headers(headers).send().await?;\n    match response.status() {\n        s if s.is_success() => {\n            #[derive(Deserialize)]\n            struct User {\n                id: String,\n                username: String,\n            }\n            #[derive(Deserialize)]\n            struct UserResponse {\n                data: User,\n            }\n            let res: UserResponse = response.json().await?;\n            debug!(\"token is valid for user {} with ID {}\", res.data.username, res.data.id);\n            Ok(res.data.username)\n        }\n        StatusCode::UNAUTHORIZED => Err(AuthError::InvalidToken),\n        _ => Err(AuthError::HttpError(\n            response.error_for_status().expect_err(\"result should be an error\"),\n        )),\n    }\n}\n\n/// Execute the login request and store the JWT token in the login file\npub async fn execute_login(login: &Credentials) -> Result<PathBuf> {\n    warn!(\n        \"the option to login via email and password will be removed in a future version of Soldeer. 
Please update your usage by either using `soldeer login --token [YOUR CLI TOKEN]` or passing the `SOLDEER_API_TOKEN` environment variable to the `push` command.\"\n    );\n\n    let token_path = login_file_path()?;\n    let url = api_url(\"v1\", \"auth/login\", &[]);\n    let client = Client::new();\n    let res = client.post(url).json(login).send().await?;\n    match res.status() {\n        s if s.is_success() => {\n            debug!(\"login request completed\");\n            let response: LoginResponse = res.json().await?;\n            fs::write(&token_path, response.token)?;\n            info!(token_path:?; \"login successful\");\n            Ok(token_path)\n        }\n        StatusCode::UNAUTHORIZED => Err(AuthError::InvalidCredentials),\n        _ => Err(AuthError::HttpError(\n            res.error_for_status().expect_err(\"result should be an error\"),\n        )),\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use temp_env::{async_with_vars, with_var};\n    use testdir::testdir;\n\n    #[tokio::test]\n    async fn test_login_success() {\n        let mut server = mockito::Server::new_async().await;\n        server\n            .mock(\"POST\", \"/api/v1/auth/login\")\n            .with_status(201)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(r#\"{\"status\":\"200\",\"token\":\"jwt_token_example\"}\"#)\n            .create_async()\n            .await;\n\n        let test_file = testdir!().join(\"test_save_jwt\");\n        let res = async_with_vars(\n            [\n                (\"SOLDEER_API_URL\", Some(server.url())),\n                (\"SOLDEER_LOGIN_FILE\", Some(test_file.to_string_lossy().to_string())),\n            ],\n            execute_login(&Credentials {\n                email: \"test@test.com\".to_string(),\n                password: \"1234\".to_string(),\n            }),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        
assert_eq!(fs::canonicalize(res.unwrap()).unwrap(), fs::canonicalize(&test_file).unwrap());\n        assert_eq!(fs::read_to_string(test_file).unwrap(), \"jwt_token_example\");\n    }\n\n    #[tokio::test]\n    async fn test_login_401() {\n        let mut server = mockito::Server::new_async().await;\n        server\n            .mock(\"POST\", \"/api/v1/auth/login\")\n            .with_status(401)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(r#\"{\"status\":\"401\"}\"#)\n            .create_async()\n            .await;\n\n        let test_file = testdir!().join(\"test_save_jwt\");\n        let res = async_with_vars(\n            [\n                (\"SOLDEER_API_URL\", Some(server.url())),\n                (\"SOLDEER_LOGIN_FILE\", Some(test_file.to_string_lossy().to_string())),\n            ],\n            execute_login(&Credentials {\n                email: \"test@test.com\".to_string(),\n                password: \"1234\".to_string(),\n            }),\n        )\n        .await;\n        assert!(matches!(res, Err(AuthError::InvalidCredentials)), \"{res:?}\");\n    }\n\n    #[tokio::test]\n    async fn test_login_500() {\n        let mut server = mockito::Server::new_async().await;\n        server\n            .mock(\"POST\", \"/api/v1/auth/login\")\n            .with_status(500)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(r#\"{\"status\":\"500\"}\"#)\n            .create_async()\n            .await;\n\n        let test_file = testdir!().join(\"test_save_jwt\");\n        let res = async_with_vars(\n            [\n                (\"SOLDEER_API_URL\", Some(server.url())),\n                (\"SOLDEER_LOGIN_FILE\", Some(test_file.to_string_lossy().to_string())),\n            ],\n            execute_login(&Credentials {\n                email: \"test@test.com\".to_string(),\n                password: \"1234\".to_string(),\n            }),\n        )\n        .await;\n        
assert!(matches!(res, Err(AuthError::HttpError(_))), \"{res:?}\");\n    }\n\n    #[tokio::test]\n    async fn test_check_token_success() {\n        let mut server = mockito::Server::new_async().await;\n        server\n            .mock(\"GET\", \"/api/v1/auth/validate-cli-token\")\n            .with_status(200)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(\n                r#\"{\"status\":\"success\",\"data\":{\"created_at\": \"2024-08-04T14:21:31.622589Z\",\"email\": \"test@test.net\",\"id\": \"b6d56bf0-00a5-474f-b732-f416bef53e92\",\"organization\": \"test\",\"role\": \"owner\",\"updated_at\": \"2024-08-04T14:21:31.622589Z\",\"username\": \"test\",\"verified\": true}}\"#,\n            )\n            .create_async()\n            .await;\n\n        let res =\n            async_with_vars([(\"SOLDEER_API_URL\", Some(server.url()))], check_token(\"eyJ0...\"))\n                .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"test\");\n    }\n\n    #[tokio::test]\n    async fn test_check_token_failure() {\n        let mut server = mockito::Server::new_async().await;\n        server\n            .mock(\"GET\", \"/api/v1/auth/validate-cli-token\")\n            .with_status(401)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(r#\"{\"status\":\"fail\",\"message\":\"Invalid token\"}\"#)\n            .create_async()\n            .await;\n\n        let res =\n            async_with_vars([(\"SOLDEER_API_URL\", Some(server.url()))], check_token(\"foobar\")).await;\n        assert!(res.is_err(), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_get_token_env() {\n        let res = with_var(\"SOLDEER_API_TOKEN\", Some(\"test\"), get_token);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"test\");\n    }\n}\n"
  },
  {
    "path": "crates/core/src/config.rs",
    "content": "//! Manage the Soldeer configuration and dependencies list.\nuse crate::{\n    download::{find_install_path, find_install_path_sync},\n    errors::ConfigError,\n    lock::SOLDEER_LOCK,\n    remappings::RemappingsLocation,\n};\nuse derive_more::derive::{Display, From, FromStr};\nuse log::{debug, warn};\nuse serde::Deserialize;\nuse std::{\n    env, fmt, fs,\n    path::{Path, PathBuf},\n};\nuse toml_edit::{Array, DocumentMut, InlineTable, Item, Table, value};\n\npub type Result<T> = std::result::Result<T, ConfigError>;\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Display)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\npub enum UrlType {\n    Git(String),\n    Http(String),\n}\n\nimpl UrlType {\n    pub fn git(url: impl Into<String>) -> Self {\n        Self::Git(url.into())\n    }\n\n    pub fn http(url: impl Into<String>) -> Self {\n        Self::Http(url.into())\n    }\n}\n\n/// The paths used by Soldeer.\n///\n/// The paths are canonicalized on creation of the object.\n///\n/// To create this object, the [`Paths::new`] and [`Paths::from_root`] methods can be used.\n///\n/// # Examples\n///\n/// ```\n/// # use soldeer_core::config::Paths;\n/// # let dir = testdir::testdir!();\n/// # std::env::set_current_dir(&dir).unwrap();\n/// # std::fs::write(\"foundry.toml\", \"[dependencies]\\n\").unwrap();\n/// let paths = Paths::new().unwrap(); // foundry.toml exists in the current path\n/// assert_eq!(paths.root, std::env::current_dir().unwrap());\n/// assert_eq!(paths.config, std::env::current_dir().unwrap().join(\"foundry.toml\"));\n///\n/// let paths = Paths::from_root(&dir).unwrap(); // root is the given path\n/// assert_eq!(paths.root, dir);\n/// ```\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\n// making sure the struct is not constructible from the outside without using the new/from methods\n#[non_exhaustive]\npub struct Paths {\n    /// The 
root directory of the project.\n    ///\n    /// At the moment, the current directory or the path given by the `SOLDEER_PROJECT_ROOT`\n    /// environment variable.\n    pub root: PathBuf,\n\n    /// The path to the config file.\n    ///\n    /// `foundry.toml` if it contains a `[dependencies]` table, otherwise `soldeer.toml` if it\n    /// exists. Otherwise, the `foundry.toml` file is used by default. When the config file does\n    /// not exist, a new one is created with default contents.\n    pub config: PathBuf,\n\n    /// The path to the dependencies folder (does not need to exist).\n    ///\n    /// This is `/dependencies` inside the root directory.\n    pub dependencies: PathBuf,\n\n    /// The path to the lockfile (does not need to exist).\n    ///\n    /// This is `/soldeer.lock` inside the root directory.\n    pub lock: PathBuf,\n\n    /// The path to the remappings file (does not need to exist).\n    ///\n    /// This path gets ignored if the remappings should be generated in the `foundry.toml` file.\n    /// This is `/remappings.txt` inside the root directory.\n    pub remappings: PathBuf,\n}\n\nimpl Paths {\n    /// Instantiate all the paths needed for Soldeer.\n    ///\n    /// The root path defaults to the current directory but can be overridden with the\n    /// `SOLDEER_PROJECT_ROOT` environment variable.\n    ///\n    /// The paths are canonicalized.\n    pub fn new() -> Result<Self> {\n        Self::with_config(None)\n    }\n\n    /// Instantiate all the paths needed for Soldeer.\n    ///\n    /// The root path is automatically detected (by traversing the path) but can be overridden with\n    /// the `SOLDEER_PROJECT_ROOT` environment variable.\n    /// Alternatively, the [`Paths::with_root_and_config`] constructor can be used.\n    ///\n    /// If a config location is provided, it bypasses auto-detection and uses that. If `None`, then\n    /// the location is auto-detected or if impossible, the `foundry.toml` file is used. 
If the\n    /// config file does not exist yet, it gets created with default content.\n    ///\n    /// The paths are canonicalized.\n    pub fn with_config(config_location: Option<ConfigLocation>) -> Result<Self> {\n        let root = dunce::canonicalize(Self::get_root_path())?;\n        Self::with_root_and_config(root, config_location)\n    }\n\n    /// Instantiate all the paths needed for Soldeer.\n    ///\n    /// If a config location is provided, it bypasses auto-detection and uses that. If `None`, then\n    /// the location is auto-detected or if impossible, the `foundry.toml` file is used. If the\n    /// config file does not exist yet, it gets created with default content.\n    ///\n    /// The paths are canonicalized.\n    pub fn with_root_and_config(\n        root: impl AsRef<Path>,\n        config_location: Option<ConfigLocation>,\n    ) -> Result<Self> {\n        let root = root.as_ref();\n        let config = Self::get_config_path(root, config_location)?;\n        let dependencies = root.join(\"dependencies\");\n        let lock = root.join(SOLDEER_LOCK);\n        let remappings = root.join(\"remappings.txt\");\n\n        Ok(Self { root: root.to_path_buf(), config, dependencies, lock, remappings })\n    }\n\n    /// Generate the paths object from a known root directory.\n    ///\n    /// The `SOLDEER_PROJECT_ROOT` environment variable is ignored.\n    ///\n    /// The paths are canonicalized.\n    pub fn from_root(root: impl AsRef<Path>) -> Result<Self> {\n        let root = dunce::canonicalize(root.as_ref())?;\n        let config = Self::get_config_path(&root, None)?;\n        let dependencies = root.join(\"dependencies\");\n        let lock = root.join(SOLDEER_LOCK);\n        let remappings = root.join(\"remappings.txt\");\n\n        Ok(Self { root, config, dependencies, lock, remappings })\n    }\n\n    /// Get the root directory path.\n    ///\n    /// If `SOLDEER_PROJECT_ROOT` is present in the environment, this is the returned value. 
Else,\n    /// we search for the root of the project with `find_project_root`.\n    pub fn get_root_path() -> PathBuf {\n        let res = env::var(\"SOLDEER_PROJECT_ROOT\").map_or_else(\n            |_| {\n                debug!(\"SOLDEER_PROJECT_ROOT not defined, searching for project root\");\n                find_project_root(None::<PathBuf>).expect(\"could not find project root\")\n            },\n            |p| {\n                if p.is_empty() {\n                    debug!(\"SOLDEER_PROJECT_ROOT exists but is empty, searching for project root\");\n                    find_project_root(None::<PathBuf>).expect(\"could not find project root\")\n                } else {\n                    debug!(path = p; \"root set by SOLDEER_PROJECT_ROOT\");\n                    PathBuf::from(p)\n                }\n            },\n        );\n        debug!(path:? = res; \"found project root\");\n        res\n    }\n\n    /// Get the path to the config file.\n    ///\n    /// If a parameter is given for `config_location`, it will be used. Otherwise, the function will\n    /// try to auto-detect the location based on the existence of the `dependencies` entry in\n    /// the foundry config file, or the existence of a `soldeer.toml` file. 
If no config can be\n    /// found, `foundry.toml` is used by default.\n    fn get_config_path(\n        root: impl AsRef<Path>,\n        config_location: Option<ConfigLocation>,\n    ) -> Result<PathBuf> {\n        let foundry_path = root.as_ref().join(\"foundry.toml\");\n        let soldeer_path = root.as_ref().join(\"soldeer.toml\");\n        // use the user preference if available\n        let location = config_location.or_else(|| {\n            debug!(\"no preferred config location, trying to detect automatically\");\n            detect_config_location(root)\n        }).unwrap_or_else(|| {\n            warn!(\"config file location could not be determined automatically, using foundry by default\");\n            ConfigLocation::Foundry\n        });\n        debug!(\"using config location {location:?}\");\n        create_or_modify_config(location, &foundry_path, &soldeer_path)\n    }\n\n    /// Default Foundry config file path\n    pub fn foundry_default() -> PathBuf {\n        let root: PathBuf =\n            dunce::canonicalize(Self::get_root_path()).expect(\"could not get the root\");\n        root.join(\"foundry.toml\")\n    }\n\n    /// Default Soldeer config file path\n    pub fn soldeer_default() -> PathBuf {\n        let root: PathBuf =\n            dunce::canonicalize(Self::get_root_path()).expect(\"could not get the root\");\n        root.join(\"soldeer.toml\")\n    }\n}\n\n/// For clap\nfn default_true() -> bool {\n    true\n}\n\n/// The Soldeer config options.\n#[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize))]\npub struct SoldeerConfig {\n    /// Whether to generate remappings or completely leave them untouched.\n    ///\n    /// Defaults to `true`.\n    #[serde(default = \"default_true\")]\n    pub remappings_generate: bool,\n\n    /// Whether to regenerate the remappings every time and ignore existing content.\n    ///\n    /// Defaults to `false`.\n    #[serde(default)]\n    pub 
remappings_regenerate: bool,\n\n    /// Whether to include the version requirement string in the left part of the remappings.\n    ///\n    /// Defaults to `true`.\n    #[serde(default = \"default_true\")]\n    pub remappings_version: bool,\n\n    /// A prefix to add to each dependency name in the left part of the remappings.\n    ///\n    /// None by default.\n    #[serde(default)]\n    pub remappings_prefix: String,\n\n    /// The location where the remappings file should be generated.\n    ///\n    /// Either inside the `foundry.toml` config file or as a separate `remappings.txt` file.\n    /// This gets ignored if the config file is `soldeer.toml`, in which case the remappings\n    /// are always generated in a separate file.\n    ///\n    /// Defaults to [`RemappingsLocation::Txt`].\n    #[serde(default)]\n    pub remappings_location: RemappingsLocation,\n\n    /// Whether to include dependencies from dependencies.\n    ///\n    /// For dependencies which use soldeer, the `soldeer install` command will be invoked.\n    /// Git dependencies which have submodules will see their submodules cloned as well.\n    ///\n    /// Defaults to `false`.\n    #[serde(default)]\n    pub recursive_deps: bool,\n}\n\nimpl Default for SoldeerConfig {\n    fn default() -> Self {\n        Self {\n            remappings_generate: true,\n            remappings_regenerate: false,\n            remappings_version: true,\n            remappings_prefix: String::new(),\n            remappings_location: RemappingsLocation::default(),\n            recursive_deps: false,\n        }\n    }\n}\n\n/// A git identifier used to specify a revision, branch or tag.\n///\n/// # Examples\n///\n/// ```\n/// # use soldeer_core::config::GitIdentifier;\n/// let rev = GitIdentifier::from_rev(\"082692fcb6b5b1ab8f856914897f7f2b46b84fd2\");\n/// let branch = GitIdentifier::from_branch(\"feature/foo\");\n/// let tag = GitIdentifier::from_tag(\"v1.0.0\");\n/// ```\n#[derive(Debug, Clone, PartialEq, Eq, Hash, 
Display)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub enum GitIdentifier {\n    /// A commit hash\n    Rev(String),\n\n    /// A branch name\n    Branch(String),\n\n    /// A tag name\n    Tag(String),\n}\n\nimpl GitIdentifier {\n    /// Create a new git identifier from a revision hash.\n    pub fn from_rev(rev: impl Into<String>) -> Self {\n        let rev: String = rev.into();\n        Self::Rev(rev)\n    }\n\n    /// Create a new git identifier from a branch name.\n    pub fn from_branch(branch: impl Into<String>) -> Self {\n        let branch: String = branch.into();\n        Self::Branch(branch)\n    }\n\n    /// Create a new git identifier from a tag name.\n    pub fn from_tag(tag: impl Into<String>) -> Self {\n        let tag: String = tag.into();\n        Self::Tag(tag)\n    }\n}\n\n/// A git dependency config item.\n///\n/// This struct is used to represent a git dependency from the config file.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(PathBuf, into))]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub struct GitDependency {\n    /// The name of the dependency (user-defined).\n    pub name: String,\n\n    /// The version requirement string (semver).\n    ///\n    /// Example: `>=1.9.3 || ^2.0.0`\n    ///\n    /// When no operator is used before the version number, it defaults to `=` which pins the\n    /// version.\n    #[cfg_attr(feature = \"serde\", serde(rename = \"version\"))]\n    pub version_req: String,\n\n    /// The git URL, must end with `.git`.\n    pub git: String,\n\n    /// The git identifier (revision, branch or tag).\n    ///\n    /// If omitted, the default branch is used.\n    pub identifier: Option<GitIdentifier>,\n\n    /// An optional relative path to the project's root within the repository.\n    ///\n    /// The project root is where the soldeer.toml or foundry.toml resides. 
If no path is provided,\n    /// then the repo's root must contain a Soldeer config.\n    pub project_root: Option<PathBuf>,\n}\n\nimpl fmt::Display for GitDependency {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> core::fmt::Result {\n        write!(f, \"{}~{}\", self.name, self.version_req)\n    }\n}\n\n/// An HTTP dependency config item.\n///\n/// This struct is used to represent an HTTP dependency from the config file.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[allow(clippy::duplicated_attributes)]\n#[builder(on(String, into), on(PathBuf, into))]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub struct HttpDependency {\n    /// The name of the dependency (user-defined).\n    pub name: String,\n\n    /// The version requirement string (semver).\n    ///\n    /// Example: `>=1.9.3 || ^2.0.0`\n    ///\n    /// When no operator is used before the version number, it defaults to `=` which pins the\n    /// version.\n    #[cfg_attr(feature = \"serde\", serde(rename = \"version\"))]\n    pub version_req: String,\n\n    /// The URL to the dependency.\n    ///\n    /// If omitted, the registry will be contacted to get the download URL for that dependency (by\n    /// name).\n    pub url: Option<String>,\n\n    /// An optional relative path to the project's root within the zip file.\n    ///\n    /// The project root is where the soldeer.toml or foundry.toml resides. 
If no path is provided,\n    /// then the zip's root must contain a Soldeer config.\n    pub project_root: Option<PathBuf>,\n}\n\nimpl fmt::Display for HttpDependency {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> core::fmt::Result {\n        write!(f, \"{}~{}\", self.name, self.version_req)\n    }\n}\n\n/// A git or HTTP dependency config item.\n///\n/// A builder can be used to create the underlying [`HttpDependency`] or [`GitDependency`] and then\n/// converted into this type with `.into()`.\n///\n/// # Examples\n///\n/// ```\n/// # use soldeer_core::config::{Dependency, HttpDependency};\n/// let dep: Dependency = HttpDependency::builder()\n///     .name(\"my-dep\")\n///     .version_req(\"^1.0.0\")\n///     .url(\"https://...\")\n///     .build()\n///     .into();\n/// ```\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Display, From)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub enum Dependency {\n    #[from(HttpDependency)]\n    Http(HttpDependency),\n\n    #[from(GitDependency)]\n    Git(GitDependency),\n}\n\nimpl Dependency {\n    /// Create a new dependency from a name and version requirement string.\n    ///\n    /// The string should be in the format `name~version_req`.\n    ///\n    /// The version requirement string can use the semver format.\n    ///\n    /// Example: `dependency~^1.0.0`\n    ///\n    /// If a custom URL is provided, then the version requirement string\n    /// cannot contain the `=` character, as it would break the remappings.\n    ///\n    /// # Examples\n    ///\n    /// ```\n    /// # use soldeer_core::config::{Dependency, HttpDependency, GitDependency, GitIdentifier, UrlType};\n    /// assert_eq!(\n    ///     Dependency::from_name_version(\"my-lib~^1.0.0\", Some(UrlType::http(\"https://foo.bar/zip.zip\")), None)\n    ///         .unwrap(),\n    ///     HttpDependency::builder()\n    ///         .name(\"my-lib\")\n    ///         .version_req(\"^1.0.0\")\n    ///         
.url(\"https://foo.bar/zip.zip\")\n    ///         .build()\n    ///         .into()\n    /// );\n    /// assert_eq!(\n    ///     Dependency::from_name_version(\n    ///         \"my-lib~^1.0.0\",\n    ///         Some(UrlType::git(\"git@github.com:foo/bar.git\")),\n    ///         Some(GitIdentifier::from_tag(\"v1.0.0\")),\n    ///     )\n    ///     .unwrap(),\n    ///     GitDependency::builder()\n    ///         .name(\"my-lib\")\n    ///         .version_req(\"^1.0.0\")\n    ///         .git(\"git@github.com:foo/bar.git\")\n    ///         .identifier(GitIdentifier::from_tag(\"v1.0.0\"))\n    ///         .build()\n    ///         .into()\n    /// );\n    /// ```\n    pub fn from_name_version(\n        name_version: &str,\n        custom_url: Option<UrlType>,\n        identifier: Option<GitIdentifier>,\n    ) -> Result<Self> {\n        let (dependency_name, dependency_version_req) = name_version\n            .split_once('~')\n            .ok_or(ConfigError::InvalidNameAndVersion(name_version.to_string()))?;\n        if dependency_version_req.is_empty() {\n            return Err(ConfigError::EmptyVersion(dependency_name.to_string()));\n        }\n        Ok(match custom_url {\n            Some(url) => {\n                // in this case (custom url or git dependency), the version requirement string is\n                // going to be used as part of the folder name inside the\n                // dependencies folder. 
As such, it's not allowed to contain the \"=\"\n                // character, because that would break the remappings.\n                if dependency_version_req.contains('=') {\n                    return Err(ConfigError::InvalidVersionReq(dependency_name.to_string()));\n                }\n                debug!(url:% = url; \"using custom url\");\n                match url {\n                    UrlType::Git(url) => GitDependency {\n                        name: dependency_name.to_string(),\n                        version_req: dependency_version_req.to_string(),\n                        git: url,\n                        identifier,\n                        project_root: None,\n                    }\n                    .into(),\n                    UrlType::Http(url) => HttpDependency {\n                        name: dependency_name.to_string(),\n                        version_req: dependency_version_req.to_string(),\n                        url: Some(url),\n                        project_root: None,\n                    }\n                    .into(),\n                }\n            }\n            None => HttpDependency {\n                name: dependency_name.to_string(),\n                version_req: dependency_version_req.to_string(),\n                url: None,\n                project_root: None,\n            }\n            .into(),\n        })\n    }\n\n    /// Get the name of the dependency.\n    pub fn name(&self) -> &str {\n        match self {\n            Self::Http(dep) => &dep.name,\n            Self::Git(dep) => &dep.name,\n        }\n    }\n\n    /// Get the version requirement string of the dependency.\n    pub fn version_req(&self) -> &str {\n        match self {\n            Self::Http(dep) => &dep.version_req,\n            Self::Git(dep) => &dep.version_req,\n        }\n    }\n\n    /// Get the URL of the dependency.\n    pub fn url(&self) -> Option<&String> {\n        match self {\n            Self::Http(dep) => dep.url.as_ref(),\n        
    Self::Git(dep) => Some(&dep.git),\n        }\n    }\n\n    /// Get the install path of the dependency (must exist already).\n    pub fn install_path_sync(&self, deps: impl AsRef<Path>) -> Option<PathBuf> {\n        debug!(dep:% = self; \"trying to find installation path of dependency (sync)\");\n        find_install_path_sync(self, deps)\n    }\n\n    /// Get the install path of the dependency in an async way (must exist already).\n    pub async fn install_path(&self, deps: impl AsRef<Path>) -> Option<PathBuf> {\n        debug!(dep:% = self; \"trying to find installation path of dependency (async)\");\n        find_install_path(self, deps).await\n    }\n\n    /// Get the relative path to the project root (config file location).\n    pub fn project_root(&self) -> Option<PathBuf> {\n        match self {\n            Self::Http(dep) => dep.project_root.clone(),\n            Self::Git(dep) => dep.project_root.clone(),\n        }\n    }\n\n    /// Convert the dependency to a TOML value for saving to the config file.\n    pub fn to_toml_value(&self) -> (String, Item) {\n        match self {\n            Self::Http(dep) => (\n                dep.name.clone(),\n                match &dep.url {\n                    Some(url) => {\n                        let mut table = InlineTable::new();\n                        table.insert(\n                            \"version\",\n                            value(&dep.version_req)\n                                .into_value()\n                                .expect(\"version should be a valid toml value\"),\n                        );\n                        table.insert(\n                            \"url\",\n                            value(url).into_value().expect(\"url should be a valid toml value\"),\n                        );\n                        if let Some(path) = dep.project_root.as_ref() {\n                            table.insert(\n                                \"project_root\",\n                             
   value(path.to_string_lossy().into_owned())\n                                    .into_value()\n                                    .expect(\"project_root should be a valid toml value\"),\n                            );\n                        }\n                        value(table)\n                    }\n                    None => value(&dep.version_req),\n                },\n            ),\n            Self::Git(dep) => {\n                let mut table = InlineTable::new();\n                table.insert(\n                    \"version\",\n                    value(&dep.version_req)\n                        .into_value()\n                        .expect(\"version should be a valid toml value\"),\n                );\n                table.insert(\n                    \"git\",\n                    value(&dep.git).into_value().expect(\"git URL should be a valid toml value\"),\n                );\n                match &dep.identifier {\n                    Some(GitIdentifier::Rev(rev)) => {\n                        table.insert(\n                            \"rev\",\n                            value(rev).into_value().expect(\"rev should be a valid toml value\"),\n                        );\n                    }\n                    Some(GitIdentifier::Branch(branch)) => {\n                        table.insert(\n                            \"branch\",\n                            value(branch)\n                                .into_value()\n                                .expect(\"branch should be a valid toml value\"),\n                        );\n                    }\n                    Some(GitIdentifier::Tag(tag)) => {\n                        table.insert(\n                            \"tag\",\n                            value(tag).into_value().expect(\"tag should be a valid toml value\"),\n                        );\n                    }\n                    None => {}\n                }\n                if let Some(path) = dep.project_root.as_ref() 
{\n                    table.insert(\n                        \"project_root\",\n                        value(path.to_string_lossy().into_owned())\n                            .into_value()\n                            .expect(\"project_root should be a valid toml value\"),\n                    );\n                }\n                (dep.name.clone(), value(table))\n            }\n        }\n    }\n\n    /// Check if the dependency is an HTTP dependency.\n    pub fn is_http(&self) -> bool {\n        matches!(self, Self::Http(_))\n    }\n\n    /// Cast to a HTTP dependency if it is one.\n    pub fn as_http(&self) -> Option<&HttpDependency> {\n        if let Self::Http(v) = self { Some(v) } else { None }\n    }\n\n    /// Cast to a mutable HTTP dependency if it is one.\n    pub fn as_http_mut(&mut self) -> Option<&mut HttpDependency> {\n        if let Self::Http(v) = self { Some(v) } else { None }\n    }\n\n    /// Check if the dependency is a git dependency.\n    pub fn is_git(&self) -> bool {\n        matches!(self, Self::Git(_))\n    }\n\n    /// Cast to a git dependency if it is one.\n    pub fn as_git(&self) -> Option<&GitDependency> {\n        if let Self::Git(v) = self { Some(v) } else { None }\n    }\n\n    /// Cast to a mutable git dependency if it is one.\n    pub fn as_git_mut(&mut self) -> Option<&mut GitDependency> {\n        if let Self::Git(v) = self { Some(v) } else { None }\n    }\n}\n\nimpl From<&HttpDependency> for Dependency {\n    fn from(dep: &HttpDependency) -> Self {\n        Self::Http(dep.clone())\n    }\n}\n\nimpl From<&GitDependency> for Dependency {\n    fn from(dep: &GitDependency) -> Self {\n        Self::Git(dep.clone())\n    }\n}\n\n/// The location where the Soldeer config should be stored.\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, FromStr)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub enum ConfigLocation {\n    /// The `foundry.toml` file.\n    Foundry,\n\n    /// The `soldeer.toml` file.\n 
   Soldeer,\n}\n\nimpl From<ConfigLocation> for PathBuf {\n    fn from(value: ConfigLocation) -> Self {\n        match value {\n            ConfigLocation::Foundry => Paths::foundry_default(),\n            ConfigLocation::Soldeer => Paths::soldeer_default(),\n        }\n    }\n}\n\n/// A warning generated during parsing of a dependency from the config file.\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub struct ParsingWarning {\n    dependency_name: String,\n    message: String,\n}\n\nimpl fmt::Display for ParsingWarning {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"{}: {}\", self.dependency_name, self.message)\n    }\n}\n\n/// The result of parsing a dependency from the config file.\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, Deserialize))]\npub struct ParsingResult {\n    pub dependency: Dependency,\n    pub warnings: Vec<ParsingWarning>,\n}\n\nimpl ParsingResult {\n    /// Whether the parsing result contains one or more warnings.\n    pub fn has_warnings(&self) -> bool {\n        !self.warnings.is_empty()\n    }\n}\n\nimpl From<HttpDependency> for ParsingResult {\n    fn from(value: HttpDependency) -> Self {\n        Self { dependency: value.into(), warnings: Vec::default() }\n    }\n}\n\nimpl From<GitDependency> for ParsingResult {\n    fn from(value: GitDependency) -> Self {\n        Self { dependency: value.into(), warnings: Vec::default() }\n    }\n}\n\nimpl From<Dependency> for ParsingResult {\n    fn from(value: Dependency) -> Self {\n        Self { dependency: value, warnings: Vec::default() }\n    }\n}\n\n/// Detect the location of the config file in case no user preference is available.\n///\n/// The function will try to auto-detect the location based on the existence of the\n/// `dependencies` entry in the foundry config file, or the existence of a `soldeer.toml` file.\n/// If 
no config can be found, `None` is returned.\npub fn detect_config_location(root: impl AsRef<Path>) -> Option<ConfigLocation> {\n    let foundry_path = root.as_ref().join(\"foundry.toml\");\n    let soldeer_path = root.as_ref().join(\"soldeer.toml\");\n    if let Ok(contents) = fs::read_to_string(&foundry_path) {\n        debug!(path:? = foundry_path; \"found foundry.toml file\");\n        if let Ok(doc) = contents.parse::<DocumentMut>() {\n            if doc.contains_table(\"dependencies\") {\n                debug!(\"found `dependencies` table in foundry.toml, so using that file for config\");\n                return Some(ConfigLocation::Foundry);\n            } else {\n                debug!(\"foundry.toml does not contain `dependencies`, trying to use soldeer.toml\");\n            }\n        } else {\n            warn!(path:? = foundry_path; \"foundry.toml could not be parsed a toml\");\n        }\n    } else if soldeer_path.exists() {\n        debug!(path:? = soldeer_path; \"soldeer.toml exists, using that file for config\");\n        return Some(ConfigLocation::Soldeer);\n    }\n    debug!(\"could not determine existing config file location\");\n    None\n}\n\n/// Read the list of dependencies from the config file.\n///\n/// Dependencies are stored in a TOML table under the `dependencies` key.\n/// Each key inside of the table is the name of the dependency and the value can be:\n/// - a string representing the version requirement\n/// - a table with the following fields:\n///   - `version` (required): the version requirement string\n///   - `url` (optional): the URL to the dependency's zip file\n///   - `git` (optional): the git URL for git dependencies\n///   - `rev` (optional): the revision hash for git dependencies\n///   - `branch` (optional): the branch name for git dependencies\n///   - `tag` (optional): the tag name for git dependencies\n///   - `project_root` (optional): relative path to the folder containing the config file\npub fn 
read_config_deps(path: impl AsRef<Path>) -> Result<(Vec<Dependency>, Vec<ParsingWarning>)> {\n    let contents = fs::read_to_string(&path)?;\n    let doc: DocumentMut = contents.parse::<DocumentMut>()?;\n    let Some(Some(data)) = doc.get(\"dependencies\").map(|v| v.as_table()) else {\n        warn!(\"no `dependencies` table in config file\");\n        return Ok(Default::default());\n    };\n\n    let mut dependencies: Vec<Dependency> = Vec::new();\n    let mut warnings: Vec<ParsingWarning> = Vec::new();\n    for (name, v) in data {\n        let mut res = parse_dependency(name, v)?;\n        dependencies.push(res.dependency);\n        warnings.append(&mut res.warnings);\n    }\n    debug!(path:? = path.as_ref(); \"found {} dependencies in config file\", dependencies.len());\n    Ok((dependencies, warnings))\n}\n\n/// Read the Soldeer config from the config file.\npub fn read_soldeer_config(path: impl AsRef<Path>) -> Result<SoldeerConfig> {\n    #[derive(Deserialize)]\n    struct SoldeerConfigParsed {\n        #[serde(default)]\n        soldeer: SoldeerConfig,\n    }\n\n    let contents = fs::read_to_string(&path)?;\n\n    let config: SoldeerConfigParsed = toml_edit::de::from_str(&contents)?;\n\n    debug!(path:? 
= path.as_ref(); \"parsed soldeer config from file\");\n    Ok(config.soldeer)\n}\n\n/// Add a dependency to the config file.\npub fn add_to_config(dependency: &Dependency, config_path: impl AsRef<Path>) -> Result<()> {\n    let contents = fs::read_to_string(&config_path)?;\n    let mut doc: DocumentMut = contents.parse::<DocumentMut>()?;\n\n    // in case we don't have the dependencies section defined in the config file, we add it\n    if !doc.contains_table(\"dependencies\") {\n        debug!(\"`dependencies` table added to config file because it was missing\");\n        doc.insert(\"dependencies\", Item::Table(Table::default()));\n    }\n\n    let (name, value) = dependency.to_toml_value();\n    doc[\"dependencies\"]\n        .as_table_mut()\n        .expect(\"dependencies should be a table\")\n        .insert(&name, value);\n\n    fs::write(&config_path, doc.to_string())?;\n    debug!(dep:% = dependency, path:? = config_path.as_ref(); \"added dependency to config file\");\n    Ok(())\n}\n\n/// Delete a dependency from the config file.\npub fn delete_from_config(dependency_name: &str, path: impl AsRef<Path>) -> Result<Dependency> {\n    let contents = fs::read_to_string(&path)?;\n    let mut doc: DocumentMut = contents.parse::<DocumentMut>().expect(\"invalid doc\");\n\n    let Some(dependencies) = doc[\"dependencies\"].as_table_mut() else {\n        debug!(\"no `dependencies` table in config file\");\n        return Err(ConfigError::MissingDependency(dependency_name.to_string()));\n    };\n    let Some(item_removed) = dependencies.remove(dependency_name) else {\n        debug!(\"dependency not present in config file\");\n        return Err(ConfigError::MissingDependency(dependency_name.to_string()));\n    };\n\n    let dependency = parse_dependency(dependency_name, &item_removed)?;\n\n    fs::write(&path, doc.to_string())?;\n    debug!(dep = dependency_name, path:? 
= path.as_ref(); \"removed dependency from config file\");\n    Ok(dependency.dependency)\n}\n\n/// Update the config file to add the `dependencies` folder as a source for libraries and the\n/// `[dependencies]` table if necessary.\npub fn update_config_libs(foundry_config: impl AsRef<Path>) -> Result<()> {\n    let contents = fs::read_to_string(&foundry_config)?;\n    let mut doc: DocumentMut = contents.parse::<DocumentMut>()?;\n\n    if !doc.contains_key(\"profile\") {\n        debug!(\"missing `profile` in config file, adding it\");\n        let mut profile = Table::default();\n        profile[\"default\"] = Item::Table(Table::default());\n        profile.set_implicit(true);\n        doc[\"profile\"] = Item::Table(profile);\n    }\n\n    let profile = doc[\"profile\"].as_table_mut().expect(\"profile should be a table\");\n    if !profile.contains_key(\"default\") {\n        debug!(\"missing `default` profile in config file, adding it\");\n        profile[\"default\"] = Item::Table(Table::default());\n    }\n\n    let default_profile =\n        profile[\"default\"].as_table_mut().expect(\"default profile should be a table\");\n    if !default_profile.contains_key(\"libs\") {\n        debug!(\"missing `libs` array in config file, adding it\");\n        default_profile[\"libs\"] = value(Array::from_iter(&[\"dependencies\".to_string()]));\n    }\n\n    let libs = default_profile[\"libs\"].as_array_mut().expect(\"libs should be an array\");\n    if !libs.iter().any(|v| v.as_str() == Some(\"dependencies\")) {\n        debug!(\"adding `dependencies` folder to `libs` array\");\n        libs.push(\"dependencies\");\n    }\n\n    // in case we don't have the dependencies section defined in the config file, we add it\n    if !doc.contains_table(\"dependencies\") {\n        debug!(\"adding `dependencies` table in config file\");\n        doc.insert(\"dependencies\", Item::Table(Table::default()));\n    }\n\n    fs::write(&foundry_config, doc.to_string())?;\n    
debug!(path:? = foundry_config.as_ref(); \"config file updated\");\n    Ok(())\n}\n\n/// Find the top-level directory of the working git tree.\n///\n/// If no `.git` folder is found in the ancestors, `None` is returned.\nfn find_git_root(relative_to: impl AsRef<Path>) -> Result<Option<PathBuf>> {\n    let root = dunce::canonicalize(relative_to)?;\n    Ok(root.ancestors().find(|p| p.join(\".git\").is_dir()).map(Path::to_path_buf))\n}\n\n/// Find the root of the project at the current directory or path specified by `cwd`.\n///\n/// Looks for a file named `foundry.toml` or `soldeer.toml` in the ancestors of the optional path\n/// passed as argument. If `None` is given, then the current directory is retrieved from the\n/// environment and used as the start point for the search.\n///\n/// The search is bounded by the root of the working git tree, so as to avoid false positives for\n/// nested dependencies. If no config file is found, but a `.git` folder is found, then the\n/// top-level directory of the working git tree will be returned. 
If the git root cannot be found,\n/// then the start point of the search is returned (current dir or given path).\n///\n/// This function is not meant to be used directly, instead use [`Paths::get_root_path`] which\n/// honors environment variables.\nfn find_project_root(cwd: Option<impl AsRef<Path>>) -> Result<PathBuf> {\n    let cwd = match cwd {\n        Some(path) => dunce::canonicalize(path)?,\n        None => env::current_dir()?,\n    };\n    let boundary = find_git_root(&cwd)?;\n    let found = cwd\n        .ancestors()\n        .take_while(|p| boundary.as_ref().map(|b| p.starts_with(b)).unwrap_or(true))\n        .find(|p| p.join(\"foundry.toml\").is_file() || p.join(\"soldeer.toml\").is_file())\n        .map(Path::to_path_buf);\n    Ok(found.or(boundary).unwrap_or_else(|| cwd.to_path_buf()))\n}\n\n/// Parse a dependency from a TOML value.\n///\n/// The value can be a string (version requirement) or a table.\n/// The table can have the following fields:\n/// - `version` (required): the version requirement string\n/// - `url` (optional): the URL to the dependency's zip file\n/// - `git` (optional): the git URL for git dependencies\n/// - `rev` (optional): the revision hash for git dependencies\n/// - `branch` (optional): the branch name for git dependencies\n/// - `tag` (optional): the tag name for git dependencies\n/// - `project_root` (optional): relative path to the folder containing the config file\n///\n/// Note that the version requirement string cannot contain the `=` symbol for git dependencies\n/// and HTTP dependencies with a custom URL.\nfn parse_dependency(name: impl Into<String>, value: &Item) -> Result<ParsingResult> {\n    let name: String = name.into();\n    if let Some(version_req) = value.as_str() {\n        if version_req.is_empty() {\n            return Err(ConfigError::EmptyVersion(name));\n        }\n        // this function does not retrieve the url\n        return Ok(HttpDependency {\n            name,\n            version_req: 
version_req.to_string(),\n            url: None,\n            project_root: None,\n        }\n        .into());\n    }\n\n    // we should have a table or inline table\n    let table = {\n        match value.as_inline_table() {\n            Some(table) => table,\n            None => match value.as_table() {\n                // we normalize to inline table\n                Some(table) => &table.clone().into_inline_table(),\n                None => {\n                    debug!(dep = name; \"dependency config entry could not be parsed as a table\");\n                    return Err(ConfigError::InvalidDependency(name));\n                }\n            },\n        }\n    };\n\n    let mut warnings = Vec::new();\n\n    // check for unsupported fields\n    warnings.extend(table.iter().filter_map(|(k, _)| {\n        if ![\"version\", \"url\", \"git\", \"rev\", \"branch\", \"tag\", \"project_root\"].contains(&k) {\n            warn!(dependency = name; \"toml parsing: `{k}` is not a valid dependency option\");\n            Some(ParsingWarning {\n                dependency_name: name.clone(),\n                message: format!(\"`{k}` is not a valid dependency option\"),\n            })\n        } else {\n            None\n        }\n    }));\n\n    // version is needed in both cases\n    let version_req = match table.get(\"version\").map(|v| v.as_str()) {\n        Some(None) => {\n            debug!(dep = name; \"dependency's `version` field is not a string\");\n            return Err(ConfigError::InvalidField { field: \"version\".to_string(), dep: name });\n        }\n        None => {\n            return Err(ConfigError::MissingField { field: \"version\".to_string(), dep: name });\n        }\n        Some(Some(version_req)) => version_req.to_string(),\n    };\n    if version_req.is_empty() {\n        return Err(ConfigError::EmptyVersion(name));\n    }\n\n    // both types of dependency definition can have the `project_root` field.\n    let project_root = match 
table.get(\"project_root\").map(|v| v.as_str()) {\n        Some(Some(path)) => Some(path.into()),\n        Some(None) => {\n            debug!(dep = name; \"dependency's `project_root` field is not a string\");\n            return Err(ConfigError::InvalidField { field: \"project_root\".to_string(), dep: name });\n        }\n        None => None,\n    };\n\n    // check if it's a git dependency\n    match table.get(\"git\").map(|v| v.as_str()) {\n        Some(None) => {\n            debug!(dep = name; \"dependency's `git` field is not a string\");\n            return Err(ConfigError::InvalidField { field: \"git\".to_string(), dep: name });\n        }\n        Some(Some(git)) => {\n            // we can't have an http url if we have a git url\n            if table.get(\"url\").is_some() {\n                return Err(ConfigError::FieldConflict {\n                    field: \"url\".to_string(),\n                    conflicts_with: \"git\".to_string(),\n                    dep: name,\n                });\n            }\n\n            // for git dependencies, the version requirement string is going to be used as part of\n            // the folder name inside the dependencies folder. 
As such, it's not allowed to contain\n            // the \"=\" character, because that would break the remappings.\n            if version_req.contains('=') {\n                return Err(ConfigError::InvalidVersionReq(name));\n            }\n            // rev/branch/tag fields are optional but need to be a string if present\n            let rev = match table.get(\"rev\").map(|v| v.as_str()) {\n                Some(Some(rev)) => Some(rev.to_string()),\n                Some(None) => {\n                    debug!(dep = name; \"dependency's `rev` field is not a string\");\n                    return Err(ConfigError::InvalidField { field: \"rev\".to_string(), dep: name });\n                }\n                None => None,\n            };\n            let branch = match table.get(\"branch\").map(|v| v.as_str()) {\n                Some(Some(tag)) => Some(tag.to_string()),\n                Some(None) => {\n                    debug!(dep = name; \"dependency's `branch` field is not a string\");\n                    return Err(ConfigError::InvalidField {\n                        field: \"branch\".to_string(),\n                        dep: name,\n                    });\n                }\n                None => None,\n            };\n            let tag = match table.get(\"tag\").map(|v| v.as_str()) {\n                Some(Some(tag)) => Some(tag.to_string()),\n                Some(None) => {\n                    debug!(dep = name; \"dependency's `tag` field is not a string\");\n                    return Err(ConfigError::InvalidField { field: \"tag\".to_string(), dep: name });\n                }\n                None => None,\n            };\n            let identifier = match (rev, branch, tag) {\n                (Some(rev), None, None) => Some(GitIdentifier::from_rev(rev)),\n                (None, Some(branch), None) => Some(GitIdentifier::from_branch(branch)),\n                (None, None, Some(tag)) => Some(GitIdentifier::from_tag(tag)),\n                (None, None, 
None) => None,\n                _ => {\n                    return Err(ConfigError::GitIdentifierConflict(name));\n                }\n            };\n            return Ok(ParsingResult {\n                dependency: GitDependency {\n                    name,\n                    git: git.to_string(),\n                    version_req,\n                    identifier,\n                    project_root,\n                }\n                .into(),\n                warnings,\n            });\n        }\n        None => {}\n    }\n\n    // we should have a HTTP dependency,\n\n    // check for extra fields in the HTTP context\n    warnings.extend(table.iter().filter_map(|(k, _)| {\n        if [\"rev\", \"branch\", \"tag\"].contains(&k) {\n            warn!(dependency = name; \"toml parsing: `{k}` is ignored if no `git` URL is provided\");\n            Some(ParsingWarning {\n                dependency_name: name.clone(),\n                message: format!(\"`{k}` is ignored if no `git` URL is provided\"),\n            })\n        } else {\n            None\n        }\n    }));\n\n    match table.get(\"url\").map(|v| v.as_str()) {\n        Some(None) => {\n            debug!(dep = name; \"dependency's `url` field is not a string\");\n            Err(ConfigError::InvalidField { field: \"url\".to_string(), dep: name })\n        }\n        None => Ok(ParsingResult {\n            dependency: HttpDependency { name, version_req, url: None, project_root }.into(),\n            warnings,\n        }),\n        Some(Some(url)) => {\n            // for HTTP dependencies with custom URL, the version requirement string is going to be\n            // used as part of the folder name inside the dependencies folder. 
As such,\n            // it's not allowed to contain the \"=\" character, because that would break\n            // the remappings.\n            if version_req.contains('=') {\n                return Err(ConfigError::InvalidVersionReq(name));\n            }\n            Ok(ParsingResult {\n                dependency: HttpDependency {\n                    name,\n                    version_req,\n                    url: Some(url.to_string()),\n                    project_root,\n                }\n                .into(),\n                warnings,\n            })\n        }\n    }\n}\n\n/// Create a basic config file with default contents if it doesn't exist, otherwise add\n/// `[dependencies]` if necessary.\nfn create_or_modify_config(\n    location: ConfigLocation,\n    foundry_path: impl AsRef<Path>,\n    soldeer_path: impl AsRef<Path>,\n) -> Result<PathBuf> {\n    match location {\n        ConfigLocation::Foundry => {\n            let foundry_path = foundry_path.as_ref();\n            if foundry_path.exists() {\n                update_config_libs(foundry_path)?;\n                return Ok(foundry_path.to_path_buf());\n            }\n            debug!(path:? = foundry_path; \"foundry.toml does not exist, creating it\");\n            let contents = r#\"[profile.default]\nsrc = \"src\"\nout = \"out\"\nlibs = [\"dependencies\"]\n\n[dependencies]\n\n# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options\n\"#;\n\n            fs::write(foundry_path, contents)?;\n            Ok(foundry_path.to_path_buf())\n        }\n        ConfigLocation::Soldeer => {\n            let soldeer_path = soldeer_path.as_ref();\n            if soldeer_path.exists() {\n                return Ok(soldeer_path.to_path_buf());\n            }\n            debug!(path:? 
= soldeer_path; \"soldeer.toml does not exist, creating it\");\n            fs::write(soldeer_path, \"[dependencies]\\n\")?;\n            Ok(soldeer_path.to_path_buf())\n        }\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::errors::ConfigError;\n    use path_slash::PathBufExt;\n    use std::{fs, path::PathBuf};\n    use temp_env::with_var;\n    use testdir::testdir;\n\n    fn write_to_config(content: &str, filename: &str) -> PathBuf {\n        let path = testdir!().join(filename);\n        fs::write(&path, content).unwrap();\n        path\n    }\n\n    #[test]\n    fn test_paths_config_soldeer() {\n        let config_path = write_to_config(\"[dependencies]\\n\", \"soldeer.toml\");\n        with_var(\n            \"SOLDEER_PROJECT_ROOT\",\n            Some(config_path.parent().unwrap().to_string_lossy().to_string()),\n            || {\n                let res = Paths::new();\n                assert!(res.is_ok(), \"{res:?}\");\n                assert_eq!(res.unwrap().config.to_slash_lossy(), config_path.to_slash_lossy());\n            },\n        );\n    }\n\n    #[test]\n    fn test_paths_config_foundry() {\n        let config_contents = r#\"[profile.default]\nlibs = [\"dependencies\"]\n\n[dependencies]\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        with_var(\n            \"SOLDEER_PROJECT_ROOT\",\n            Some(config_path.parent().unwrap().to_string_lossy().to_string()),\n            || {\n                let res = Paths::new();\n                assert!(res.is_ok(), \"{res:?}\");\n                assert_eq!(res.unwrap().config, config_path);\n            },\n        );\n    }\n\n    #[test]\n    fn test_paths_from_root() {\n        let config_path = write_to_config(\"[dependencies]\\n\", \"soldeer.toml\");\n        let root = config_path.parent().unwrap();\n        let res = Paths::from_root(root);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().root, 
root);\n    }\n\n    #[test]\n    fn test_from_name_version_no_url() {\n        let res = Dependency::from_name_version(\"dependency~1.0.0\", None, None);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            HttpDependency::builder().name(\"dependency\").version_req(\"1.0.0\").build().into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_http_url() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::http(\"https://github.com/user/repo/archive/123.zip\")),\n            None,\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            HttpDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .url(\"https://github.com/user/repo/archive/123.zip\")\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_url() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"https://github.com/user/repo.git\")),\n            None,\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .git(\"https://github.com/user/repo.git\")\n                .build()\n                .into()\n        );\n\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"https://test:test@gitlab.com/user/repo.git\")),\n            None,\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                
.git(\"https://test:test@gitlab.com/user/repo.git\")\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_url_rev() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"https://github.com/user/repo.git\")),\n            Some(GitIdentifier::from_rev(\"123456\")),\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .git(\"https://github.com/user/repo.git\")\n                .identifier(GitIdentifier::from_rev(\"123456\"))\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_url_branch() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"https://github.com/user/repo.git\")),\n            Some(GitIdentifier::from_branch(\"dev\")),\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .git(\"https://github.com/user/repo.git\")\n                .identifier(GitIdentifier::from_branch(\"dev\"))\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_url_tag() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"https://github.com/user/repo.git\")),\n            Some(GitIdentifier::from_tag(\"v1.0.0\")),\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                
.version_req(\"1.0.0\")\n                .git(\"https://github.com/user/repo.git\")\n                .identifier(GitIdentifier::from_tag(\"v1.0.0\"))\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_ssh() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"git@github.com:user/repo.git\")),\n            None,\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .git(\"git@github.com:user/repo.git\")\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_with_git_ssh_rev() {\n        let res = Dependency::from_name_version(\n            \"dependency~1.0.0\",\n            Some(UrlType::git(\"git@github.com:user/repo.git\")),\n            Some(GitIdentifier::from_rev(\"123456\")),\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            GitDependency::builder()\n                .name(\"dependency\")\n                .version_req(\"1.0.0\")\n                .git(\"git@github.com:user/repo.git\")\n                .identifier(GitIdentifier::from_rev(\"123456\"))\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_from_name_version_empty_version() {\n        let res = Dependency::from_name_version(\"dependency~\", None, None);\n        assert!(matches!(res, Err(ConfigError::EmptyVersion(_))), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_from_name_version_invalid_version() {\n        // for http deps, having the \"=\" character in the version requirement is ok\n        let res = Dependency::from_name_version(\"dependency~asdf=\", None, None);\n        assert!(res.is_ok(), 
\"{res:?}\");\n\n        let res = Dependency::from_name_version(\n            \"dependency~asdf=\",\n            Some(UrlType::http(\"https://example.com\")),\n            None,\n        );\n        assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), \"{res:?}\");\n\n        let res = Dependency::from_name_version(\n            \"dependency~asdf=\",\n            Some(UrlType::git(\"git@github.com:user/repo.git\")),\n            None,\n        );\n        assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_read_soldeer_config_default() {\n        let config_contents = r#\"[profile.default]\nlibs = [\"dependencies\"]\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = read_soldeer_config(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), SoldeerConfig::default());\n    }\n\n    #[test]\n    fn test_read_soldeer_config() {\n        let config_contents = r#\"[soldeer]\nremappings_generate = false\nremappings_regenerate = true\nremappings_version = false\nremappings_prefix = \"@\"\nremappings_location = \"config\"\nrecursive_deps = true\n\"#;\n        let expected = SoldeerConfig {\n            remappings_generate: false,\n            remappings_regenerate: true,\n            remappings_version: false,\n            remappings_prefix: \"@\".to_string(),\n            remappings_location: RemappingsLocation::Config,\n            recursive_deps: true,\n        };\n\n        let config_path = write_to_config(config_contents, \"soldeer.toml\");\n        let res = read_soldeer_config(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), expected);\n\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = read_soldeer_config(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), expected);\n    }\n\n  
  #[test]\n    fn test_read_foundry_config_deps() {\n        let config_contents = r#\"[profile.default]\nlibs = [\"dependencies\"]\n\n[dependencies]\n\"lib1\" = \"1.0.0\"\n\"lib2\" = { version = \"2.0.0\" }\n\"lib3\" = { version = \"3.0.0\", url = \"https://example.com\" }\n\"lib4\" = { version = \"4.0.0\", git = \"https://example.com/repo.git\" }\n\"lib5\" = { version = \"5.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\" }\n\"lib6\" = { version = \"6.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\" }\n\"lib7\" = { version = \"7.0.0\", git = \"https://example.com/repo.git\", tag = \"v7.0.0\" }\n\"lib8\" = { version = \"8.0.0\", url = \"https://example.com\", project_root = \"foo/bar\" }\n\"lib9\" = { version = \"9.0.0\", git = \"https://example.com/repo.git\", project_root = \"test/test2\" }\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = read_config_deps(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let (result, _) = res.unwrap();\n\n        assert_eq!(\n            result[0],\n            HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into()\n        );\n        assert_eq!(\n            result[1],\n            HttpDependency::builder().name(\"lib2\").version_req(\"2.0.0\").build().into()\n        );\n        assert_eq!(\n            result[2],\n            HttpDependency::builder()\n                .name(\"lib3\")\n                .version_req(\"3.0.0\")\n                .url(\"https://example.com\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[3],\n            GitDependency::builder()\n                .name(\"lib4\")\n                .version_req(\"4.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[4],\n            GitDependency::builder()\n                
.name(\"lib5\")\n                .version_req(\"5.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_rev(\"123456\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[5],\n            GitDependency::builder()\n                .name(\"lib6\")\n                .version_req(\"6.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_branch(\"dev\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[6],\n            GitDependency::builder()\n                .name(\"lib7\")\n                .version_req(\"7.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_tag(\"v7.0.0\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[7],\n            HttpDependency::builder()\n                .name(\"lib8\")\n                .version_req(\"8.0.0\")\n                .url(\"https://example.com\")\n                .project_root(\"foo/bar\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[8],\n            GitDependency::builder()\n                .name(\"lib9\")\n                .version_req(\"9.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .project_root(\"test/test2\")\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_read_soldeer_config_deps() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = \"1.0.0\"\n\"lib2\" = { version = \"2.0.0\" }\n\"lib3\" = { version = \"3.0.0\", url = \"https://example.com\" }\n\"lib4\" = { version = \"4.0.0\", git = \"https://example.com/repo.git\" }\n\"lib5\" = { version = \"5.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\" }\n\"lib6\" = { version = 
\"6.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\" }\n\"lib7\" = { version = \"7.0.0\", git = \"https://example.com/repo.git\", tag = \"v7.0.0\" }\n\"lib8\" = { version = \"8.0.0\", url = \"https://example.com\", project_root = \"foo/bar\" }\n\"lib9\" = { version = \"9.0.0\", git = \"https://example.com/repo.git\", project_root = \"test/test2\" }\n\"#;\n        let config_path = write_to_config(config_contents, \"soldeer.toml\");\n        let res = read_config_deps(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let (result, _) = res.unwrap();\n\n        assert_eq!(\n            result[0],\n            HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into()\n        );\n        assert_eq!(\n            result[1],\n            HttpDependency::builder().name(\"lib2\").version_req(\"2.0.0\").build().into()\n        );\n        assert_eq!(\n            result[2],\n            HttpDependency::builder()\n                .name(\"lib3\")\n                .version_req(\"3.0.0\")\n                .url(\"https://example.com\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[3],\n            GitDependency::builder()\n                .name(\"lib4\")\n                .version_req(\"4.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[4],\n            GitDependency::builder()\n                .name(\"lib5\")\n                .version_req(\"5.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_rev(\"123456\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[5],\n            GitDependency::builder()\n                .name(\"lib6\")\n                .version_req(\"6.0.0\")\n                .git(\"https://example.com/repo.git\")\n              
  .identifier(GitIdentifier::from_branch(\"dev\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[6],\n            GitDependency::builder()\n                .name(\"lib7\")\n                .version_req(\"7.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_tag(\"v7.0.0\"))\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[7],\n            HttpDependency::builder()\n                .name(\"lib8\")\n                .version_req(\"8.0.0\")\n                .url(\"https://example.com\")\n                .project_root(\"foo/bar\")\n                .build()\n                .into()\n        );\n        assert_eq!(\n            result[8],\n            GitDependency::builder()\n                .name(\"lib9\")\n                .version_req(\"9.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .project_root(\"test/test2\")\n                .build()\n                .into()\n        );\n    }\n\n    #[test]\n    fn test_read_soldeer_config_deps_bad_version() {\n        for dep in [\n            r#\"\"lib1\" = \"\"\"#,\n            r#\"\"lib1\" = { version = \"\" }\"#,\n            r#\"\"lib1\" = { version = \"\", url = \"https://example.com\" }\"#,\n            r#\"\"lib1\" = { version = \"\", git = \"https://example.com/repo.git\" }\"#,\n            r#\"\"lib1\" = { version = \"\", git = \"https://example.com/repo.git\", rev = \"123456\" }\"#,\n        ] {\n            let config_contents = format!(\"[dependencies]\\n{dep}\");\n            let config_path = write_to_config(&config_contents, \"soldeer.toml\");\n            let res = read_config_deps(config_path);\n            assert!(matches!(res, Err(ConfigError::EmptyVersion(_))), \"{res:?}\");\n        }\n\n        for dep in [\n            r#\"\"lib1\" = { version = \"asdf=\", url = \"https://example.com\" }\"#,\n          
  r#\"\"lib1\" = { version = \"asdf=\", git = \"https://example.com/repo.git\" }\"#,\n            r#\"\"lib1\" = { version = \"asdf=\", git = \"https://example.com/repo.git\", rev = \"123456\" }\"#,\n        ] {\n            let config_contents = format!(\"[dependencies]\\n{dep}\");\n            let config_path = write_to_config(&config_contents, \"soldeer.toml\");\n            let res = read_config_deps(config_path);\n            assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), \"{res:?}\");\n        }\n\n        // it's ok to have the \"=\" character in the version requirement for HTTP dependencies\n        // without a custom URL\n        let config_contents = r#\"[dependencies]\n\"lib1\" = \"asdf=\"\n\"lib2\" = { version = \"asdf=\" }\n\"#;\n        let config_path = write_to_config(config_contents, \"soldeer.toml\");\n        let res = read_config_deps(config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_read_soldeer_config_deps_bad_git() {\n        for dep in [\n            r#\"\"lib1\" = { version = \"1.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\", branch = \"dev\" }\"#,\n            r#\"\"lib1\" = { version = \"1.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\", tag = \"v1.0.0\" }\"#,\n            r#\"\"lib1\" = { version = \"1.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\", tag = \"v1.0.0\" }\"#,\n            r#\"\"lib1\" = { version = \"1.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\", branch = \"dev\", tag = \"v1.0.0\" }\"#,\n        ] {\n            let config_contents = format!(\"[dependencies]\\n{dep}\");\n            let config_path = write_to_config(&config_contents, \"soldeer.toml\");\n            let res = read_config_deps(config_path);\n            assert!(matches!(res, Err(ConfigError::GitIdentifierConflict(_))), \"{res:?}\");\n        }\n    }\n\n    #[test]\n    fn test_add_to_config() {\n        let config_path = 
write_to_config(\"[dependencies]\\n\", \"soldeer.toml\");\n\n        let deps: &[Dependency] = &[\n            HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into(),\n            HttpDependency::builder()\n                .name(\"lib2\")\n                .version_req(\"1.0.0\")\n                .url(\"https://test.com/test.zip\")\n                .build()\n                .into(),\n            HttpDependency::builder()\n                .name(\"lib21\")\n                .version_req(\"1.0.0\")\n                .url(\"https://test.com/test.zip\")\n                .project_root(\"foo/bar\")\n                .build()\n                .into(),\n            GitDependency::builder()\n                .name(\"lib3\")\n                .version_req(\"1.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .build()\n                .into(),\n            GitDependency::builder()\n                .name(\"lib4\")\n                .version_req(\"1.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_rev(\"123456\"))\n                .build()\n                .into(),\n            GitDependency::builder()\n                .name(\"lib5\")\n                .version_req(\"1.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_branch(\"dev\"))\n                .build()\n                .into(),\n            GitDependency::builder()\n                .name(\"lib6\")\n                .version_req(\"1.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .identifier(GitIdentifier::from_tag(\"v1.0.0\"))\n                .build()\n                .into(),\n            GitDependency::builder()\n                .name(\"lib7\")\n                .version_req(\"1.0.0\")\n                .git(\"https://example.com/repo.git\")\n                .project_root(\"foo/bar\")\n                .build()\n           
     .into(),\n        ];\n        for dep in deps {\n            let res = add_to_config(dep, &config_path);\n            assert!(res.is_ok(), \"{dep}: {res:?}\");\n        }\n\n        let (parsed, _) = read_config_deps(&config_path).unwrap();\n        for (dep, parsed) in deps.iter().zip(parsed.iter()) {\n            assert_eq!(dep, parsed);\n        }\n    }\n\n    #[test]\n    fn test_add_to_config_no_section() {\n        let config_path = write_to_config(\"\", \"soldeer.toml\");\n        let dep = Dependency::from_name_version(\"lib1~1.0.0\", None, None).unwrap();\n        let res = add_to_config(&dep, &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let (parsed, _) = read_config_deps(&config_path).unwrap();\n        assert_eq!(parsed[0], dep);\n    }\n\n    #[test]\n    fn test_delete_from_config() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = \"1.0.0\"\n\"lib2\" = { version = \"2.0.0\" }\n\"lib3\" = { version = \"3.0.0\", url = \"https://example.com\" }\n\"lib4\" = { version = \"4.0.0\", git = \"https://example.com/repo.git\" }\n\"lib5\" = { version = \"5.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\" }\n\"lib6\" = { version = \"6.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\" }\n\"lib7\" = { version = \"7.0.0\", git = \"https://example.com/repo.git\", tag = \"v7.0.0\" }\n\"lib8\" = { version = \"8.0.0\", url = \"https://example.com\", project_root = \"foo/bar\" }\n\"lib9\" = { version = \"9.0.0\", git = \"https://example.com/repo.git\", project_root = \"foo/bar\" }\n        \"#;\n        let config_path = write_to_config(config_contents, \"soldeer.toml\");\n        let res = delete_from_config(\"lib1\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib1\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 8);\n\n        let res = delete_from_config(\"lib2\", &config_path);\n        assert!(res.is_ok(), 
\"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib2\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 7);\n\n        let res = delete_from_config(\"lib3\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib3\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 6);\n\n        let res = delete_from_config(\"lib4\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib4\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 5);\n\n        let res = delete_from_config(\"lib5\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib5\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 4);\n\n        let res = delete_from_config(\"lib6\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib6\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 3);\n\n        let res = delete_from_config(\"lib7\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib7\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 2);\n\n        let res = delete_from_config(\"lib8\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib8\");\n        assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 1);\n\n        let res = delete_from_config(\"lib9\", &config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap().name(), \"lib9\");\n        assert!(read_config_deps(&config_path).unwrap().0.is_empty());\n    }\n\n    #[test]\n    fn test_delete_from_config_missing() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = \"1.0.0\"\n        \"#;\n        let config_path = write_to_config(config_contents, 
\"soldeer.toml\");\n        let res = delete_from_config(\"libfoo\", &config_path);\n        assert!(matches!(res, Err(ConfigError::MissingDependency(_))), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_update_config_libs() {\n        let config_contents = r#\"[profile.default]\nlibs = [\"lib\"]\n\n[dependencies]\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = update_config_libs(&config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&config_path).unwrap();\n        assert_eq!(\n            contents,\n            r#\"[profile.default]\nlibs = [\"lib\", \"dependencies\"]\n\n[dependencies]\n\"#\n        );\n    }\n\n    #[test]\n    fn test_update_config_profile_empty() {\n        let config_contents = r#\"[dependencies]\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = update_config_libs(&config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&config_path).unwrap();\n        assert_eq!(\n            contents,\n            r#\"[dependencies]\n\n[profile.default]\nlibs = [\"dependencies\"]\n\"#\n        );\n    }\n\n    #[test]\n    fn test_update_config_libs_empty() {\n        let config_contents = r#\"[profile.default]\nsrc = \"src\"\n\n[dependencies]\n\"#;\n        let config_path = write_to_config(config_contents, \"foundry.toml\");\n        let res = update_config_libs(&config_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&config_path).unwrap();\n        assert_eq!(\n            contents,\n            r#\"[profile.default]\nsrc = \"src\"\nlibs = [\"dependencies\"]\n\n[dependencies]\n\"#\n        );\n    }\n\n    #[test]\n    fn test_parse_dependency() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = \"1.0.0\"\n\"lib2\" = { version = \"2.0.0\" }\n\"lib3\" = { version = \"3.0.0\", url = 
\"https://example.com\" }\n\"lib4\" = { version = \"4.0.0\", git = \"https://example.com/repo.git\" }\n\"lib5\" = { version = \"5.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\" }\n\"lib6\" = { version = \"6.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\" }\n\"lib7\" = { version = \"7.0.0\", git = \"https://example.com/repo.git\", tag = \"v7.0.0\" }\n\"lib8\" = { version = \"8.0.0\", url = \"https://example.com\", project_root = \"foo/bar\" }\n\"lib9\" = { version = \"9.0.0\", git = \"https://example.com/repo.git\", project_root = \"foo/bar\" }\n\"#;\n        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();\n        let data = doc.get(\"dependencies\").map(|v| v.as_table()).unwrap().unwrap();\n        for (name, v) in data {\n            let res = parse_dependency(name, v);\n            assert!(res.is_ok(), \"{res:?}\");\n        }\n    }\n\n    #[test]\n    fn test_parse_dependency_extra_field() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = { version = \"3.0.0\", url = \"https://example.com\", foo = \"bar\" }\n\"#;\n        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();\n        let data = doc.get(\"dependencies\").map(|v| v.as_table()).unwrap().unwrap();\n        for (name, v) in data {\n            let res = parse_dependency(name, v).unwrap();\n            assert_eq!(res.warnings[0].message, \"`foo` is not a valid dependency option\");\n        }\n    }\n\n    #[test]\n    fn test_parse_dependency_git_extra_url() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = { version = \"3.0.0\", git = \"https://example.com/repo.git\", url = \"https://example.com\" }\n\"#;\n        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();\n        let data = doc.get(\"dependencies\").map(|v| v.as_table()).unwrap().unwrap();\n        for (name, v) in data {\n            let res = parse_dependency(name, v);\n            assert!(\n                
matches!(\n                    res,\n                    Err(ConfigError::FieldConflict { field: _, conflicts_with: _, dep: _ })\n                ),\n                \"{res:?}\"\n            );\n        }\n    }\n\n    #[test]\n    fn test_parse_dependency_git_field_conflict() {\n        let config_contents = r#\"[dependencies]\n\"lib2\" = { version = \"3.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\", branch = \"dev\" }\n\"lib3\" = { version = \"3.0.0\", git = \"https://example.com/repo.git\", rev = \"123456\", tag = \"v7.0.0\" }\n\"lib4\" = { version = \"3.0.0\", git = \"https://example.com/repo.git\", branch = \"dev\", tag = \"v7.0.0\" }\n\"#;\n        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();\n        let data = doc.get(\"dependencies\").map(|v| v.as_table()).unwrap().unwrap();\n        for (name, v) in data {\n            let res = parse_dependency(name, v);\n            assert!(matches!(res, Err(ConfigError::GitIdentifierConflict(_))), \"{res:?}\");\n        }\n    }\n\n    #[test]\n    fn test_parse_dependency_missing_url() {\n        let config_contents = r#\"[dependencies]\n\"lib1\" = { version = \"3.0.0\", rev = \"123456\" }\n\"lib2\" = { version = \"3.0.0\", branch = \"dev\" }\n\"lib3\" = { version = \"3.0.0\", tag = \"v7.0.0\" }\n\"#;\n        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();\n        let data = doc.get(\"dependencies\").map(|v| v.as_table()).unwrap().unwrap();\n        for (name, v) in data {\n            let res = parse_dependency(name, v).unwrap();\n            assert!(res.warnings[0].message.ends_with(\"is ignored if no `git` URL is provided\"));\n        }\n    }\n\n    #[test]\n    fn test_find_git_root() {\n        let test_dir = testdir!();\n        let git_dir = test_dir.join(\".git\");\n        fs::create_dir(&git_dir).unwrap();\n\n        let result = find_git_root(&test_dir);\n        assert!(result.is_ok(), \"{result:?}\");\n        
assert_eq!(result.unwrap(), Some(test_dir.clone()));\n\n        // test with a subdirectory\n        let sub_dir = test_dir.join(\"subdir\");\n        fs::create_dir(&sub_dir).unwrap();\n\n        let result = find_git_root(&sub_dir);\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), Some(test_dir));\n\n        // test outside of a git folder\n        let temp_dir = std::env::temp_dir().join(\"soldeer_test_no_git\");\n        if !temp_dir.exists() {\n            fs::create_dir(&temp_dir).unwrap();\n        }\n\n        let result = find_git_root(&temp_dir);\n        assert_eq!(result.unwrap(), None);\n\n        // clean up\n        fs::remove_dir(&temp_dir).unwrap();\n    }\n\n    #[test]\n    fn test_find_git_root_nested() {\n        // test nested git repositories\n        let outer_dir = testdir!();\n        fs::create_dir(outer_dir.join(\".git\")).unwrap();\n\n        let inner_dir = outer_dir.join(\"inner\");\n        fs::create_dir(&inner_dir).unwrap();\n        fs::create_dir(inner_dir.join(\".git\")).unwrap();\n\n        // should find the inner git root when starting from inner directory\n        let result = find_git_root(&inner_dir);\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), Some(inner_dir));\n\n        // should find the outer git root when starting from outer directory\n        let result = find_git_root(&outer_dir);\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), Some(outer_dir));\n    }\n\n    #[test]\n    fn test_find_project_root_with_foundry_toml() {\n        let test_dir = testdir!();\n        let foundry_toml = test_dir.join(\"foundry.toml\");\n        fs::write(&foundry_toml, \"[dependencies]\\n\").unwrap();\n\n        let result = find_project_root(Some(&test_dir));\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), test_dir);\n    }\n\n    #[test]\n    fn 
test_find_project_root_with_soldeer_toml() {\n        let test_dir = testdir!();\n        let soldeer_toml = test_dir.join(\"soldeer.toml\");\n        fs::write(&soldeer_toml, \"[dependencies]\\n\").unwrap();\n\n        let result = find_project_root(Some(&test_dir));\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), test_dir);\n    }\n\n    #[test]\n    fn test_find_project_root_in_subdirectory() {\n        let test_dir = testdir!();\n        let foundry_toml = test_dir.join(\"foundry.toml\");\n        fs::write(&foundry_toml, \"[dependencies]\\n\").unwrap();\n\n        let sub_dir = test_dir.join(\"src\");\n        fs::create_dir(&sub_dir).unwrap();\n\n        let result = find_project_root(Some(&sub_dir));\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), test_dir);\n    }\n\n    #[test]\n    fn test_find_project_root_git_boundary() {\n        let test_dir = testdir!();\n        let git_folder = test_dir.join(\".git\");\n        fs::create_dir(&git_folder).unwrap();\n\n        let sub_dir = test_dir.join(\"src\");\n        fs::create_dir(&sub_dir).unwrap();\n\n        let result = find_project_root(Some(&sub_dir));\n        assert!(result.is_ok(), \"{result:?}\");\n        assert_eq!(result.unwrap(), test_dir);\n    }\n}\n"
  },
  {
    "path": "crates/core/src/download.rs",
    "content": "//! Download and/or extract dependencies\nuse crate::{\n    config::{Dependency, GitIdentifier},\n    errors::DownloadError,\n    utils::{path_matches, run_git_command, sanitize_filename},\n};\nuse log::{debug, trace, warn};\nuse reqwest::{IntoUrl, Url};\nuse std::{\n    fs,\n    io::Cursor,\n    path::{Path, PathBuf},\n    str,\n};\nuse tokio::io::AsyncWriteExt as _;\n\npub type Result<T> = std::result::Result<T, DownloadError>;\n\n/// Download a zip file into the provided folder.\n///\n/// Depending on the platform, the folder path must exist prior to calling this function.\n/// The filename for the zip file will be the provided base name with the \".zip\" extension\npub async fn download_file(\n    url: impl IntoUrl,\n    folder_path: impl AsRef<Path>,\n    base_name: &str,\n) -> Result<PathBuf> {\n    let url: Url = url.into_url()?;\n    debug!(name = base_name, url:% = url; \"downloading file\");\n    let resp = reqwest::get(url).await?;\n    let mut resp = resp.error_for_status()?;\n\n    let zip_path = folder_path.as_ref().join(sanitize_filename(&format!(\"{base_name}.zip\")));\n    let mut file = tokio::fs::File::create(&zip_path)\n        .await\n        .map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?;\n    while let Some(mut chunk) = resp.chunk().await? {\n        file.write_all_buf(&mut chunk)\n            .await\n            .map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?;\n    }\n    file.flush().await.map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?;\n    debug!(path:? 
= zip_path; \"saved downloaded file\");\n    Ok(zip_path)\n}\n\n/// Unzip a file into a directory and then delete it.\npub async fn unzip_file(path: impl AsRef<Path>, into: impl AsRef<Path>) -> Result<()> {\n    let path = path.as_ref().to_path_buf();\n    let zip_contents = tokio::fs::read(&path)\n        .await\n        .map_err(|e| DownloadError::IOError { path: path.clone(), source: e })?;\n\n    tokio::task::spawn_blocking({\n        let out_dir = into.as_ref().to_path_buf();\n        #[allow(deprecated)] // until we can get rid of zip_extract\n        move || zip_extract::extract(Cursor::new(zip_contents), &out_dir, true)\n    })\n    .await??;\n    debug!(file:? = path, dest:? = into.as_ref(); \"unzipped file\");\n\n    tokio::fs::remove_file(&path)\n        .await\n        .map_err(|e| DownloadError::IOError { path: path.clone(), source: e })?;\n    debug!(path:?; \"removed zip file\");\n    Ok(())\n}\n\n/// Clone a git repo into the given path, optionally checking out a reference.\n///\n/// The repository is cloned without trees, which can speed up cloning when the full history is not\n/// needed. Contrary to a shallow clone, it's possible to checkout any ref and the missing trees\n/// will be retrieved as they are needed.\n///\n/// This function returns the commit hash corresponding to  the checked out reference (branch, tag,\n/// commit).\npub async fn clone_repo(\n    url: &str,\n    identifier: Option<&GitIdentifier>,\n    path: impl AsRef<Path>,\n) -> Result<String> {\n    let path = path.as_ref().to_path_buf();\n    run_git_command(\n        &[\"clone\", \"--tags\", \"--filter=tree:0\", url, path.to_string_lossy().as_ref()],\n        None,\n    )\n    .await?;\n    debug!(repo:? = path; \"git repo cloned\");\n    if let Some(identifier) = identifier {\n        run_git_command(&[\"checkout\", &identifier.to_string()], Some(&path)).await?;\n        debug!(ref:? = identifier, repo:? 
= path; \"checked out ref\");\n    }\n    let commit =\n        run_git_command(&[\"rev-parse\", \"--verify\", \"HEAD\"], Some(&path)).await?.trim().to_string();\n    debug!(repo:? = path; \"checked out commit is {commit}\");\n    Ok(commit)\n}\n\n/// Remove the files for a dependency (synchronous).\n///\n/// This function should only be called in sync contexts. For a version that is safe to run in\n/// multithreaded async contexts, see [`delete_dependency_files`].\npub fn delete_dependency_files_sync(dependency: &Dependency, deps: impl AsRef<Path>) -> Result<()> {\n    let Some(path) = find_install_path_sync(dependency, deps) else {\n        return Err(DownloadError::DependencyNotFound(dependency.to_string()));\n    };\n    fs::remove_dir_all(&path).map_err(|e| DownloadError::IOError { path, source: e })?;\n    debug!(dep:% = dependency; \"removed all files for dependency (sync)\");\n    Ok(())\n}\n\n/// Find the install path of a dependency by reading the dependencies directory and matching on the\n/// folder name.\n///\n/// If a dependency version requirement string is a semver requirement, any folder which version\n/// matches the requirements is returned.\npub fn find_install_path_sync(dependency: &Dependency, deps: impl AsRef<Path>) -> Option<PathBuf> {\n    let res = fs::read_dir(deps.as_ref())\n        .map(|read_dir| {\n            read_dir.into_iter().find_map(|e| {\n                e.ok().filter(|e| install_path_matches(dependency, e.path())).map(|e| e.path())\n            })\n        })\n        .ok()\n        .flatten()\n        .inspect(|res| debug!(path:? 
= res, dep:% = dependency; \"folder name matches dependency\"));\n    if res.is_none() {\n        debug!(dep:% = dependency; \"could not find install path of dependency\");\n    }\n    res\n}\n\n/// Find the install path of a dependency by reading the dependencies directory and matching on the\n/// folder name (async version).\n///\n/// If a dependency version requirement string is a semver requirement, any folder which version\n/// matches the requirements is returned.\npub async fn find_install_path(dependency: &Dependency, deps: impl AsRef<Path>) -> Option<PathBuf> {\n    let Ok(mut read_dir) = tokio::fs::read_dir(deps.as_ref()).await else {\n        warn!(path:? = deps.as_ref(); \"could not list files in deps folder\");\n        return None;\n    };\n\n    while let Ok(Some(entry)) = read_dir.next_entry().await {\n        let path = entry.path();\n        if !path.is_dir() {\n            continue;\n        }\n        trace!(path:?; \"found folder in deps\");\n        if install_path_matches(dependency, &path) {\n            debug!(path:?, dep:% = dependency; \"folder name matches dependency\");\n            return Some(path);\n        }\n    }\n    debug!(dep:% = dependency; \"could not find install path of dependency\");\n    None\n}\n\n/// Remove the files for a dependency from the dependencies folder.\n///\n/// A folder must exist for the dependency.\npub async fn delete_dependency_files(\n    dependency: &Dependency,\n    deps: impl AsRef<Path>,\n) -> Result<()> {\n    let Some(path) = find_install_path(dependency, deps).await else {\n        return Err(DownloadError::DependencyNotFound(dependency.to_string()));\n    };\n    tokio::fs::remove_dir_all(&path)\n        .await\n        .map_err(|e| DownloadError::IOError { path, source: e })?;\n    debug!(dep:% = dependency; \"removed all files for dependency (async)\");\n    Ok(())\n}\n\n/// Check if a path corresponds to the provided dependency.\n///\n/// The path must exist and be a folder, and the folder 
name must start with the dependency name\n/// (sanitized). For dependencies with a semver-compliant version requirement, any folder with a\n/// version that matches will give a result of `true`. Otherwise, the folder name must contain the\n/// version requirement string after the dependency name.\nfn install_path_matches(dependency: &Dependency, path: impl AsRef<Path>) -> bool {\n    let path = path.as_ref();\n    if !path.is_dir() {\n        trace!(path:?; \"path is not a directory\");\n        return false;\n    }\n    path_matches(dependency, path)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::{config::HttpDependency, push::zip_file};\n    use std::fs;\n    use testdir::testdir;\n\n    #[tokio::test]\n    async fn test_download_file() {\n        let path = testdir!().join(\"my-dependency\");\n        fs::create_dir(&path).unwrap();\n        let res = download_file(\n            \"https://raw.githubusercontent.com/mario-eth/soldeer/main/README.md\",\n            &path,\n            \"my-dependency\",\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let zip_path = path.join(\"my-dependency.zip\");\n        assert!(zip_path.exists());\n    }\n\n    #[tokio::test]\n    async fn test_unzip_file() {\n        let dir = testdir!();\n        // create dummy zip\n        let file_path = dir.join(\"file.txt\");\n        fs::write(&file_path, \"foobar\").unwrap();\n        let zip_path = dir.join(\"my-dependency.zip\");\n        zip_file(&dir, &[file_path], &zip_path).unwrap();\n\n        let out_dir = dir.join(\"out\");\n        let res = unzip_file(&zip_path, &out_dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let file_path = out_dir.join(\"file.txt\");\n        assert!(file_path.exists());\n        assert!(!zip_path.exists());\n    }\n\n    #[tokio::test]\n    async fn test_clone_repo() {\n        let dir = testdir!();\n        let res = clone_repo(\"https://github.com/beeb/test-repo.git\", None, 
&dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(&res.unwrap(), \"d5d72fa135d28b2e8307650b3ea79115183f2406\");\n    }\n\n    #[tokio::test]\n    async fn test_clone_repo_rev() {\n        let dir = testdir!();\n        let res = clone_repo(\n            \"https://github.com/beeb/test-repo.git\",\n            Some(&GitIdentifier::from_rev(\"d230f5c588c0ed00821a4eb3ef38e300e4a519dc\")),\n            &dir,\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(&res.unwrap(), \"d230f5c588c0ed00821a4eb3ef38e300e4a519dc\");\n    }\n\n    #[tokio::test]\n    async fn test_clone_repo_branch() {\n        let dir = testdir!();\n        let res = clone_repo(\n            \"https://github.com/beeb/test-repo.git\",\n            Some(&GitIdentifier::from_branch(\"dev\")),\n            &dir,\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(&res.unwrap(), \"8d903e557e8f1b6e62bde768aa456d4ddfca72c4\");\n    }\n\n    #[tokio::test]\n    async fn test_clone_repo_tag() {\n        let dir = testdir!();\n        let res = clone_repo(\n            \"https://github.com/beeb/test-repo.git\",\n            Some(&GitIdentifier::from_tag(\"v0.1.0\")),\n            &dir,\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(&res.unwrap(), \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\");\n    }\n\n    #[test]\n    fn test_install_path_matches() {\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"lib1\").version_req(\"^1.0.0\").build().into();\n        let dir = testdir!();\n        let path = dir.join(\"lib1-1.1.1\");\n        fs::create_dir(&path).unwrap();\n        assert!(install_path_matches(&dependency, &path));\n\n        let path = dir.join(\"lib1-2.0.0\");\n        fs::create_dir(&path).unwrap();\n        assert!(!install_path_matches(&dependency, &path));\n\n        let path = dir.join(\"lib2-1.0.0\");\n        
fs::create_dir(&path).unwrap();\n        assert!(!install_path_matches(&dependency, &path));\n    }\n\n    #[test]\n    fn test_install_path_matches_nosemver() {\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"lib1\").version_req(\"foobar\").build().into();\n        let dir = testdir!();\n        let path = dir.join(\"lib1-foobar\");\n        fs::create_dir(&path).unwrap();\n        assert!(install_path_matches(&dependency, &path));\n\n        let path = dir.join(\"lib1-somethingelse\");\n        fs::create_dir(&path).unwrap();\n        assert!(!install_path_matches(&dependency, &path));\n    }\n\n    #[test]\n    fn test_find_install_path_sync() {\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"lib1\").version_req(\"^1.0.0\").build().into();\n        let dir = testdir!();\n        let path = dir.join(\"lib1-1.1.1\");\n        fs::create_dir(&path).unwrap();\n        let res = find_install_path_sync(&dependency, &dir);\n        assert!(res.is_some());\n        assert_eq!(res.unwrap(), path);\n    }\n\n    #[tokio::test]\n    async fn test_find_install_path() {\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"lib1\").version_req(\"^1.0.0\").build().into();\n        let dir = testdir!();\n        let path = dir.join(\"lib1-1.2.5\");\n        fs::create_dir(&path).unwrap();\n        let res = find_install_path(&dependency, &dir).await;\n        assert!(res.is_some());\n        assert_eq!(res.unwrap(), path);\n    }\n}\n"
  },
  {
    "path": "crates/core/src/errors.rs",
    "content": "use std::{\n    io,\n    path::{PathBuf, StripPrefixError},\n};\nuse thiserror::Error;\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum SoldeerError {\n    #[error(\"error during login: {0}\")]\n    AuthError(#[from] AuthError),\n\n    #[error(\"error during config operation: {0}\")]\n    ConfigError(#[from] ConfigError),\n\n    #[error(\"error during downloading ({dep}): {source}\")]\n    DownloadError { dep: String, source: DownloadError },\n\n    #[error(\"error during install operation: {0}\")]\n    InstallError(#[from] InstallError),\n\n    #[error(\"error during lockfile operation: {0}\")]\n    LockError(#[from] LockError),\n\n    #[error(\"error during publishing: {0}\")]\n    PublishError(#[from] PublishError),\n\n    #[error(\"error during remappings operation: {0}\")]\n    RemappingsError(#[from] RemappingsError),\n\n    #[error(\"error during registry operation: {0}\")]\n    RegistryError(#[from] RegistryError),\n\n    #[error(\"error during update operation: {0}\")]\n    UpdateError(#[from] UpdateError),\n\n    #[error(\"error during IO operation: {0}\")]\n    IOError(#[from] io::Error),\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum AuthError {\n    #[error(\"login error: invalid email or password\")]\n    InvalidCredentials,\n\n    #[error(\"login error: invalid token\")]\n    InvalidToken,\n\n    #[error(\"missing token, run `soldeer login`\")]\n    MissingToken,\n\n    #[error(\"error during IO operation for the security file: {0}\")]\n    IOError(#[from] io::Error),\n\n    #[error(\"http error during login: {0}\")]\n    HttpError(#[from] reqwest::Error),\n\n    #[error(\"TUI disabled and no credentials passed via CLI\")]\n    TuiDisabled,\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum ConfigError {\n    #[error(\"config file is not valid: {0}\")]\n    Parsing(#[from] toml_edit::TomlError),\n\n    #[error(\"error writing to config file: {0}\")]\n    FileWriteError(#[from] io::Error),\n\n    
#[error(\"empty `version` field in {0}\")]\n    EmptyVersion(String),\n\n    #[error(\"missing `{field}` field in {dep}\")]\n    MissingField { field: String, dep: String },\n\n    #[error(\"invalid `{field}` field in {dep}\")]\n    InvalidField { field: String, dep: String },\n\n    #[error(\"field `{field}` conflicts with `{conflicts_with}` in {dep}\")]\n    FieldConflict { field: String, conflicts_with: String, dep: String },\n\n    #[error(\"only one of `rev`, `branch` or `tag` can be specified for git dependency {0}\")]\n    GitIdentifierConflict(String),\n\n    #[error(\"dependency {0} is not valid\")]\n    InvalidDependency(String),\n\n    #[error(\"dependency {0} was not found\")]\n    MissingDependency(String),\n\n    #[error(\"error parsing config file: {0}\")]\n    DeserializeError(#[from] toml_edit::de::Error),\n\n    #[error(\"error generating config file: {0}\")]\n    SerializeError(#[from] toml_edit::ser::Error),\n\n    #[error(\"error during config operation: {0}\")]\n    DownloadError(#[from] DownloadError),\n\n    #[error(\n        \"the version requirement string for {0} cannot contain the equal symbol for git dependencies and http dependencies with a custom URL\"\n    )]\n    InvalidVersionReq(String),\n\n    #[error(\"dependency specifier {0} cannot be parsed as name~version\")]\n    InvalidNameAndVersion(String),\n\n    #[error(\"invalid project root path in {dep_path}: {project_root}\")]\n    InvalidProjectRoot { project_root: PathBuf, dep_path: PathBuf },\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum DownloadError {\n    #[error(\"error downloading dependency: {0}\")]\n    HttpError(#[from] reqwest::Error),\n\n    #[error(\"error extracting dependency: {0}\")]\n    UnzipError(#[from] zip_extract::ZipExtractError),\n\n    #[error(\"error during git command {args:?}: {message}\")]\n    GitError { message: String, args: Vec<String> },\n\n    #[error(\"error during IO operation for {path:?}: {source}\")]\n    IOError { path: 
PathBuf, source: io::Error },\n\n    #[error(\"error during async operation: {0}\")]\n    AsyncError(#[from] tokio::task::JoinError),\n\n    #[error(\"could not download the dependencies of this dependency {0}\")]\n    SubdependencyError(String),\n\n    #[error(\"the provided URL is invalid: {0}\")]\n    InvalidUrl(String),\n\n    #[error(\"error during registry operation: {0}\")]\n    RegistryError(#[from] RegistryError),\n\n    #[error(\"dependency not found: {0}\")]\n    DependencyNotFound(String),\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum InstallError {\n    #[error(\"zip checksum for {path} does not match lock file: expected {expected}, got {actual}\")]\n    ZipIntegrityError { path: PathBuf, expected: String, actual: String },\n\n    #[error(\"error during IO operation for {path:?}: {source}\")]\n    IOError { path: PathBuf, source: io::Error },\n\n    #[error(\"error during git command: {0}\")]\n    GitError(String),\n\n    #[error(\"error during dependency installation: {0}\")]\n    DownloadError(#[from] DownloadError),\n\n    #[error(\"error during dependency installation: {0}\")]\n    ConfigError(#[from] ConfigError),\n\n    #[error(\"error during async operation: {0}\")]\n    AsyncError(#[from] tokio::task::JoinError),\n\n    #[error(\"error during forge command: {0}\")]\n    ForgeError(String),\n\n    #[error(\"error during registry operation: {0}\")]\n    RegistryError(#[from] RegistryError),\n\n    #[error(\"error with lockfile: {0}\")]\n    LockError(#[from] LockError),\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum LockError {\n    #[error(\"soldeer.lock is missing\")]\n    Missing,\n\n    #[error(\"dependency {0} is already installed\")]\n    DependencyInstalled(String),\n\n    #[error(\"IO error for soldeer.lock: {0}\")]\n    IOError(#[from] io::Error),\n\n    #[error(\"error generating soldeer.lock contents: {0}\")]\n    SerializeError(#[from] toml_edit::ser::Error),\n\n    #[error(\"lock entry does not match a valid 
format\")]\n    InvalidLockEntry,\n\n    #[error(\"missing `{field}` field in lock entry for {dep}\")]\n    MissingField { field: String, dep: String },\n\n    #[error(\"foundry.lock is missing\")]\n    FoundryLockMissing,\n\n    #[error(\"error parsing lockfile contents: {0}\")]\n    DeserializeError(#[from] serde_json::Error),\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum PublishError {\n    #[error(\"no files to publish\")]\n    NoFiles,\n\n    #[error(\"error during zipping: {0}\")]\n    ZipError(#[from] zip::result::ZipError),\n\n    #[error(\"error during IO operation for {path:?}: {source}\")]\n    IOError { path: PathBuf, source: io::Error },\n\n    #[error(\"error while computing the relative path: {0}\")]\n    RelativePathError(#[from] StripPrefixError),\n\n    #[error(\"auth error: {0}\")]\n    AuthError(#[from] AuthError),\n\n    #[error(\"registry error during publishing: {0}\")]\n    DownloadError(#[from] RegistryError),\n\n    #[error(\n        \"Project not found. Make sure you send the right dependency name. The dependency name is the project name you created on https://soldeer.xyz\"\n    )]\n    ProjectNotFound,\n\n    #[error(\"dependency already exists\")]\n    AlreadyExists,\n\n    #[error(\"the package is too big (over 50 MB)\")]\n    PayloadTooLarge,\n\n    #[error(\"http error during publishing: {0}\")]\n    HttpError(#[from] reqwest::Error),\n\n    #[error(\n        \"invalid package name, only alphanumeric characters, `-` and `@` are allowed. 
Length must be between 3 and 100 characters\"\n    )]\n    InvalidName,\n\n    #[error(\"package version cannot be empty\")]\n    EmptyVersion,\n\n    #[error(\"user cancelled operation\")]\n    UserAborted,\n\n    #[error(\"unknown http error\")]\n    UnknownError,\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum RegistryError {\n    #[error(\"error with registry request: {0}\")]\n    HttpError(#[from] reqwest::Error),\n\n    #[error(\"could not get the dependency URL for {0}\")]\n    URLNotFound(String),\n\n    #[error(\n        \"project {0} not found. Private projects require to log in before install. Please check the dependency name (project name) or create a new project on https://soldeer.xyz\"\n    )]\n    ProjectNotFound(String),\n\n    #[error(\"auth error: {0}\")]\n    AuthError(#[from] AuthError),\n\n    #[error(\"package {0} has no version\")]\n    NoVersion(String),\n\n    #[error(\"no matching version found for {dependency} with version requirement {version_req}\")]\n    NoMatchingVersion { dependency: String, version_req: String },\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum RemappingsError {\n    #[error(\"error writing to remappings file: {0}\")]\n    FileWriteError(#[from] io::Error),\n\n    #[error(\"error while interacting with the config file: {0}\")]\n    ConfigError(#[from] ConfigError),\n\n    #[error(\"dependency not found: {0}\")]\n    DependencyNotFound(String),\n}\n\n#[derive(Error, Debug)]\n#[non_exhaustive]\npub enum UpdateError {\n    #[error(\"registry error: {0}\")]\n    RegistryError(#[from] RegistryError),\n\n    #[error(\"download error: {0}\")]\n    DownloadError(#[from] DownloadError),\n\n    #[error(\"error during install operation: {0}\")]\n    InstallError(#[from] InstallError),\n\n    #[error(\"error during async operation: {0}\")]\n    AsyncError(#[from] tokio::task::JoinError),\n}\n"
  },
  {
    "path": "crates/core/src/install.rs",
    "content": "//! Install dependencies.\n//!\n//! This module contains functions to install dependencies from the config object or from the\n//! lockfile. Dependencies can be installed in parallel.\nuse crate::{\n    config::{\n        Dependency, GitIdentifier, HttpDependency, Paths, detect_config_location, read_config_deps,\n        read_soldeer_config,\n    },\n    download::{clone_repo, delete_dependency_files, download_file, unzip_file},\n    errors::{ConfigError, InstallError, LockError},\n    lock::{\n        GitLockEntry, HttpLockEntry, Integrity, LockEntry, PrivateLockEntry, forge,\n        format_install_path, read_lockfile,\n    },\n    registry::{DownloadUrl, get_dependency_url_remote, get_latest_supported_version},\n    utils::{IntegrityChecksum, canonicalize, hash_file, hash_folder, run_git_command},\n};\nuse derive_more::derive::Display;\nuse log::{debug, info, warn};\nuse path_slash::PathBufExt as _;\nuse std::{\n    collections::HashMap,\n    fmt,\n    future::Future,\n    ops::Deref,\n    path::{Path, PathBuf},\n    pin::Pin,\n};\nuse tokio::{fs, sync::mpsc, task::JoinSet};\n\npub type Result<T> = std::result::Result<T, InstallError>;\n\n#[derive(Debug, Clone, Display)]\npub struct DependencyName(String);\n\nimpl Deref for DependencyName {\n    type Target = String;\n\n    fn deref(&self) -> &Self::Target {\n        &self.0\n    }\n}\n\nimpl<T: fmt::Display> From<&T> for DependencyName {\n    fn from(value: &T) -> Self {\n        Self(value.to_string())\n    }\n}\n\n/// Collection of channels to monitor the progress of the install process.\n#[derive(Debug)]\npub struct InstallMonitoring {\n    /// Channel to receive install progress logs.\n    pub logs: mpsc::UnboundedReceiver<String>,\n\n    /// Progress for calls to the API to retrieve the packages versions.\n    pub versions: mpsc::UnboundedReceiver<DependencyName>,\n\n    /// Progress for downloading the dependencies.\n    pub downloads: mpsc::UnboundedReceiver<DependencyName>,\n\n    /// 
Progress for unzipping the downloaded files.\n    pub unzip: mpsc::UnboundedReceiver<DependencyName>,\n\n    /// Progress for installing subdependencies.\n    pub subdependencies: mpsc::UnboundedReceiver<DependencyName>,\n\n    /// Progress for checking the integrity of the installed dependencies.\n    pub integrity: mpsc::UnboundedReceiver<DependencyName>,\n}\n\n/// Collection of channels to notify the caller of the install progress.\n#[derive(Debug, Clone)]\npub struct InstallProgress {\n    /// Channel to send messages to be logged to the user.\n    pub logs: mpsc::UnboundedSender<String>,\n\n    /// Progress for calls to the API to retrieve the packages versions.\n    pub versions: mpsc::UnboundedSender<DependencyName>,\n\n    /// Progress for downloading the dependencies.\n    pub downloads: mpsc::UnboundedSender<DependencyName>,\n\n    /// Progress for unzipping the downloaded files.\n    pub unzip: mpsc::UnboundedSender<DependencyName>,\n\n    /// Progress for installing subdependencies.\n    pub subdependencies: mpsc::UnboundedSender<DependencyName>,\n\n    /// Progress for checking the integrity of the installed dependencies.\n    pub integrity: mpsc::UnboundedSender<DependencyName>,\n}\n\nimpl InstallProgress {\n    /// Create a new install progress tracker, with a receiving half ([InstallMonitoring]) and a\n    /// sending half ([InstallProgress]).\n    pub fn new() -> (Self, InstallMonitoring) {\n        let (logs_tx, logs_rx) = mpsc::unbounded_channel();\n        let (versions_tx, versions_rx) = mpsc::unbounded_channel();\n        let (downloads_tx, downloads_rx) = mpsc::unbounded_channel();\n        let (unzip_tx, unzip_rx) = mpsc::unbounded_channel();\n        let (subdependencies_tx, subdependencies_rx) = mpsc::unbounded_channel();\n        let (integrity_tx, integrity_rx) = mpsc::unbounded_channel();\n        (\n            Self {\n                logs: logs_tx,\n                versions: versions_tx,\n                downloads: downloads_tx,\n     
           unzip: unzip_tx,\n                subdependencies: subdependencies_tx,\n                integrity: integrity_tx,\n            },\n            InstallMonitoring {\n                logs: logs_rx,\n                versions: versions_rx,\n                downloads: downloads_rx,\n                unzip: unzip_rx,\n                subdependencies: subdependencies_rx,\n                integrity: integrity_rx,\n            },\n        )\n    }\n\n    /// Log a message related to progress to the caller.\n    pub fn log(&self, msg: impl fmt::Display) {\n        if let Err(e) = self.logs.send(msg.to_string()) {\n            warn!(err:err = e; \"error sending log message to the install progress channel\");\n        }\n    }\n\n    /// Advance all progress trackers at once, passing the dependency name.\n    pub fn update_all(&self, dependency_name: DependencyName) {\n        if let Err(e) = self.versions.send(dependency_name.clone()) {\n            warn!(err:err = e; \"error sending version message to the install progress channel\");\n        }\n        if let Err(e) = self.downloads.send(dependency_name.clone()) {\n            warn!(err:err = e; \"error sending download message to the install progress channel\");\n        }\n        if let Err(e) = self.unzip.send(dependency_name.clone()) {\n            warn!(err:err = e; \"error sending unzip message to the install progress channel\");\n        }\n        if let Err(e) = self.subdependencies.send(dependency_name.clone()) {\n            warn!(err:err = e; \"error sending sudependencies message to the install progress channel\");\n        }\n        if let Err(e) = self.integrity.send(dependency_name) {\n            warn!(err:err = e; \"error sending integrity message to the install progress channel\");\n        }\n    }\n}\n\n/// Status of a dependency, which can either be missing, installed and untouched, or installed but\n/// failing the integrity check.\n#[derive(Debug, Clone, Copy, PartialEq, Eq, 
Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\npub enum DependencyStatus {\n    /// The dependency is missing.\n    Missing,\n\n    /// The dependency is installed but the integrity check failed.\n    FailedIntegrity,\n\n    /// The dependency is installed and the integrity check passed.\n    Installed,\n}\n\n/// HTTP dependency installation information.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[builder(on(String, into))]\nstruct HttpInstallInfo {\n    /// The name of the dependency.\n    name: String,\n\n    /// The version of the dependency. This is not a version requirement string but a specific.\n    /// version.\n    version: String,\n\n    /// The URL from which the zip file will be downloaded.\n    url: String,\n\n    /// The checksum of the downloaded zip file, if available (e.g. from the lockfile)\n    checksum: Option<String>,\n\n    /// An optional relative path to the project's root within the zip file.\n    ///\n    /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided,\n    /// then the zip's root must contain a Soldeer config.\n    project_root: Option<PathBuf>,\n}\n\nimpl fmt::Display for HttpInstallInfo {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        // since the version is an exact version number, we use a dash and not a tilde\n        write!(f, \"{}-{}\", self.name, self.version)\n    }\n}\n\n/// Git dependency installation information.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[builder(on(String, into))]\nstruct GitInstallInfo {\n    /// The name of the dependency.\n    name: String,\n\n    /// The version of the dependency.\n    version: String,\n\n    /// The URL of the git repository.\n    git: String,\n\n    /// The identifier of the git dependency (e.g. a commit hash, branch name, or tag name). 
If\n    /// `None` is provided, the default branch is used.\n    identifier: Option<GitIdentifier>,\n\n    /// An optional relative path to the project's root within the repository.\n    ///\n    /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided,\n    /// then the repo's root must contain a Soldeer config.\n    project_root: Option<PathBuf>,\n}\n\nimpl fmt::Display for GitInstallInfo {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"{}-{}\", self.name, self.version)\n    }\n}\n\n/// Installation information for a dependency.\n///\n/// A builder can be used to create the underlying [`HttpInstallInfo`] or [`GitInstallInfo`] and\n/// then converted into this type with `.into()`.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Display)]\nenum InstallInfo {\n    /// Installation information for an HTTP dependency.\n    Http(HttpInstallInfo),\n\n    /// Installation information for a git dependency.\n    Git(GitInstallInfo),\n\n    /// Installation information for a private dependency.\n    Private(HttpInstallInfo),\n}\n\nimpl From<HttpInstallInfo> for InstallInfo {\n    fn from(value: HttpInstallInfo) -> Self {\n        Self::Http(value)\n    }\n}\n\nimpl From<GitInstallInfo> for InstallInfo {\n    fn from(value: GitInstallInfo) -> Self {\n        Self::Git(value)\n    }\n}\n\nimpl InstallInfo {\n    async fn from_lock(lock: LockEntry, project_root: Option<PathBuf>) -> Result<Self> {\n        match lock {\n            LockEntry::Http(lock) => Ok(HttpInstallInfo {\n                name: lock.name,\n                version: lock.version,\n                url: lock.url,\n                checksum: Some(lock.checksum),\n                project_root,\n            }\n            .into()),\n            LockEntry::Git(lock) => Ok(GitInstallInfo {\n                name: lock.name,\n                version: lock.version,\n                git: lock.git,\n                identifier: 
Some(GitIdentifier::from_rev(lock.rev)),\n                project_root,\n            }\n            .into()),\n            LockEntry::Private(lock) => {\n                // need to retrieve a signed download URL from the registry\n                let download = get_dependency_url_remote(\n                    &HttpDependency::builder()\n                        .name(&lock.name)\n                        .version_req(&lock.version)\n                        .build()\n                        .into(),\n                    &lock.version,\n                )\n                .await?;\n                Ok(Self::Private(HttpInstallInfo {\n                    name: lock.name,\n                    version: lock.version,\n                    url: download.url,\n                    checksum: Some(lock.checksum),\n                    project_root,\n                }))\n            }\n        }\n    }\n}\n\n/// Git submodule information\n#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]\nstruct Submodule {\n    url: String,\n    path: String,\n    branch: Option<String>,\n}\n\n/// Install a list of dependencies in parallel.\n///\n/// This function spawns a task for each dependency and waits for all of them to finish. 
Each task\n/// checks the integrity of the dependency if found on disk, downloads the dependency (zip file or\n/// cloning repo) if not already present, unzips the zip file if necessary, installs\n/// sub-dependencies and generates the lockfile entry.\npub async fn install_dependencies(\n    dependencies: &[Dependency],\n    locks: &[LockEntry],\n    deps: impl AsRef<Path>,\n    recursive_deps: bool,\n    progress: InstallProgress,\n) -> Result<Vec<LockEntry>> {\n    let mut set = JoinSet::new();\n    for dep in dependencies {\n        debug!(dep:% = dep; \"spawning task to install dependency\");\n        set.spawn({\n            let d = dep.clone();\n            let p = progress.clone();\n            let lock = locks.iter().find(|l| l.name() == dep.name()).cloned();\n            let deps = deps.as_ref().to_path_buf();\n            async move {\n                install_dependency(\n                    &d,\n                    lock.as_ref(),\n                    deps,\n                    None,\n                    recursive_deps,\n                    p,\n                )\n                .await\n            }\n        });\n    }\n\n    let mut results = Vec::new();\n    while let Some(res) = set.join_next().await {\n        let res = res??;\n        debug!(dep:% = res.name(); \"install task finished\");\n        results.push(res);\n    }\n    debug!(\"all install tasks have finished\");\n    Ok(results)\n}\n\n/// Install a list of dependencies sequentially.\n///\n/// This function can be used inside another tokio task to avoid spawning more tasks, useful for\n/// recursive install. 
For each dep, checks the integrity of the dependency if found on disk,\n/// downloads the dependency (zip file or cloning repo) if not already present, unzips the zip file\n/// if necessary, installs sub-dependencies and generates the lockfile entry.\npub async fn install_dependencies_sequential(\n    dependencies: &[Dependency],\n    locks: &[LockEntry],\n    deps: impl AsRef<Path> + Clone,\n    recursive_deps: bool,\n    progress: InstallProgress,\n) -> Result<Vec<LockEntry>> {\n    let mut results = Vec::new();\n    for dep in dependencies {\n        debug!(dep:% = dep; \"installing dependency sequentially\");\n        let lock = locks.iter().find(|l| l.name() == dep.name());\n        results.push(\n            install_dependency(dep, lock, deps.clone(), None, recursive_deps, progress.clone())\n                .await?,\n        );\n        debug!(dep:% = dep; \"sequential install finished\");\n    }\n    debug!(\"all sequential installs have finished\");\n    Ok(results)\n}\n\n/// Install a single dependency.\n///\n/// This function checks the integrity of the dependency if found on disk, downloads the dependency\n/// (zip file or cloning repo) if not already present, unzips the zip file if necessary, installs\n/// sub-dependencies and generates the lockfile entry.\n///\n/// If no lockfile entry is provided, the dependency is installed from the config object and\n/// integrity checks are skipped.\npub async fn install_dependency(\n    dependency: &Dependency,\n    lock: Option<&LockEntry>,\n    deps: impl AsRef<Path>,\n    force_version: Option<String>,\n    recursive_deps: bool,\n    progress: InstallProgress,\n) -> Result<LockEntry> {\n    if let Some(lock) = lock {\n        debug!(dep:% = dependency; \"installing based on lock entry\");\n        match check_dependency_integrity(lock, &deps).await? 
{\n            DependencyStatus::Installed => {\n                info!(dep:% = dependency; \"skipped install, dependency already up-to-date with lockfile\");\n                progress.update_all(dependency.into());\n\n                return Ok(lock.clone());\n            }\n            DependencyStatus::FailedIntegrity => match dependency {\n                Dependency::Http(_) => {\n                    info!(dep:% = dependency; \"dependency failed integrity check, reinstalling\");\n                    progress.log(format!(\n                        \"Dependency {dependency} failed integrity check, reinstalling\"\n                    ));\n                    // we know the folder exists because otherwise we would have gotten\n                    // `Missing`\n                    delete_dependency_files(dependency, &deps).await?;\n                    debug!(dep:% = dependency; \"removed dependency folder\");\n                    // we won't need to retrieve the version number so we mark it as done\n                    progress.versions.send(dependency.into()).ok();\n                }\n                Dependency::Git(_) => {\n                    let commit = &lock.as_git().expect(\"lock entry should be of type git\").rev;\n                    info!(dep:% = dependency, commit; \"dependency failed integrity check, resetting to commit\");\n                    progress.log(format!(\n                        \"Dependency {dependency} failed integrity check, resetting to commit {commit}\"\n                    ));\n\n                    reset_git_dependency(\n                        lock.as_git().expect(\"lock entry should be of type git\"),\n                        &deps,\n                    )\n                    .await?;\n                    debug!(dep:% = dependency; \"reset git dependency\");\n                    // dependency should now be at the correct commit, we can exit\n                    progress.update_all(dependency.into());\n\n                    return 
Ok(lock.clone());\n                }\n            },\n            DependencyStatus::Missing => {\n                // make sure there is no existing directory for the dependency\n                if let Some(path) = dependency.install_path(&deps).await {\n                    fs::remove_dir_all(&path)\n                        .await\n                        .map_err(|e| InstallError::IOError { path, source: e })?;\n                }\n                info!(dep:% = dependency; \"dependency is missing, installing\");\n                // we won't need to retrieve the version number so we mark it as done\n                progress.versions.send(dependency.into()).ok();\n            }\n        }\n        install_dependency_inner(\n            &InstallInfo::from_lock(lock.clone(), dependency.project_root()).await?,\n            lock.install_path(&deps),\n            recursive_deps,\n            progress,\n        )\n        .await\n    } else {\n        // no lockfile entry, install from config object\n        debug!(dep:% = dependency; \"no lockfile entry, installing based on config\");\n        // make sure there is no existing directory for the dependency\n        if let Some(path) = dependency.install_path(&deps).await {\n            fs::remove_dir_all(&path)\n                .await\n                .map_err(|e| InstallError::IOError { path, source: e })?;\n        }\n\n        let (download, version) = match dependency.url() {\n            // for git dependencies and http dependencies which have a custom url, we use the\n            // version requirement string as version, because in that case a version requirement has\n            // little sense (we can't automatically bump the version)\n            Some(url) => (\n                DownloadUrl { url: url.clone(), private: false },\n                dependency.version_req().to_string(),\n            ),\n            None => {\n                let version = match force_version {\n                    Some(v) => v,\n         
           None => get_latest_supported_version(dependency).await?,\n                };\n                (get_dependency_url_remote(dependency, &version).await?, version)\n            }\n        };\n        debug!(dep:% = dependency, version; \"resolved version\");\n        debug!(dep:% = dependency, url:? = download; \"resolved download URL\");\n        // indicate that we have retrieved the version number\n        progress.versions.send(dependency.into()).ok();\n\n        let info = match &dependency {\n            Dependency::Http(dep) => {\n                if download.private {\n                    InstallInfo::Private(\n                        HttpInstallInfo::builder()\n                            .name(&dep.name)\n                            .version(&version)\n                            .url(download.url)\n                            .build(),\n                    )\n                } else {\n                    HttpInstallInfo::builder()\n                        .name(&dep.name)\n                        .version(&version)\n                        .url(download.url)\n                        .build()\n                        .into()\n                }\n            }\n            Dependency::Git(dep) => GitInstallInfo::builder()\n                .name(&dep.name)\n                .version(&version)\n                .git(download.url)\n                .maybe_identifier(dep.identifier.clone())\n                .build()\n                .into(),\n        };\n        let install_path = format_install_path(dependency.name(), &version, &deps);\n        debug!(dep:% = dependency; \"installing to path {install_path:?}\");\n        install_dependency_inner(&info, install_path, recursive_deps, progress).await\n    }\n}\n\n/// Check the integrity of a dependency that was installed.\n///\n/// If any file has changed in the dependency directory (except ignored files and any `.git`\n/// directory), the integrity check will fail.\npub async fn check_dependency_integrity(\n  
  lock: &LockEntry,\n    deps: impl AsRef<Path>,\n) -> Result<DependencyStatus> {\n    match lock {\n        LockEntry::Http(lock) => check_http_dependency(lock, deps).await,\n        LockEntry::Private(lock) => check_http_dependency(lock, deps).await,\n        LockEntry::Git(lock) => check_git_dependency(lock, deps).await,\n    }\n}\n\n/// Ensure that the dependencies directory exists.\n///\n/// If the directory does not exist, it will be created.\npub fn ensure_dependencies_dir(path: impl AsRef<Path>) -> Result<()> {\n    let path = path.as_ref();\n    if !path.exists() {\n        debug!(path:?; \"dependencies dir doesn't exist, creating it\");\n        std::fs::create_dir(path)\n            .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?;\n    }\n    Ok(())\n}\n\n/// Install a single dependency.\nasync fn install_dependency_inner(\n    dep: &InstallInfo,\n    path: impl AsRef<Path>,\n    subdependencies: bool,\n    progress: InstallProgress,\n) -> Result<LockEntry> {\n    match dep {\n        InstallInfo::Http(dep) => {\n            let (zip_integrity, integrity) =\n                install_http_dependency(dep, path, subdependencies, progress).await?;\n            Ok(HttpLockEntry::builder()\n                .name(&dep.name)\n                .version(&dep.version)\n                .url(&dep.url)\n                .checksum(zip_integrity.to_string())\n                .integrity(integrity.to_string())\n                .build()\n                .into())\n        }\n        InstallInfo::Private(dep) => {\n            let (zip_integrity, integrity) =\n                install_http_dependency(dep, path, subdependencies, progress).await?;\n            Ok(PrivateLockEntry::builder()\n                .name(&dep.name)\n                .version(&dep.version)\n                .checksum(zip_integrity.to_string())\n                .integrity(integrity.to_string())\n                .build()\n                .into())\n        }\n        
InstallInfo::Git(dep) => {\n            // if the dependency was specified without a commit hash and we didn't have a lockfile,\n            // clone the default branch\n            let commit = clone_repo(&dep.git, dep.identifier.as_ref(), &path).await?;\n            progress.downloads.send(dep.into()).ok();\n\n            if subdependencies {\n                debug!(dep:% = dep; \"installing subdependencies\");\n                install_subdependencies(&path, dep.project_root.as_ref()).await?;\n                debug!(dep:% = dep; \"finished installing subdependencies\");\n            }\n            progress.unzip.send(dep.into()).ok();\n            progress.subdependencies.send(dep.into()).ok();\n            progress.integrity.send(dep.into()).ok();\n            Ok(GitLockEntry::builder()\n                .name(&dep.name)\n                .version(&dep.version)\n                .git(&dep.git)\n                .rev(commit)\n                .build()\n                .into())\n        }\n    }\n}\n\n/// Install subdependencies of a dependency.\n///\n/// This function checks for a `.gitmodules` file in the dependency directory and clones the\n/// submodules if it exists. 
If a valid Soldeer config is found at the project root (optionally a\n/// sub-dir of the dependency folder), the soldeer dependencies are installed.\nfn install_subdependencies(\n    path: impl AsRef<Path>,\n    project_root: Option<&PathBuf>,\n) -> Pin<Box<dyn Future<Output = Result<()>> + Send + '_>> {\n    let path = path.as_ref().to_path_buf();\n    Box::pin(async move {\n        let gitmodules_path = path.join(\".gitmodules\");\n        if fs::metadata(&gitmodules_path).await.is_ok() {\n            debug!(path:?; \"found .gitmodules, installing subdependencies with git\");\n            if fs::metadata(path.join(\".git\")).await.is_ok() {\n                debug!(path:?; \"subdependency contains .git directory, cloning submodules\");\n                run_git_command(&[\"submodule\", \"update\", \"--init\"], Some(&path)).await?;\n                // we need to recurse into each of the submodules to ensure any soldeer sub-deps\n                // of those are also installed\n                let submodules = get_submodules(&path).await?;\n                let mut set = JoinSet::new();\n                for (_, submodule) in submodules {\n                    let sub_path = path.join(submodule.path);\n                    debug!(sub_path:?; \"recursing into the git submodule\");\n                    set.spawn(async move { install_subdependencies(sub_path, None).await });\n                }\n                while let Some(res) = set.join_next().await {\n                    res??;\n                }\n            } else {\n                debug!(path:?; \"subdependency has git submodules configuration but is not a git repository\");\n                let submodule_paths = reinit_submodules(&path).await?;\n                // we need to recurse into each of the submodules to ensure any soldeer sub-deps\n                // of those are also installed\n                let mut set = JoinSet::new();\n                for sub_path in submodule_paths {\n                    
debug!(sub_path:?; \"recursing into the git submodule\");\n                    set.spawn(async move { install_subdependencies(sub_path, None).await });\n                }\n                while let Some(res) = set.join_next().await {\n                    res??;\n                }\n            }\n        }\n        // if there's a suitable soldeer config, install the soldeer deps\n        let path = get_subdependency_root(path, project_root).await?;\n        if detect_config_location(&path).is_some() {\n            // install subdependencies\n            debug!(path:?; \"found soldeer config, installing subdependencies\");\n            install_subdependencies_inner(Paths::from_root(path)?).await?;\n        }\n        Ok(())\n    })\n}\n\n/// Inner logic for installing subdependencies at a given path.\n///\n/// This is a similar implementation to the one found in `soldeer_commands` but\n/// simplified.\nasync fn install_subdependencies_inner(paths: Paths) -> Result<()> {\n    let config = read_soldeer_config(&paths.config)?;\n    ensure_dependencies_dir(&paths.dependencies)?;\n    let (dependencies, _) = read_config_deps(&paths.config)?;\n    let lockfile = read_lockfile(&paths.lock)?;\n    let (progress, _) = InstallProgress::new(); // not used at the moment\n    let _ = install_dependencies(\n        &dependencies,\n        &lockfile.entries,\n        &paths.dependencies,\n        config.recursive_deps,\n        progress,\n    )\n    .await?;\n    Ok(())\n}\n\n/// Download and unzip an HTTP dependency\nasync fn install_http_dependency(\n    dep: &HttpInstallInfo,\n    path: impl AsRef<Path>,\n    subdependencies: bool,\n    progress: InstallProgress,\n) -> Result<(IntegrityChecksum, IntegrityChecksum)> {\n    let path = path.as_ref();\n    let zip_path = download_file(\n        &dep.url,\n        path.parent().expect(\"dependency install path should have a parent\"),\n        &format!(\"{}-{}\", dep.name, dep.version),\n    )\n    .await?;\n    
progress.downloads.send(dep.into()).ok();\n\n    let zip_integrity = tokio::task::spawn_blocking({\n        let zip_path = zip_path.clone();\n        move || hash_file(zip_path)\n    })\n    .await?\n    .map_err(|e| InstallError::IOError { path: zip_path.clone(), source: e })?;\n    if let Some(checksum) = &dep.checksum {\n        if checksum != &zip_integrity.to_string() {\n            return Err(InstallError::ZipIntegrityError {\n                path: zip_path.clone(),\n                expected: checksum.to_string(),\n                actual: zip_integrity.to_string(),\n            });\n        }\n        debug!(zip_path:?; \"archive integrity check successful\");\n    } else {\n        debug!(zip_path:?; \"no checksum available for archive integrity check\");\n    }\n    unzip_file(&zip_path, path).await?;\n    progress.unzip.send(dep.into()).ok();\n\n    if subdependencies {\n        debug!(dep:% = dep; \"installing subdependencies\");\n        install_subdependencies(path, dep.project_root.as_ref()).await?;\n        debug!(dep:% = dep; \"finished installing subdependencies\");\n    }\n    progress.subdependencies.send(dep.into()).ok();\n\n    let integrity = tokio::task::spawn_blocking({\n        let path = path.to_path_buf();\n        move || hash_folder(&path)\n    })\n    .await?\n    .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?;\n    debug!(dep:% = dep, checksum = integrity.0; \"integrity checksum computed\");\n    progress.integrity.send(dep.into()).ok();\n    Ok((zip_integrity, integrity))\n}\n\n/// Retrieve a map of git submodules for a path by looking at the `.gitmodules` file.\nasync fn get_submodules(path: &PathBuf) -> Result<HashMap<String, Submodule>> {\n    let submodules_config =\n        run_git_command(&[\"config\", \"-f\", \".gitmodules\", \"-l\"], Some(path)).await?;\n    let mut submodules = HashMap::<String, Submodule>::new();\n    for config_line in submodules_config.trim().lines() {\n        let (item, 
value) = config_line.split_once('=').expect(\"config format should be valid\");\n        let Some(item) = item.strip_prefix(\"submodule.\") else {\n            continue;\n        };\n        let (submodule_name, item_name) =\n            item.rsplit_once('.').expect(\"config format should be valid\");\n        let entry = submodules.entry(submodule_name.to_string()).or_default();\n        match item_name {\n            \"path\" => entry.path = value.to_string(),\n            \"url\" => entry.url = value.to_string(),\n            \"branch\" => entry.branch = Some(value.to_string()),\n            _ => {}\n        }\n    }\n    Ok(submodules)\n}\n\n/// Re-add submodules found in a `.gitmodules` when the folder has to be re-initialized as a git\n/// repo.\n///\n/// The file is parsed, and each module is added again with `git submodule add`.\nasync fn reinit_submodules(path: &PathBuf) -> Result<Vec<PathBuf>> {\n    debug!(path:?; \"running git init\");\n    run_git_command(&[\"init\"], Some(path)).await?;\n    let submodules = get_submodules(path).await?;\n    debug!(submodules:?, path:?; \"got submodules config\");\n    let mut foundry_lock = forge::Lockfile::new(path);\n    if foundry_lock.read().is_ok() {\n        debug!(path:?; \"foundry lockfile exists\");\n    }\n    let mut out = Vec::new();\n    for (submodule_name, submodule) in submodules {\n        // make sure to remove the path if it already exists\n        let dest_path = path.join(&submodule.path);\n        fs::remove_dir_all(&dest_path).await.ok(); // ignore error if folder doesn't exist\n        let mut args = vec![\"submodule\", \"add\", \"-f\", \"--name\", &submodule_name];\n        if let Some(branch) = &submodule.branch {\n            args.push(\"-b\");\n            args.push(branch);\n        }\n        args.push(&submodule.url);\n        args.push(&submodule.path);\n        run_git_command(args, Some(path)).await?;\n        if let Some(\n            forge::DepIdentifier::Branch { rev, .. 
} |\n            forge::DepIdentifier::Tag { rev, .. } |\n            forge::DepIdentifier::Rev { rev },\n        ) = foundry_lock.get(Path::new(&submodule.path))\n        {\n            debug!(submodule_name, path:?; \"found corresponding item in foundry lockfile\");\n            run_git_command([\"checkout\", rev], Some(&dest_path)).await?;\n            debug!(submodule_name, path:?; \"submodule checked out at {rev}\");\n        }\n        debug!(submodule_name, path:?; \"added submodule\");\n        out.push(path.join(submodule.path));\n    }\n    Ok(out)\n}\n\n/// Check the integrity of an HTTP dependency.\n///\n/// This function hashes the contents of the dependency directory and compares it with the lockfile\n/// entry.\nasync fn check_http_dependency(\n    lock: &impl Integrity,\n    deps: impl AsRef<Path>,\n) -> Result<DependencyStatus> {\n    let path = lock.install_path(deps);\n    if fs::metadata(&path).await.is_err() {\n        return Ok(DependencyStatus::Missing);\n    }\n    let current_hash = tokio::task::spawn_blocking({\n        let path = path.clone();\n        move || hash_folder(&path)\n    })\n    .await?\n    .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?;\n    let Some(integrity) = lock.integrity() else {\n        return Err(LockError::MissingField {\n            field: \"integrity\".to_string(),\n            dep: path.to_string_lossy().to_string(),\n        }\n        .into())\n    };\n    if &current_hash.to_string() != integrity {\n        debug!(path:?, expected = integrity, computed = current_hash.0; \"integrity checksum mismatch\");\n        return Ok(DependencyStatus::FailedIntegrity);\n    }\n    Ok(DependencyStatus::Installed)\n}\n\n/// Check the integrity of a git dependency.\n///\n/// This function checks that the dependency is a git repository and that the current commit is the\n/// one specified in the lockfile entry.\nasync fn check_git_dependency(\n    lock: &GitLockEntry,\n    deps: impl 
AsRef<Path>,\n) -> Result<DependencyStatus> {\n    let path = lock.install_path(deps);\n    if fs::metadata(&path).await.is_err() {\n        return Ok(DependencyStatus::Missing);\n    }\n    // check that the location is a git repository\n    let top_level = match run_git_command(\n        &[\"rev-parse\", \"--show-toplevel\", path.to_string_lossy().as_ref()],\n        Some(&path),\n    )\n    .await\n    {\n        Ok(top_level) => {\n            // stdout contains the path twice, we only keep the first item\n            PathBuf::from(top_level.split_whitespace().next().unwrap_or_default())\n        }\n        Err(_) => {\n            // error getting the top level directory, assume the directory is not a git repository\n            debug!(path:?; \"`git rev-parse --show-toplevel` failed\");\n            return Ok(DependencyStatus::Missing);\n        }\n    };\n    let top_level = top_level.to_slash_lossy();\n    // compare the top level directory to the install path\n\n    let absolute_path = canonicalize(&path)\n        .await\n        .map_err(|e| InstallError::IOError { path: path.clone(), source: e })?;\n    if top_level.trim() != absolute_path.to_slash_lossy() {\n        // the top level directory is not the install path, assume the directory is not a git\n        // repository\n        debug!(path:?; \"dependency's toplevel dir is outside of dependency folder: not a git repo\");\n        return Ok(DependencyStatus::Missing);\n    }\n    // for git dependencies, the `rev` field holds the commit hash\n    match run_git_command(&[\"diff\", \"--exit-code\", &lock.rev], Some(&path)).await {\n        Ok(_) => Ok(DependencyStatus::Installed),\n        Err(_) => {\n            debug!(path:?, rev = lock.rev; \"git repo has non-empty diff compared to lockfile rev\");\n            Ok(DependencyStatus::FailedIntegrity)\n        }\n    }\n}\n\n/// Reset a git dependency to the commit specified in the lockfile entry.\n///\n/// This function runs `git reset --hard 
<commit>` and `git clean -fd` in the git dependency's\n/// directory.\nasync fn reset_git_dependency(lock: &GitLockEntry, deps: impl AsRef<Path>) -> Result<()> {\n    let path = lock.install_path(deps);\n    run_git_command(&[\"reset\", \"--hard\", &lock.rev], Some(&path)).await?;\n    run_git_command(&[\"clean\", \"-fd\"], Some(&path)).await?;\n    Ok(())\n}\n\n/// Normalize and check the path to a subdependency's project root.\n///\n/// The combination of the subdependency path with the relative path to the root must be at or below\n/// the level of the subdependency, to avoid directory traversal.\nasync fn get_subdependency_root(\n    subdependency_path: PathBuf,\n    relative_root: Option<&PathBuf>,\n) -> Result<PathBuf> {\n    let path = match relative_root {\n        Some(relative_root) => {\n            let tentative_path =\n                canonicalize(subdependency_path.join(relative_root)).await.map_err(|_| {\n                    InstallError::ConfigError(ConfigError::InvalidProjectRoot {\n                        project_root: relative_root.to_owned(),\n                        dep_path: subdependency_path.clone(),\n                    })\n                })?;\n            // final path must be below the dependency's folder\n            let path_with_slashes = subdependency_path.to_slash_lossy().into_owned();\n            if !tentative_path.to_slash_lossy().starts_with(&path_with_slashes) {\n                return Err(InstallError::ConfigError(ConfigError::InvalidProjectRoot {\n                    project_root: relative_root.to_owned(),\n                    dep_path: subdependency_path.clone(),\n                }));\n            }\n            tentative_path\n        }\n        None => subdependency_path,\n    };\n    Ok(path)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::config::{GitDependency, HttpDependency};\n    use mockito::{Matcher, Server, ServerGuard};\n    use temp_env::async_with_vars;\n    use testdir::testdir;\n\n    async 
fn mock_api_server() -> ServerGuard {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3389,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"},{\"created_at\":\"2024-07-03T14:44:59.729623Z\",\"deleted\":false,\"downloads\":5290,\"id\":\"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4\",\"internal_name\":\"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"version\":\"1.9.1\"},{\"created_at\":\"2024-07-03T14:44:58.148723Z\",\"deleted\":false,\"downloads\":21,\"id\":\"b463683a-c4b4-40bf-b707-1c4eb343c4d2\",\"internal_name\":\"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"version\":\"1.9.0\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n        let data2 = 
r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3391,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision-cli\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data2)\n            .create_async()\n            .await;\n        server\n    }\n\n    async fn mock_api_private() -> ServerGuard {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2025-09-28T12:36:09.526660Z\",\"deleted\":false,\"downloads\":0,\"file_size\":65083,\"id\":\"0440c261-8cdf-4738-9139-c4dc7b0c7f3e\",\"internal_name\":\"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip\",\"private\":true,\"project_id\":\"14f419e7-2d64-49e4-86b9-b44b36627786\",\"uploader\":\"bf8e75f4-0c36-4bcb-a23b-2682df92f176\",\"url\":\"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\",\"version\":\"0.1.0\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n        let data2 = 
r#\"{\"data\":[{\"created_at\":\"2025-09-28T12:36:09.526660Z\",\"deleted\":false,\"id\":\"0440c261-8cdf-4738-9139-c4dc7b0c7f3e\",\"internal_name\":\"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip\",\"private\":true,\"project_id\":\"14f419e7-2d64-49e4-86b9-b44b36627786\",\"url\":\"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\",\"version\":\"0.1.0\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision-cli\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data2)\n            .create_async()\n            .await;\n        server\n    }\n\n    #[tokio::test]\n    async fn test_check_http_dependency() {\n        let lock = HttpLockEntry::builder()\n            .name(\"lib1\")\n            .version(\"1.0.0\")\n            .url(\"https://example.com/zip.zip\")\n            .checksum(\"\")\n            .integrity(\"beef\")\n            .build();\n        let dir = testdir!();\n        let path = dir.join(\"lib1-1.0.0\");\n        fs::create_dir(&path).await.unwrap();\n        fs::write(path.join(\"test.txt\"), \"foobar\").await.unwrap();\n        let res = check_http_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity);\n\n        let lock = HttpLockEntry::builder()\n            .name(\"lib2\")\n            .version(\"1.0.0\")\n            .url(\"https://example.com/zip.zip\")\n            .checksum(\"\")\n            .integrity(\"\")\n            .build();\n        let res = check_http_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::Missing);\n\n        let hash = hash_folder(&path).unwrap();\n        let lock = HttpLockEntry::builder()\n            .name(\"lib1\")\n            .version(\"1.0.0\")\n            
.url(\"https://example.com/zip.zip\")\n            .checksum(\"\")\n            .integrity(hash.to_string())\n            .build();\n        let res = check_http_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::Installed);\n    }\n\n    #[tokio::test]\n    async fn test_check_git_dependency() {\n        // happy path\n        let dir = testdir!();\n        let path = &dir.join(\"test-repo-1.0.0\");\n        let rev = clone_repo(\"https://github.com/beeb/test-repo.git\", None, &path).await.unwrap();\n        let lock =\n            GitLockEntry::builder().name(\"test-repo\").version(\"1.0.0\").git(\"\").rev(rev).build();\n        let res = check_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::Installed);\n\n        // replace contents of existing file, diff is not empty\n        fs::write(path.join(\"foo.txt\"), \"foo\").await.unwrap();\n        let res = check_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity);\n\n        // wrong commit is checked out\n        let lock = GitLockEntry::builder()\n            .name(\"test-repo\")\n            .version(\"1.0.0\")\n            .git(\"\")\n            .rev(\"78c2f6a1a54db26bab6c3f501854a1564eb3707f\")\n            .build();\n        let res = check_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity);\n\n        // missing folder\n        let lock = GitLockEntry::builder().name(\"lib1\").version(\"1.0.0\").git(\"\").rev(\"\").build();\n        let res = check_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::Missing);\n\n        // remove .git folder -> not a git repo\n        let lock =\n     
       GitLockEntry::builder().name(\"test-repo\").version(\"1.0.0\").git(\"\").rev(\"\").build();\n        fs::remove_dir_all(path.join(\".git\")).await.unwrap();\n        let res = check_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), DependencyStatus::Missing);\n    }\n\n    #[tokio::test]\n    async fn test_reset_git_dependency() {\n        let dir = testdir!();\n        let path = &dir.join(\"test-repo-1.0.0\");\n        clone_repo(\"https://github.com/beeb/test-repo.git\", None, &path).await.unwrap();\n        let lock = GitLockEntry::builder()\n            .name(\"test-repo\")\n            .version(\"1.0.0\")\n            .git(\"\")\n            .rev(\"78c2f6a1a54db26bab6c3f501854a1564eb3707f\")\n            .build();\n        let test = path.join(\"test.txt\");\n        fs::write(&test, \"foobar\").await.unwrap();\n        let res = reset_git_dependency(&lock, &dir).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        // non checked-in file\n        assert!(fs::metadata(test).await.is_err());\n        // file that is in `main` but not in `78c2f6a`\n        assert!(fs::metadata(path.join(\"foo.txt\")).await.is_err());\n        let commit = run_git_command(&[\"rev-parse\", \"--verify\", \"HEAD\"], Some(path))\n            .await\n            .unwrap()\n            .trim()\n            .to_string();\n        assert_eq!(commit, \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\");\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_inner_http() {\n        let dir = testdir!();\n        let install: InstallInfo = HttpInstallInfo::builder().name(\"test\").version(\"1.0.0\").url(\"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\").checksum(\"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\").build().into();\n        let (progress, _) = InstallProgress::new();\n        let res = install_dependency_inner(&install, &dir, false, 
progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), \"test\");\n        assert_eq!(lock.version(), \"1.0.0\");\n        let lock = lock.as_http().unwrap();\n        assert_eq!(\n            lock.url,\n            \"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\"\n        );\n        assert_eq!(\n            lock.checksum,\n            \"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\n        );\n        let hash = hash_folder(&dir).unwrap();\n        assert_eq!(lock.integrity, hash.to_string());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_inner_git() {\n        let dir = testdir!();\n        let install: InstallInfo = GitInstallInfo::builder()\n            .name(\"test\")\n            .version(\"1.0.0\")\n            .git(\"https://github.com/beeb/test-repo.git\")\n            .build()\n            .into();\n        let (progress, _) = InstallProgress::new();\n        let res = install_dependency_inner(&install, &dir, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), \"test\");\n        assert_eq!(lock.version(), \"1.0.0\");\n        let lock = lock.as_git().unwrap();\n        assert_eq!(lock.git, \"https://github.com/beeb/test-repo.git\");\n        assert_eq!(lock.rev, \"d5d72fa135d28b2e8307650b3ea79115183f2406\");\n        assert!(dir.join(\".git\").exists());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_inner_git_rev() {\n        let dir = testdir!();\n        let install: InstallInfo = GitInstallInfo::builder()\n            .name(\"test\")\n            .version(\"1.0.0\")\n            .git(\"https://github.com/beeb/test-repo.git\")\n            .identifier(GitIdentifier::from_rev(\"78c2f6a1a54db26bab6c3f501854a1564eb3707f\"))\n            .build()\n            .into();\n        let (progress, _) 
= InstallProgress::new();\n        let res = install_dependency_inner(&install, &dir, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), \"test\");\n        assert_eq!(lock.version(), \"1.0.0\");\n        let lock = lock.as_git().unwrap();\n        assert_eq!(lock.git, \"https://github.com/beeb/test-repo.git\");\n        assert_eq!(lock.rev, \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\");\n        assert!(dir.join(\".git\").exists());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_inner_git_branch() {\n        let dir = testdir!();\n        let install: InstallInfo = GitInstallInfo::builder()\n            .name(\"test\")\n            .version(\"1.0.0\")\n            .git(\"https://github.com/beeb/test-repo.git\")\n            .identifier(GitIdentifier::from_branch(\"dev\"))\n            .build()\n            .into();\n        let (progress, _) = InstallProgress::new();\n        let res = install_dependency_inner(&install, &dir, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), \"test\");\n        assert_eq!(lock.version(), \"1.0.0\");\n        let lock = lock.as_git().unwrap();\n        assert_eq!(lock.git, \"https://github.com/beeb/test-repo.git\");\n        assert_eq!(lock.rev, \"8d903e557e8f1b6e62bde768aa456d4ddfca72c4\");\n        assert!(dir.join(\".git\").exists());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_inner_git_tag() {\n        let dir = testdir!();\n        let install: InstallInfo = GitInstallInfo::builder()\n            .name(\"test\")\n            .version(\"1.0.0\")\n            .git(\"https://github.com/beeb/test-repo.git\")\n            .identifier(GitIdentifier::from_tag(\"v0.1.0\"))\n            .build()\n            .into();\n        let (progress, _) = InstallProgress::new();\n        let res = install_dependency_inner(&install, 
&dir, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), \"test\");\n        assert_eq!(lock.version(), \"1.0.0\");\n        let lock = lock.as_git().unwrap();\n        assert_eq!(lock.git, \"https://github.com/beeb/test-repo.git\");\n        assert_eq!(lock.rev, \"78c2f6a1a54db26bab6c3f501854a1564eb3707f\");\n        assert!(dir.join(\".git\").exists());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_registry() {\n        let server = mock_api_server().await;\n        let dir = testdir!();\n        let dep = HttpDependency::builder().name(\"forge-std\").version_req(\"1.9.2\").build().into();\n        let (progress, _) = InstallProgress::new();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            install_dependency(&dep, None, &dir, None, false, progress),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), dep.name());\n        assert_eq!(lock.version(), dep.version_req());\n        let lock = lock.as_http().unwrap();\n        assert_eq!(\n            &lock.url,\n            \"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\"\n        );\n        assert_eq!(\n            lock.checksum,\n            \"20fd008c7c69b6c737cc0284469d1c76497107bc3e004d8381f6d8781cb27980\"\n        );\n        let hash = hash_folder(lock.install_path(&dir)).unwrap();\n        assert_eq!(lock.integrity, hash.to_string());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_registry_compatible() {\n        let server = mock_api_server().await;\n        let dir = testdir!();\n        let dep = HttpDependency::builder().name(\"forge-std\").version_req(\"^1.9.0\").build().into();\n        let (progress, _) = InstallProgress::new();\n        let res = async_with_vars(\n            
[(\"SOLDEER_API_URL\", Some(server.url()))],\n            install_dependency(&dep, None, &dir, None, false, progress),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), dep.name());\n        assert_eq!(lock.version(), \"1.9.2\");\n        let lock = lock.as_http().unwrap();\n        assert_eq!(\n            &lock.url,\n            \"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\"\n        );\n        let hash = hash_folder(lock.install_path(&dir)).unwrap();\n        assert_eq!(lock.integrity, hash.to_string());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_http() {\n        let dir = testdir!();\n        let dep = HttpDependency::builder().name(\"test\").version_req(\"1.0.0\").url(\"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip\").build().into();\n        let (progress, _) = InstallProgress::new();\n        let res = install_dependency(&dep, None, &dir, None, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), dep.name());\n        assert_eq!(lock.version(), dep.version_req());\n        let lock = lock.as_http().unwrap();\n        assert_eq!(&lock.url, dep.url().unwrap());\n        assert_eq!(\n            lock.checksum,\n            \"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\n        );\n        let hash = hash_folder(lock.install_path(&dir)).unwrap();\n        assert_eq!(lock.integrity, hash.to_string());\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_git() {\n        let dir = testdir!();\n        let dep = GitDependency::builder()\n            .name(\"test\")\n            .version_req(\"1.0.0\")\n            .git(\"https://github.com/beeb/test-repo.git\")\n            .build()\n            .into();\n        let (progress, _) = 
InstallProgress::new();\n        let res = install_dependency(&dep, None, &dir, None, false, progress).await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), dep.name());\n        assert_eq!(lock.version(), dep.version_req());\n        let lock = lock.as_git().unwrap();\n        assert_eq!(&lock.git, dep.url().unwrap());\n        assert_eq!(lock.rev, \"d5d72fa135d28b2e8307650b3ea79115183f2406\");\n    }\n\n    #[tokio::test]\n    async fn test_install_dependency_private() {\n        let server = mock_api_private().await;\n        let dir = testdir!();\n        let dep =\n            HttpDependency::builder().name(\"test-private\").version_req(\"0.1.0\").build().into();\n        let (progress, _) = InstallProgress::new();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            install_dependency(&dep, None, &dir, None, false, progress),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        let lock = res.unwrap();\n        assert_eq!(lock.name(), dep.name());\n        assert_eq!(lock.version(), dep.version_req());\n        let lock = lock.as_private().unwrap();\n        assert_eq!(\n            lock.checksum,\n            \"94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468\"\n        );\n        let hash = hash_folder(lock.install_path(&dir)).unwrap();\n        assert_eq!(lock.integrity, hash.to_string());\n    }\n}\n"
  },
  {
    "path": "crates/core/src/lib.rs",
    "content": "//! Low-level library for interacting with Soldeer registries and files\n#![cfg_attr(docsrs, feature(doc_cfg))]\npub use errors::SoldeerError;\n\npub type Result<T> = std::result::Result<T, SoldeerError>;\n\npub mod auth;\npub mod config;\npub mod download;\npub mod errors;\npub mod install;\npub mod lock;\npub mod push;\npub mod registry;\npub mod remappings;\npub mod update;\npub mod utils;\n"
  },
  {
    "path": "crates/core/src/lock/forge.rs",
    "content": "//! Vendored version of the `lockfile` module of `forge`.\n//!\n//! Slightly adapted to reduce dependencies.\n\nuse log::debug;\nuse serde::{Deserialize, Serialize};\nuse std::{\n    collections::HashMap,\n    fs,\n    path::{Path, PathBuf},\n};\n\nuse crate::errors::LockError;\n\nuse super::Result;\n\npub const FOUNDRY_LOCK: &str = \"foundry.lock\";\n\n/// A type alias for a HashMap of dependencies keyed by relative path to the submodule dir.\npub type DepMap = HashMap<PathBuf, DepIdentifier>;\n\n/// A lockfile handler that keeps track of the dependencies and their current state.\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct Lockfile {\n    /// A map of the dependencies keyed by relative path to the submodule dir.\n    #[serde(flatten)]\n    deps: DepMap,\n    /// Absolute path to the lockfile.\n    #[serde(skip)]\n    lockfile_path: PathBuf,\n}\n\nimpl Lockfile {\n    /// Create a new [`Lockfile`] instance.\n    ///\n    /// `project_root` is the absolute path to the project root.\n    ///\n    /// You will need to call [`Lockfile::read`] to load the lockfile.\n    pub fn new(project_root: &Path) -> Self {\n        Self { deps: HashMap::default(), lockfile_path: project_root.join(FOUNDRY_LOCK) }\n    }\n\n    /// Loads the lockfile from the project root.\n    ///\n    /// Throws an error if the lockfile does not exist.\n    pub fn read(&mut self) -> Result<()> {\n        if !self.lockfile_path.exists() {\n            return Err(LockError::FoundryLockMissing);\n        }\n\n        let lockfile_str = fs::read_to_string(&self.lockfile_path)?;\n\n        self.deps = serde_json::from_str(&lockfile_str)?;\n\n        debug!(lockfile:? 
= self.deps; \"loaded lockfile\");\n\n        Ok(())\n    }\n\n    /// Get the [`DepIdentifier`] for a submodule at a given path.\n    pub fn get(&self, path: &Path) -> Option<&DepIdentifier> {\n        self.deps.get(path)\n    }\n\n    /// Returns the num of dependencies in the lockfile.\n    pub fn len(&self) -> usize {\n        self.deps.len()\n    }\n\n    /// Returns whether the lockfile is empty.\n    pub fn is_empty(&self) -> bool {\n        self.deps.is_empty()\n    }\n\n    /// Returns an iterator over the lockfile.\n    pub fn iter(&self) -> impl Iterator<Item = (&PathBuf, &DepIdentifier)> {\n        self.deps.iter()\n    }\n\n    pub fn exists(&self) -> bool {\n        self.lockfile_path.exists()\n    }\n}\n\n// Implement .iter() for &LockFile\n\n/// Identifies whether a dependency (submodule) is referenced by a branch,\n/// tag or rev (commit hash).\n///\n/// Each enum variant consists of an `r#override` flag which is used in `forge update` to decide\n/// whether to update a dep or not. This flag is skipped during serialization.\n#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]\npub enum DepIdentifier {\n    /// `name` of the branch and the `rev` it is currently pointing to.\n    #[serde(rename = \"branch\")]\n    Branch { name: String, rev: String },\n\n    /// Release tag `name` and the `rev` it is currently pointing to.\n    #[serde(rename = \"tag\")]\n    Tag { name: String, rev: String },\n\n    /// Commit hash `rev` the submodule is currently pointing to.\n    #[serde(rename = \"rev\", untagged)]\n    Rev { rev: String },\n}\n\nimpl DepIdentifier {\n    /// Get the commit hash of the dependency.\n    pub fn rev(&self) -> &str {\n        match self {\n            Self::Branch { rev, .. } => rev,\n            Self::Tag { rev, .. } => rev,\n            Self::Rev { rev, .. 
} => rev,\n        }\n    }\n\n    /// Get the name of the dependency.\n    ///\n    /// In case of a Rev, this will return the commit hash.\n    pub fn name(&self) -> &str {\n        match self {\n            Self::Branch { name, .. } => name,\n            Self::Tag { name, .. } => name,\n            Self::Rev { rev, .. } => rev,\n        }\n    }\n\n    /// Get the name/rev to checkout at.\n    pub fn checkout_id(&self) -> &str {\n        match self {\n            Self::Branch { name, .. } => name,\n            Self::Tag { name, .. } => name,\n            Self::Rev { rev, .. } => rev,\n        }\n    }\n\n    /// Returns whether the dependency is a branch.\n    pub fn is_branch(&self) -> bool {\n        matches!(self, Self::Branch { .. })\n    }\n}\n\nimpl std::fmt::Display for DepIdentifier {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        match self {\n            Self::Branch { name, rev, .. } => write!(f, \"branch={name}@{rev}\"),\n            Self::Tag { name, rev, .. } => write!(f, \"tag={name}@{rev}\"),\n            Self::Rev { rev, .. } => write!(f, \"rev={rev}\"),\n        }\n    }\n}\n"
  },
  {
    "path": "crates/core/src/lock.rs",
    "content": "//! Lockfile handling.\n//!\n//! The lockfile contains the resolved dependencies of a project. It is a TOML file with an array of\n//! dependencies, each containing the name, version, and other information about the dependency.\n//!\n//! The lockfile is used to ensure that the same versions of dependencies are installed across\n//! different machines. It is also used to skip the installation of dependencies that are already\n//! installed.\nuse crate::{config::Dependency, errors::LockError, utils::sanitize_filename};\nuse log::{debug, warn};\nuse serde::{Deserialize, Serialize};\nuse std::{\n    fs,\n    path::{Path, PathBuf},\n};\n\npub mod forge;\n\npub const SOLDEER_LOCK: &str = \"soldeer.lock\";\n\npub type Result<T> = std::result::Result<T, LockError>;\n\n/// A trait implemented by lockfile entries to provide the install path\npub trait Integrity {\n    /// Returns the install path of the dependency.\n    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf;\n\n    /// Returns the integrity checksum if relevant.\n    fn integrity(&self) -> Option<&String>;\n}\n\n/// A lock entry for a git dependency.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[builder(on(String, into))]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n#[non_exhaustive]\npub struct GitLockEntry {\n    /// The name of the dependency.\n    pub name: String,\n\n    /// The version (this corresponds to the version requirement of the dependency).\n    pub version: String,\n\n    /// The git url of the dependency.\n    pub git: String,\n\n    /// The resolved git commit hash.\n    pub rev: String,\n}\n\nimpl Integrity for GitLockEntry {\n    /// Returns the install path of the dependency.\n    ///\n    /// The directory does not need to exist. 
Since the lock entry contains the version,\n    /// the install path can be calculated without needing to check the actual directory.\n    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {\n        format_install_path(&self.name, &self.version, deps)\n    }\n\n    /// There is no integrity checksum for git lock entries\n    fn integrity(&self) -> Option<&String> {\n        None\n    }\n}\n\n/// A lock entry for an HTTP dependency.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[builder(on(String, into))]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n#[non_exhaustive]\npub struct HttpLockEntry {\n    /// The name of the dependency.\n    pub name: String,\n\n    /// The resolved version of the dependency (not necessarily matches the version requirement of\n    /// the dependency).\n    ///\n    /// If the version req is a semver range, then this will be the exact version that was\n    /// resolved.\n    pub version: String,\n\n    /// The URL from where the dependency was downloaded.\n    pub url: String,\n\n    /// The checksum of the downloaded zip file.\n    pub checksum: String,\n\n    /// The integrity hash of the downloaded zip file after extraction.\n    pub integrity: String,\n}\n\nimpl Integrity for HttpLockEntry {\n    /// Returns the install path of the dependency.\n    ///\n    /// The directory does not need to exist. 
Since the lock entry contains the version,\n    /// the install path can be calculated without needing to check the actual directory.\n    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {\n        format_install_path(&self.name, &self.version, deps)\n    }\n\n    /// Returns the integrity checksum\n    fn integrity(&self) -> Option<&String> {\n        Some(&self.integrity)\n    }\n}\n\n/// A lock entry for a private dependency.\n///\n/// The link is not stored in the lockfile as it must be fetched from the registry with a valid\n/// token before each download.\n#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]\n#[builder(on(String, into))]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n#[non_exhaustive]\npub struct PrivateLockEntry {\n    /// The name of the dependency.\n    pub name: String,\n\n    /// The resolved version of the dependency (not necessarily matches the version requirement of\n    /// the dependency).\n    ///\n    /// If the version req is a semver range, then this will be the exact version that was\n    /// resolved.\n    pub version: String,\n\n    /// The checksum of the downloaded zip file.\n    pub checksum: String,\n\n    /// The integrity hash of the downloaded zip file after extraction.\n    pub integrity: String,\n}\n\nimpl Integrity for PrivateLockEntry {\n    /// Returns the install path of the dependency.\n    ///\n    /// The directory does not need to exist. 
Since the lock entry contains the version,\n    /// the install path can be calculated without needing to check the actual directory.\n    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {\n        format_install_path(&self.name, &self.version, deps)\n    }\n\n    /// Returns the integrity checksum\n    fn integrity(&self) -> Option<&String> {\n        Some(&self.integrity)\n    }\n}\n\n/// A lock entry for a dependency.\n///\n/// A builder should be used to create the underlying [`HttpLockEntry`] or [`GitLockEntry`] and then\n/// converted into this type with `.into()`.\n///\n/// # Examples\n///\n/// ```\n/// # use soldeer_core::lock::{LockEntry, HttpLockEntry};\n/// let dep: LockEntry = HttpLockEntry::builder()\n///     .name(\"my-dep\")\n///     .version(\"1.2.3\")\n///     .url(\"https://...\")\n///     .checksum(\"dead\")\n///     .integrity(\"beef\")\n///     .build()\n///     .into();\n/// ```\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n#[cfg_attr(feature = \"serde\", serde(rename_all = \"lowercase\"))]\n#[non_exhaustive]\npub enum LockEntry {\n    /// A lock entry for an HTTP dependency.\n    Http(HttpLockEntry),\n\n    /// A lock entry for a git dependency.\n    Git(GitLockEntry),\n\n    /// A lock entry for a private dependency.\n    Private(PrivateLockEntry),\n}\n\n/// A TOML representation of a lock entry, which merges all fields from the three variants of\n/// [`LockEntry`].\n///\n/// This is used to serialize and deserialize lock entries to and from TOML. 
All fields which are\n/// not present in both variants are optional.\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[non_exhaustive]\npub struct TomlLockEntry {\n    pub name: String,\n    pub version: String,\n    pub git: Option<String>,\n    pub url: Option<String>,\n    pub rev: Option<String>,\n    pub checksum: Option<String>,\n    pub integrity: Option<String>,\n}\n\nimpl From<LockEntry> for TomlLockEntry {\n    /// Convert a [`LockEntry`] into a [`TomlLockEntry`].\n    fn from(value: LockEntry) -> Self {\n        match value {\n            LockEntry::Http(lock) => Self {\n                name: lock.name,\n                version: lock.version,\n                git: None,\n                url: Some(lock.url),\n                rev: None,\n                checksum: Some(lock.checksum),\n                integrity: Some(lock.integrity),\n            },\n            LockEntry::Git(lock) => Self {\n                name: lock.name,\n                version: lock.version,\n                git: Some(lock.git),\n                url: None,\n                rev: Some(lock.rev),\n                checksum: None,\n                integrity: None,\n            },\n            LockEntry::Private(lock) => Self {\n                name: lock.name,\n                version: lock.version,\n                git: None,\n                url: None,\n                rev: None,\n                checksum: Some(lock.checksum),\n                integrity: Some(lock.integrity),\n            },\n        }\n    }\n}\n\nimpl TryFrom<TomlLockEntry> for LockEntry {\n    type Error = LockError;\n\n    /// Convert a [`TomlLockEntry`] into a [`LockEntry`] if possible.\n    fn try_from(value: TomlLockEntry) -> std::result::Result<Self, Self::Error> {\n        match (value.url, value.git) {\n            (None, None) => Ok(PrivateLockEntry::builder()\n                .name(&value.name)\n                .version(value.version)\n                
.checksum(value.checksum.ok_or(LockError::MissingField {\n                    field: \"checksum\".to_string(),\n                    dep: value.name.clone(),\n                })?)\n                .integrity(value.integrity.ok_or(LockError::MissingField {\n                    field: \"integrity\".to_string(),\n                    dep: value.name,\n                })?)\n                .build()\n                .into()),\n            (None, Some(git)) => {\n                Ok(GitLockEntry::builder()\n                    .name(&value.name)\n                    .version(value.version)\n                    .git(git)\n                    .rev(value.rev.ok_or(LockError::MissingField {\n                        field: \"rev\".to_string(),\n                        dep: value.name,\n                    })?)\n                    .build()\n                    .into())\n            }\n            (Some(url), None) => Ok(HttpLockEntry::builder()\n                .name(&value.name)\n                .version(value.version)\n                .url(url)\n                .checksum(value.checksum.ok_or(LockError::MissingField {\n                    field: \"checksum\".to_string(),\n                    dep: value.name.clone(),\n                })?)\n                .integrity(value.integrity.ok_or(LockError::MissingField {\n                    field: \"integrity\".to_string(),\n                    dep: value.name,\n                })?)\n                .build()\n                .into()),\n            (Some(_), Some(_)) => Err(LockError::InvalidLockEntry),\n        }\n    }\n}\n\nimpl LockEntry {\n    /// The name of the dependency.\n    pub fn name(&self) -> &str {\n        match self {\n            Self::Git(lock) => &lock.name,\n            Self::Http(lock) => &lock.name,\n            Self::Private(lock) => &lock.name,\n        }\n    }\n\n    /// The version of the dependency.\n    pub fn version(&self) -> &str {\n        match self {\n            Self::Git(lock) => &lock.version,\n    
        Self::Http(lock) => &lock.version,\n            Self::Private(lock) => &lock.version,\n        }\n    }\n\n    /// The install path of the dependency.\n    pub fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {\n        match self {\n            Self::Git(lock) => lock.install_path(deps),\n            Self::Http(lock) => lock.install_path(deps),\n            Self::Private(lock) => lock.install_path(deps),\n        }\n    }\n\n    /// Get the underlying [`HttpLockEntry`] if this is an HTTP lock entry.\n    pub fn as_http(&self) -> Option<&HttpLockEntry> {\n        if let Self::Http(l) = self { Some(l) } else { None }\n    }\n\n    /// Get the underlying [`GitLockEntry`] if this is a git lock entry.\n    pub fn as_git(&self) -> Option<&GitLockEntry> {\n        if let Self::Git(l) = self { Some(l) } else { None }\n    }\n\n    /// Get the underlying [`PrivateLockEntry`] if this is a private package lock entry.\n    pub fn as_private(&self) -> Option<&PrivateLockEntry> {\n        if let Self::Private(l) = self { Some(l) } else { None }\n    }\n}\n\nimpl From<HttpLockEntry> for LockEntry {\n    /// Wrap an [`HttpLockEntry`] in a [`LockEntry`].\n    fn from(value: HttpLockEntry) -> Self {\n        Self::Http(value)\n    }\n}\n\nimpl From<GitLockEntry> for LockEntry {\n    /// Wrap a [`GitLockEntry`] in a [`LockEntry`].\n    fn from(value: GitLockEntry) -> Self {\n        Self::Git(value)\n    }\n}\n\nimpl From<PrivateLockEntry> for LockEntry {\n    /// Wrap a [`PrivateLockEntry`] in a [`LockEntry`].\n    fn from(value: PrivateLockEntry) -> Self {\n        Self::Private(value)\n    }\n}\n\n/// A parsed TOML lock file.\n///\n/// The lockfile is a table with one entry `dependencies` containing an array of [`TomlLockEntry`]s.\n#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash)]\nstruct LockFileParsed {\n    dependencies: Vec<TomlLockEntry>,\n}\n\n/// The result of reading and parsing a lock file.\n///\n/// The [`TomlLockEntry`]s are 
converted into [`LockEntry`]s. A copy of the text contents of\n/// the lockfile is provided for diffing purposes.\n#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct LockFile {\n    /// The parsed lock entries.\n    pub entries: Vec<LockEntry>,\n\n    /// The raw contents of the lockfile.\n    pub raw: String,\n}\n\n/// Read a lockfile from disk.\npub fn read_lockfile(path: impl AsRef<Path>) -> Result<LockFile> {\n    if !path.as_ref().exists() {\n        debug!(path:? = path.as_ref(); \"lockfile does not exist\");\n        return Ok(LockFile::default());\n    }\n    let contents = fs::read_to_string(&path)?;\n\n    let data: LockFileParsed = toml_edit::de::from_str(&contents)\n        .inspect_err(|err| {\n            warn!(err:?; \"error while parsing lockfile contents, it will be ignored\");\n        })\n        .unwrap_or_default();\n    Ok(LockFile {\n        entries: data.dependencies.into_iter().filter_map(|d| d.try_into().ok()).collect(),\n        raw: contents,\n    })\n}\n\n/// Generate the contents of a lockfile from a list of lock entries.\n///\n/// The entries do not need to be sorted, they will be sorted by name.\npub fn generate_lockfile_contents(mut entries: Vec<LockEntry>) -> String {\n    entries.sort_unstable_by(|a, b| a.name().cmp(b.name()));\n    let data = LockFileParsed { dependencies: entries.into_iter().map(Into::into).collect() };\n    toml_edit::ser::to_string_pretty(&data).expect(\"Lock entries should be serializable\")\n}\n\n/// Add a lock entry to a lockfile.\n///\n/// If an entry with the same name already exists, it will be replaced.\n/// The entries are sorted by name before being written back to the file.\npub fn add_to_lockfile(entry: LockEntry, path: impl AsRef<Path>) -> Result<()> {\n    let mut lockfile = read_lockfile(&path)?;\n    if let Some(index) = lockfile.entries.iter().position(|e| e.name() == entry.name()) {\n        debug!(name = 
entry.name(); \"replacing existing lockfile entry\");\n        let _ = std::mem::replace(&mut lockfile.entries[index], entry);\n    } else {\n        debug!(name = entry.name(); \"adding new lockfile entry\");\n        lockfile.entries.push(entry);\n    }\n    let new_contents = generate_lockfile_contents(lockfile.entries);\n    fs::write(&path, new_contents)?;\n    debug!(path:? = path.as_ref(); \"lockfile modified\");\n    Ok(())\n}\n\n/// Remove a lock entry from a lockfile, matching on the name.\n///\n/// If the entry is the last entry in the lockfile, the lockfile will be removed.\npub fn remove_lock(dependency: &Dependency, path: impl AsRef<Path>) -> Result<()> {\n    let lockfile = read_lockfile(&path)?;\n\n    let entries: Vec<_> = lockfile\n        .entries\n        .into_iter()\n        .filter_map(|e| if e.name() != dependency.name() { Some(e.into()) } else { None })\n        .collect();\n\n    if entries.is_empty() {\n        // remove lock file if there are no deps left\n        debug!(path:? = path.as_ref(); \"no remaining lockfile entry, deleting file\");\n        let _ = fs::remove_file(&path);\n        return Ok(());\n    }\n\n    let file_contents =\n        toml_edit::ser::to_string_pretty(&LockFileParsed { dependencies: entries })?;\n\n    // replace contents of lockfile with new contents\n    fs::write(&path, file_contents)?;\n    debug!(path:? 
= path.as_ref(); \"lockfile modified\");\n    Ok(())\n}\n\n/// Format the install path of a dependency.\n///\n/// The folder name is sanitized to remove disallowed characters.\npub fn format_install_path(name: &str, version: &str, deps: impl AsRef<Path>) -> PathBuf {\n    deps.as_ref().join(sanitize_filename(&format!(\"{name}-{version}\")))\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use testdir::testdir;\n\n    #[test]\n    fn test_toml_to_lock_entry_conversion_http() {\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: None,\n            url: Some(\"https://example.com/zip.zip\".to_string()),\n            rev: None,\n            checksum: Some(\"123456\".to_string()),\n            integrity: Some(\"beef\".to_string()),\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(entry.is_ok(), \"{entry:?}\");\n        let entry = entry.unwrap();\n        assert_eq!(entry.name(), \"test\");\n        assert_eq!(entry.version(), \"1.0.0\");\n        let http = entry.as_http().unwrap();\n        assert_eq!(http.url, \"https://example.com/zip.zip\");\n        assert_eq!(http.checksum, \"123456\");\n        assert_eq!(http.integrity, \"beef\");\n    }\n\n    #[test]\n    fn test_toml_to_lock_entry_conversion_git() {\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: Some(\"git@github.com:test/test.git\".to_string()),\n            url: None,\n            rev: Some(\"123456\".to_string()),\n            checksum: None,\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(entry.is_ok(), \"{entry:?}\");\n        let entry = entry.unwrap();\n        assert_eq!(entry.name(), \"test\");\n        assert_eq!(entry.version(), \"1.0.0\");\n        let git = entry.as_git().unwrap();\n  
      assert_eq!(git.git, \"git@github.com:test/test.git\");\n        assert_eq!(git.rev, \"123456\");\n    }\n\n    #[test]\n    fn test_toml_lock_entry_bad_http() {\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: None,\n            url: Some(\"https://example.com/zip.zip\".to_string()),\n            rev: None,\n            checksum: None,\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(\n            matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == \"checksum\"),\n            \"{entry:?}\"\n        );\n\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: None,\n            url: Some(\"https://example.com/zip.zip\".to_string()),\n            rev: None,\n            checksum: Some(\"123456\".to_string()),\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(\n            matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == \"integrity\"),\n            \"{entry:?}\"\n        );\n    }\n\n    #[test]\n    fn test_toml_lock_entry_bad_private() {\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: None,\n            url: None,\n            rev: None,\n            checksum: None,\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(\n            matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == \"checksum\"),\n            \"{entry:?}\"\n        );\n    }\n\n    #[test]\n    fn test_toml_lock_entry_bad_git() {\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n        
    version: \"1.0.0\".to_string(),\n            git: Some(\"git@github.com:test/test.git\".to_string()),\n            url: Some(\"https://example.com/zip.zip\".to_string()),\n            rev: None,\n            checksum: None,\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(matches!(entry, Err(LockError::InvalidLockEntry)), \"{entry:?}\");\n\n        let toml_entry = TomlLockEntry {\n            name: \"test\".to_string(),\n            version: \"1.0.0\".to_string(),\n            git: Some(\"git@github.com:test/test.git\".to_string()),\n            url: None,\n            rev: None,\n            checksum: None,\n            integrity: None,\n        };\n        let entry: Result<LockEntry> = toml_entry.try_into();\n        assert!(\n            matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == \"rev\"),\n            \"{entry:?}\"\n        );\n    }\n\n    #[test]\n    fn test_read_lockfile() {\n        let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        // last entry is invalid and should be skipped\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\n[[dependencies]]\nname = \"test2\"\nversion = \"1.0.0\"\nurl = \"https://example.com/zip.zip\"\nchecksum = \"123456\"\nintegrity = \"beef\"\n\n[[dependencies]]\nname = \"test3\"\nversion = \"1.0.0\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let res = read_lockfile(&file_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let lockfile = res.unwrap();\n        assert_eq!(lockfile.entries.len(), 2);\n        assert_eq!(lockfile.entries[0].name(), \"test\");\n        assert_eq!(lockfile.entries[0].version(), \"1.0.0\");\n        let git = lockfile.entries[0].as_git().unwrap();\n        assert_eq!(git.git, \"git@github.com:test/test.git\");\n        assert_eq!(git.rev, 
\"123456\");\n        assert_eq!(lockfile.entries[1].name(), \"test2\");\n        assert_eq!(lockfile.entries[1].version(), \"1.0.0\");\n        let http = lockfile.entries[1].as_http().unwrap();\n        assert_eq!(http.url, \"https://example.com/zip.zip\");\n        assert_eq!(http.checksum, \"123456\");\n        assert_eq!(http.integrity, \"beef\");\n        assert_eq!(lockfile.raw, content);\n    }\n\n    #[test]\n    fn test_generate_lockfile_content() {\n        let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\n[[dependencies]]\nname = \"test2\"\nversion = \"1.0.0\"\nurl = \"https://example.com/zip.zip\"\nchecksum = \"123456\"\nintegrity = \"beef\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let lockfile = read_lockfile(&file_path).unwrap();\n        let new_content = generate_lockfile_contents(lockfile.entries);\n        assert_eq!(new_content, content);\n    }\n\n    #[test]\n    fn test_add_to_lockfile() {\n        let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let entry: LockEntry = HttpLockEntry::builder()\n            .name(\"test2\")\n            .version(\"1.0.0\")\n            .url(\"https://example.com/zip.zip\")\n            .checksum(\"123456\")\n            .integrity(\"beef\")\n            .build()\n            .into();\n        let res = add_to_lockfile(entry.clone(), &file_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let lockfile = read_lockfile(&file_path).unwrap();\n        assert_eq!(lockfile.entries.len(), 2);\n        assert_eq!(lockfile.entries[1], entry);\n    }\n\n    #[test]\n    fn test_replace_in_lockfile() {\n  
      let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let entry: LockEntry = HttpLockEntry::builder()\n            .name(\"test\")\n            .version(\"2.0.0\")\n            .url(\"https://example.com/zip.zip\")\n            .checksum(\"123456\")\n            .integrity(\"beef\")\n            .build()\n            .into();\n        let res = add_to_lockfile(entry.clone(), &file_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let lockfile = read_lockfile(&file_path).unwrap();\n        assert_eq!(lockfile.entries.len(), 1);\n        assert_eq!(lockfile.entries[0], entry);\n    }\n\n    #[test]\n    fn test_remove_lock() {\n        let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\n[[dependencies]]\nname = \"test2\"\nversion = \"1.0.0\"\nurl = \"https://example.com/zip.zip\"\nchecksum = \"123456\"\nintegrity = \"beef\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let dep = Dependency::from_name_version(\"test2~2.0.0\", None, None).unwrap();\n        let res = remove_lock(&dep, &file_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        let lockfile = read_lockfile(&file_path).unwrap();\n        assert_eq!(lockfile.entries.len(), 1);\n        assert_eq!(lockfile.entries[0].name(), \"test\");\n    }\n\n    #[test]\n    fn test_remove_lock_empty() {\n        let dir = testdir!();\n        let file_path = dir.join(SOLDEER_LOCK);\n        let content = r#\"[[dependencies]]\nname = \"test\"\nversion = \"1.0.0\"\ngit = \"git@github.com:test/test.git\"\nrev = \"123456\"\n\"#;\n        fs::write(&file_path, content).unwrap();\n        let dep = 
Dependency::from_name_version(\"test~1.0.0\", None, None).unwrap();\n        let res = remove_lock(&dep, &file_path);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert!(!file_path.exists());\n    }\n}\n"
  },
  {
    "path": "crates/core/src/push.rs",
    "content": "//! Handle publishing of a dependency to the registry.\nuse crate::{\n    auth::get_token,\n    errors::{AuthError, PublishError},\n    registry::{api_url, get_project_id},\n};\nuse ignore::{WalkBuilder, WalkState};\nuse log::debug;\nuse path_slash::{PathBufExt as _, PathExt as _};\nuse regex::Regex;\nuse reqwest::{\n    Client, StatusCode,\n    header::{AUTHORIZATION, CONTENT_TYPE, HeaderMap, HeaderValue},\n    multipart::{Form, Part},\n};\nuse std::{\n    fs,\n    io::{Read as _, Write as _},\n    path::{Path, PathBuf},\n    sync::mpsc,\n};\nuse zip::{CompressionMethod, ZipWriter, write::SimpleFileOptions};\n\npub type Result<T> = std::result::Result<T, PublishError>;\n\n/// Push a new version of a dependency to the registry.\n///\n/// The provided root folder will be zipped and uploaded to the registry, then deleted, unless the\n/// `dry_run` argument is set to `true`. In that case, the function will only create the zip file\n/// and return its path.\n///\n/// An authentication token is required to push a zip file to the registry. The token is retrieved\n/// from the login file (see [`login_file_path`][crate::utils::login_file_path] and\n/// [`execute_login`][crate::auth::execute_login]).\npub async fn push_version(\n    dependency_name: &str,\n    dependency_version: &str,\n    root_directory_path: impl AsRef<Path>,\n    files_to_copy: &[PathBuf],\n    dry_run: bool,\n) -> Result<Option<PathBuf>> {\n    let file_name =\n        root_directory_path.as_ref().file_name().expect(\"path should have a last component\");\n\n    let zip_archive = match zip_file(&root_directory_path, files_to_copy, file_name) {\n        Ok(zip) => zip,\n        Err(err) => {\n            return Err(err);\n        }\n    };\n    debug!(root:? 
= root_directory_path.as_ref(), zip_archive:?; \"created zip file from folder\");\n\n    if dry_run {\n        debug!(zip_archive:?; \"push dry run, zip file created but not uploading\");\n        return Ok(Some(PathBuf::from_slash_lossy(&zip_archive)));\n    }\n\n    if let Err(error) = push_to_repo(&zip_archive, dependency_name, dependency_version).await {\n        let _ = fs::remove_file(&zip_archive);\n        debug!(zip_archive:?; \"zip file deleted\");\n        return Err(error);\n    }\n\n    let _ = fs::remove_file(&zip_archive);\n    debug!(zip_archive:?; \"zip file deleted\");\n\n    Ok(None)\n}\n\n/// Validate the name of a dependency.\n///\n/// The name must be between 3 and 100 characters long, and can only contain lowercase letters,\n/// numbers, hyphens and the `@` symbol. It cannot start or end with a hyphen.\npub fn validate_name(name: &str) -> Result<()> {\n    let regex = Regex::new(r\"^[@|a-z0-9][a-z0-9-]*[a-z0-9]$\").expect(\"regex should compile\");\n    if !regex.is_match(name) {\n        debug!(name; \"package name contains unauthorized characters\");\n        return Err(PublishError::InvalidName);\n    }\n    if !(3..=100).contains(&name.len()) {\n        debug!(name; \"package name is too short or too long\");\n        return Err(PublishError::InvalidName);\n    }\n    Ok(())\n}\n\npub fn validate_version(version: &str) -> Result<()> {\n    if version.is_empty() {\n        return Err(PublishError::EmptyVersion);\n    }\n    Ok(())\n}\n\n/// Create a zip file from a list of files.\n///\n/// The zip file will be created in the root directory, with the provided name and the `.zip`\n/// extension. 
The function returns the path to the created zip file.\npub fn zip_file(\n    root_directory_path: impl AsRef<Path>,\n    files_to_copy: &[PathBuf],\n    file_name: impl Into<PathBuf>,\n) -> Result<PathBuf> {\n    let mut file_name: PathBuf = file_name.into();\n    file_name.set_extension(\"zip\");\n    let zip_file_path = root_directory_path.as_ref().join(file_name);\n    let file = fs::File::create(&zip_file_path)\n        .map_err(|e| PublishError::IOError { path: zip_file_path.clone(), source: e })?;\n    debug!(path:? = zip_file_path; \"zip file handle created\");\n    let mut zip = ZipWriter::new(file);\n    let options = SimpleFileOptions::default().compression_method(CompressionMethod::Deflated);\n    if files_to_copy.is_empty() {\n        return Err(PublishError::NoFiles);\n    }\n    let mut added_dirs = Vec::new();\n\n    for file_path in files_to_copy {\n        let path = file_path.as_path();\n        if !path.is_file() {\n            debug!(path:?; \"skipping non-file entry\");\n            continue;\n        }\n\n        // This is the relative path, we basically get the relative path to the target folder\n        // that we want to push and zip that as a name so we won't screw up the\n        // file/dir hierarchy in the zip file.\n        let relative_file_path = file_path.strip_prefix(root_directory_path.as_ref())?;\n        debug!(relative_path:? = relative_file_path; \"resolved relative file path for zip archive\");\n\n        // we add folders explicitly to the zip file, some tools might not handle this properly\n        // otherwise\n        if let Some(parent) = relative_file_path.parent() &&\n            !parent.as_os_str().is_empty() &&\n            !added_dirs.contains(&parent)\n        {\n            zip.add_directory(parent.to_slash_lossy(), options)?;\n            debug!(folder:? 
= parent; \"added parent directory in zip archive\");\n            added_dirs.push(parent);\n        }\n\n        let mut f = fs::File::open(file_path.clone())\n            .map_err(|e| PublishError::IOError { path: file_path.clone(), source: e })?;\n        let mut buffer = Vec::new();\n        zip.start_file(relative_file_path.to_slash_lossy(), options)?;\n        f.read_to_end(&mut buffer)\n            .map_err(|e| PublishError::IOError { path: file_path.clone(), source: e })?;\n        zip.write_all(&buffer)\n            .map_err(|e| PublishError::IOError { path: zip_file_path.clone(), source: e })?;\n        debug!(file:? = path; \"file added to zip archive\");\n    }\n    zip.finish()?;\n    debug!(path:? = zip_file_path; \"zip archive written\");\n    Ok(zip_file_path)\n}\n\n/// Filter the files in a directory according to ignore rules.\n///\n/// The following ignore files are supported:\n/// - `.ignore`\n/// - `.gitignore` (including any global one)\n/// - `.git/info/exclude`\n/// - `.soldeerignore`\n///\n/// The `.git` folders are always skipped.\npub fn filter_ignored_files(root_directory_path: impl AsRef<Path>) -> Vec<PathBuf> {\n    let (tx, rx) = mpsc::channel::<PathBuf>();\n    let walker = WalkBuilder::new(root_directory_path)\n        .add_custom_ignore_filename(\".soldeerignore\")\n        .hidden(false)\n        .filter_entry(|entry| {\n            !(entry.path().is_dir() && entry.path().file_name().unwrap_or_default() == \".git\")\n        })\n        .build_parallel();\n    walker.run(|| {\n        let tx = tx.clone();\n        // function executed for each DirEntry\n        Box::new(move |result| {\n            let Ok(entry) = result else {\n                return WalkState::Continue;\n            };\n            let path = entry.path();\n            if path.is_dir() {\n                debug!(path:?; \"ignoring dir entry\");\n                return WalkState::Continue;\n            }\n            debug!(path:?; \"adding file to list\");\n       
     tx.send(path.to_path_buf())\n                .expect(\"Channel receiver should never be dropped before end of function scope\");\n            WalkState::Continue\n        })\n    });\n\n    drop(tx);\n    // this cannot happen before tx is dropped safely\n    let mut files = Vec::new();\n    while let Ok(path) = rx.recv() {\n        files.push(path);\n    }\n    files\n}\n\n/// Push a zip file to the registry.\n///\n/// An authentication token is required to push a zip file to the registry. The token is retrieved\n/// from the login file (see [`login_file_path`][crate::utils::login_file_path] and\n/// [`execute_login`][crate::auth::execute_login]).\nasync fn push_to_repo(\n    zip_file: &Path,\n    dependency_name: &str,\n    dependency_version: &str,\n) -> Result<()> {\n    debug!(zip_file:?; \"uploading zip archive to registry\");\n    let token = get_token()?;\n    let client = Client::new();\n\n    let url = api_url(\"v1\", \"revision/upload\", &[]);\n\n    let mut headers: HeaderMap = HeaderMap::new();\n\n    let header_string = format!(\"Bearer {token}\");\n    let header_value = HeaderValue::from_str(&header_string);\n\n    headers.insert(AUTHORIZATION, header_value.expect(\"Could not set auth header\"));\n\n    let file_fs = fs::read(zip_file)\n        .map_err(|e| PublishError::IOError { path: zip_file.to_path_buf(), source: e })?;\n    let mut part = Part::bytes(file_fs).file_name(\n        zip_file\n            .file_name()\n            .expect(\"path should have a last component\")\n            .to_string_lossy()\n            .into_owned(),\n    );\n\n    // set the mime as app zip\n    part = part.mime_str(\"application/zip\").expect(\"Could not set mime type\");\n\n    let project_id = get_project_id(dependency_name).await?;\n    debug!(project_id; \"project ID fetched from registry\");\n\n    let form = Form::new()\n        .text(\"project_id\", project_id)\n        .text(\"revision\", dependency_version.to_string())\n        .part(\"zip_name\", 
part);\n\n    headers.insert(\n        CONTENT_TYPE,\n        HeaderValue::from_str(&(\"multipart/form-data; boundary=\".to_owned() + form.boundary()))\n            .expect(\"Could not set content type\"),\n    );\n    let response = client.post(url).headers(headers.clone()).multipart(form).send().await?;\n    match response.status() {\n        StatusCode::OK => Ok(()),\n        StatusCode::NO_CONTENT => Err(PublishError::ProjectNotFound),\n        StatusCode::ALREADY_REPORTED => Err(PublishError::AlreadyExists),\n        StatusCode::UNAUTHORIZED => Err(PublishError::AuthError(AuthError::InvalidCredentials)),\n        StatusCode::PAYLOAD_TOO_LARGE => Err(PublishError::PayloadTooLarge),\n        s if s.is_server_error() || s.is_client_error() => Err(PublishError::HttpError(\n            response.error_for_status().expect_err(\"result should be an error\"),\n        )),\n        _ => Err(PublishError::UnknownError),\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::download::unzip_file;\n    use std::fs;\n    use testdir::testdir;\n\n    #[test]\n    fn test_validate_name() {\n        assert!(validate_name(\"foo\").is_ok());\n        assert!(validate_name(\"test\").is_ok());\n        assert!(validate_name(\"test-123\").is_ok());\n        assert!(validate_name(\"@test-123\").is_ok());\n\n        assert!(validate_name(\"t\").is_err());\n        assert!(validate_name(\"te\").is_err());\n        assert!(validate_name(\"@t\").is_err());\n        assert!(validate_name(\"test@123\").is_err());\n        assert!(validate_name(\"test-123-\").is_err());\n        assert!(validate_name(\"foo.bar\").is_err());\n        assert!(validate_name(\"mypäckage\").is_err());\n        assert!(validate_name(&\"a\".repeat(101)).is_err());\n    }\n\n    #[test]\n    fn test_empty_version() {\n        assert!(validate_version(\"\").is_err());\n    }\n\n    #[test]\n    fn test_filter_files_to_copy() {\n        let dir = testdir!();\n        // ignore file\n        // 
*.toml\n        // !/broadcast\n        // /broadcast/31337/\n        // /broadcast/*/dry_run/\n        fs::write(\n            dir.join(\".soldeerignore\"),\n            \"*.toml\\n!/broadcast\\n/broadcast/31337/\\n/broadcast/*/dry_run/\\n\",\n        )\n        .unwrap();\n\n        let mut ignored = Vec::new();\n        let mut included = vec![dir.join(\".soldeerignore\")];\n\n        // test structure\n        // - testdir/\n        // --- .soldeerignore <= not ignored\n        // --- random_dir/\n        // --- --- random.toml <= ignored\n        // --- --- random.zip <= not ignored\n        // --- broadcast/\n        // --- --- random.toml <= ignored\n        // --- --- random.zip <= not ignored\n        // --- --- 31337/\n        // --- --- --- random.toml <= ignored\n        // --- --- --- random.zip <= ignored\n        // --- --- random_dir_in_broadcast/\n        // --- --- --- random.zip <= not ignored\n        // --- --- --- random.toml <= ignored\n        // --- --- --- dry_run/\n        // --- --- --- --- zip <= ignored\n        // --- --- --- --- toml <= ignored\n        fs::create_dir(dir.join(\"random_dir\")).unwrap();\n        fs::create_dir(dir.join(\"broadcast\")).unwrap();\n        fs::create_dir(dir.join(\"broadcast/31337\")).unwrap();\n        fs::create_dir(dir.join(\"broadcast/random_dir_in_broadcast\")).unwrap();\n        fs::create_dir(dir.join(\"broadcast/random_dir_in_broadcast/dry_run\")).unwrap();\n\n        ignored.push(dir.join(\"random_dir/random.toml\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        included.push(dir.join(\"random_dir/random.zip\"));\n        fs::write(included.last().unwrap(), \"included\").unwrap();\n        ignored.push(dir.join(\"broadcast/random.toml\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        included.push(dir.join(\"broadcast/random.zip\"));\n        fs::write(included.last().unwrap(), \"included\").unwrap();\n        
ignored.push(dir.join(\"broadcast/31337/random.toml\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        ignored.push(dir.join(\"broadcast/31337/random.zip\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        included.push(dir.join(\"broadcast/random_dir_in_broadcast/random.zip\"));\n        fs::write(included.last().unwrap(), \"included\").unwrap();\n        ignored.push(dir.join(\"broadcast/random_dir_in_broadcast/random.toml\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        ignored.push(dir.join(\"broadcast/random_dir_in_broadcast/dry_run/zip\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n        ignored.push(dir.join(\"broadcast/random_dir_in_broadcast/dry_run/toml\"));\n        fs::write(ignored.last().unwrap(), \"ignored\").unwrap();\n\n        let res = filter_ignored_files(&dir);\n        assert_eq!(res.len(), included.len());\n        for r in res {\n            assert!(included.contains(&r));\n        }\n    }\n\n    #[tokio::test]\n    async fn test_zip_file() {\n        let dir = testdir!().join(\"test_zip\");\n        fs::create_dir(&dir).unwrap();\n        let mut files = Vec::new();\n        files.push(dir.join(\"a.txt\"));\n        fs::write(files.last().unwrap(), \"test\").unwrap();\n        files.push(dir.join(\"b.txt\"));\n        fs::write(files.last().unwrap(), \"test\").unwrap();\n        fs::create_dir(dir.join(\"sub\")).unwrap();\n        files.push(dir.join(\"sub/c.txt\"));\n        fs::write(files.last().unwrap(), \"test\").unwrap();\n        fs::create_dir(dir.join(\"sub/sub\")).unwrap();\n        files.push(dir.join(\"sub/sub/d.txt\"));\n        fs::write(files.last().unwrap(), \"test\").unwrap();\n        fs::create_dir(dir.join(\"empty\")).unwrap();\n\n        let res = zip_file(&dir, &files, \"test\");\n        assert!(res.is_ok(), \"{res:?}\");\n\n        fs::copy(dir.join(\"test.zip\"), 
testdir!().join(\"test.zip\")).unwrap();\n        fs::remove_dir_all(&dir).unwrap();\n        fs::create_dir(&dir).unwrap();\n        unzip_file(testdir!().join(\"test.zip\"), &dir).await.unwrap();\n        for f in files {\n            assert!(f.exists());\n        }\n    }\n}\n"
  },
  {
    "path": "crates/core/src/registry.rs",
    "content": "//! Soldeer registry client.\n//!\n//! The registry client is responsible for fetching information about packages from the Soldeer\n//! registry at <https://soldeer.xyz>.\nuse crate::{\n    auth::get_auth_headers,\n    config::{Dependency, HttpDependency},\n    errors::RegistryError,\n};\nuse chrono::{DateTime, Utc};\nuse log::{debug, warn};\nuse reqwest::{Client, Url};\nuse semver::{Version, VersionReq};\nuse serde::Deserialize;\nuse std::env;\n\npub type Result<T> = std::result::Result<T, RegistryError>;\n\n/// A revision (version) for a project (package).\n#[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize))]\npub struct Revision {\n    /// The unique ID for the revision.\n    pub id: uuid::Uuid,\n\n    /// The version of the revision.\n    pub version: String,\n\n    /// The internal name (path of zip file) for the revision.\n    pub internal_name: String,\n\n    /// The zip file download URL.\n    pub url: String,\n\n    /// The project unique ID.\n    pub project_id: uuid::Uuid,\n\n    /// Whether this revision has been deleted.\n    pub deleted: bool,\n\n    /// Creation date for the revision.\n    pub created_at: Option<DateTime<Utc>>,\n\n    /// Whether the revision is private.\n    pub private: Option<bool>,\n}\n\n/// A project (package) in the registry.\n#[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize))]\npub struct Project {\n    /// The unique ID for the project.\n    pub id: uuid::Uuid,\n\n    /// The name of the project.\n    pub name: String,\n\n    /// The description of the project.\n    pub description: String,\n\n    /// The URL of the repository on GitHub.\n    pub github_url: String,\n\n    /// The unique ID for the owner of the project.\n    pub created_by: uuid::Uuid,\n\n    /// Whether this project has been deleted.\n    pub deleted: Option<bool>,\n\n    /// Whether the project is private.\n    
pub private: Option<bool>,\n\n    /// Other metadata below\n    pub downloads: Option<i64>,\n    pub image: Option<String>,\n    pub long_description: Option<String>,\n    pub created_at: Option<DateTime<Utc>>,\n    pub updated_at: Option<DateTime<Utc>>,\n    pub organization_id: Option<uuid::Uuid>,\n    pub latest_version: Option<String>,\n    pub deprecated: Option<bool>,\n    pub organization_name: Option<String>,\n    pub organization_verified: Option<bool>,\n}\n\n/// The response from the revision endpoint.\n#[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize))]\npub struct RevisionResponse {\n    /// The revisions.\n    data: Vec<Revision>,\n\n    /// The status of the response.\n    status: String,\n}\n\n/// The response from the project endpoint.\n#[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize))]\npub struct ProjectResponse {\n    /// The projects.\n    data: Vec<Project>,\n\n    /// The status of the response.\n    status: String,\n}\n\n/// A download URL for a revision.\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(serde::Deserialize, serde::Serialize))]\npub struct DownloadUrl {\n    /// The download URL.\n    pub url: String,\n\n    /// Whether this revision is private.\n    pub private: bool,\n}\n\n/// Construct a URL for the Soldeer API.\n///\n/// The URL is constructed from the `SOLDEER_API_URL` environment variable, or defaults to\n/// <https://api.soldeer.xyz>. 
The API version prefix and path are appended to the base URL,\n/// and any query parameters are URL-encoded and appended to the URL.\n///\n/// # Examples\n///\n/// ```\n/// # use soldeer_core::registry::api_url;\n/// let url = api_url(\n///     \"v1\",\n///     \"revision\",\n///     &[(\"project_name\", \"forge-std\"), (\"offset\", \"0\"), (\"limit\", \"1\")],\n/// );\n/// assert_eq!(\n///     url.as_str(),\n///     \"https://api.soldeer.xyz/api/v1/revision?project_name=forge-std&offset=0&limit=1\"\n/// );\n/// ```\npub fn api_url(version: &str, path: &str, params: &[(&str, &str)]) -> Url {\n    let url = env::var(\"SOLDEER_API_URL\").unwrap_or(\"https://api.soldeer.xyz\".to_string());\n    let mut url = Url::parse(&url).expect(\"SOLDEER_API_URL is invalid\");\n    url.set_path(&format!(\"api/{version}/{path}\"));\n    if params.is_empty() {\n        return url;\n    }\n    url.query_pairs_mut().extend_pairs(params.iter());\n    url\n}\n\n/// Get the download URL for a dependency at a specific version.\npub async fn get_dependency_url_remote(\n    dependency: &Dependency,\n    version: &str,\n) -> Result<DownloadUrl> {\n    debug!(dep:% = dependency; \"retrieving URL for dependency\");\n    let url = api_url(\n        \"v1\",\n        \"revision-cli\",\n        &[(\"project_name\", dependency.name()), (\"revision\", version)],\n    );\n\n    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;\n    let res = res.error_for_status()?;\n    let revision: RevisionResponse = res.json().await?;\n    let Some(r) = revision.data.first() else {\n        return Err(RegistryError::URLNotFound(dependency.to_string()));\n    };\n    debug!(dep:% = dependency, url = r.url; \"URL for dependency was found\");\n    Ok(DownloadUrl { url: r.url.clone(), private: r.private.unwrap_or_default() })\n}\n\n/// Get the unique ID for a project by name.\npub async fn get_project_id(dependency_name: &str) -> Result<String> {\n    debug!(name = dependency_name; 
\"retrieving project ID\");\n    let url = api_url(\"v2\", \"project\", &[(\"project_name\", dependency_name)]);\n    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;\n    let res = res.error_for_status()?;\n    let project: ProjectResponse = res.json().await?;\n    let Some(p) = project.data.first() else {\n        return Err(RegistryError::ProjectNotFound(dependency_name.to_string()));\n    };\n    debug!(name = dependency_name, id:% = p.id; \"project ID was found\");\n    Ok(p.id.to_string())\n}\n\n/// Get the latest version of a dependency.\npub async fn get_latest_version(dependency_name: &str) -> Result<Dependency> {\n    debug!(dep = dependency_name; \"retrieving latest version for dependency\");\n    let url = api_url(\n        \"v1\",\n        \"revision\",\n        &[(\"project_name\", dependency_name), (\"offset\", \"0\"), (\"limit\", \"1\")],\n    );\n    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;\n    let res = res.error_for_status()?;\n    let revision: RevisionResponse = res.json().await?;\n    let Some(data) = revision.data.first() else {\n        return Err(RegistryError::URLNotFound(dependency_name.to_string()));\n    };\n    debug!(dep = dependency_name, version = data.version; \"latest version found\");\n    Ok(HttpDependency {\n        name: dependency_name.to_string(),\n        version_req: data.clone().version,\n        url: None,\n        project_root: None,\n    }\n    .into())\n}\n\n/// The versions of a dependency.\n///\n/// If all versions can be parsed as semver, then the versions are sorted in descending order\n/// according to semver. 
If not all versions can be parsed as semver, then the versions are returned\n/// in the order they were received from the API (descending creation date).\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\npub enum Versions {\n    /// All versions are semver compliant.\n    Semver(Vec<Version>),\n\n    /// Not all versions are semver compliant.\n    NonSemver(Vec<String>),\n}\n\n/// Get all versions of a dependency sorted in descending order\n///\n/// If all versions can be parsed as semver, then the versions are sorted in descending order\n/// according to semver. If not all versions can be parsed as semver, then the versions are returned\n/// in the order they were received from the API (descending creation date).\npub async fn get_all_versions_descending(dependency_name: &str) -> Result<Versions> {\n    // TODO: provide a more efficient endpoint which already sorts by descending semver if possible\n    // and only returns the version strings\n    debug!(dep = dependency_name; \"retrieving all dependency versions\");\n    let url = api_url(\n        \"v1\",\n        \"revision\",\n        &[(\"project_name\", dependency_name), (\"offset\", \"0\"), (\"limit\", \"10000\")],\n    );\n    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;\n    let res = res.error_for_status()?;\n    let revision: RevisionResponse = res.json().await?;\n    if revision.data.is_empty() {\n        return Err(RegistryError::NoVersion(dependency_name.to_string()));\n    }\n\n    match revision\n        .data\n        .iter()\n        .map(|r| Version::parse(&r.version))\n        .collect::<std::result::Result<Vec<Version>, _>>()\n    {\n        Ok(mut versions) => {\n            debug!(dep = dependency_name; \"all versions are semver compliant, sorting by descending version\");\n            versions.sort_unstable_by(|a, b| b.cmp(a)); // sort in descending order\n            Ok(Versions::Semver(versions))\n        }\n        Err(_) => {\n            debug!(dep = 
dependency_name; \"not all versions are semver compliant, using API ordering\");\n            Ok(Versions::NonSemver(revision.data.iter().map(|r| r.version.to_string()).collect()))\n        }\n    }\n}\n\n/// Get the latest version of a dependency that satisfies the version requirement.\n///\n/// If the API response contains non-semver-compliant versions, then we attempt to find an exact\n/// match for the requirement, or error out.\npub async fn get_latest_supported_version(dependency: &Dependency) -> Result<String> {\n    debug!(dep:% = dependency, version_req = dependency.version_req(); \"retrieving latest version according to version requirement\");\n    match get_all_versions_descending(dependency.name()).await? {\n        Versions::Semver(all_versions) => {\n            match parse_version_req(dependency.version_req()) {\n                Some(req) => {\n                    let new_version = all_versions\n                        .iter()\n                        .find(|version| req.matches(version))\n                        .ok_or(RegistryError::NoMatchingVersion {\n                            dependency: dependency.name().to_string(),\n                            version_req: dependency.version_req().to_string(),\n                        })?;\n                    debug!(dep:% = dependency, version:% = new_version; \"acceptable version found\");\n                    Ok(new_version.to_string())\n                }\n                None => {\n                    warn!(dep:% = dependency, version_req = dependency.version_req(); \"could not parse version req according to semver, using latest version\");\n                    // we can't check which version is newer, so we just take the latest one\n                    Ok(all_versions\n                        .into_iter()\n                        .next()\n                        .map(|v| v.to_string())\n                        .expect(\"there should be at least 1 version\"))\n                }\n            }\n        
}\n        Versions::NonSemver(all_versions) => {\n            // try to find the exact version specifier in the list of all versions, otherwise error\n            // out\n            debug!(dep:% = dependency; \"versions are not all semver compliant, trying to find exact match\");\n            all_versions.into_iter().find(|v| v == dependency.version_req()).ok_or_else(|| {\n                RegistryError::NoMatchingVersion {\n                    dependency: dependency.name().to_string(),\n                    version_req: dependency.version_req().to_string(),\n                }\n            })\n        }\n    }\n}\n\n/// Parse a version requirement string into a `VersionReq`.\n///\n/// Adds the \"equal\" operator to the req if it doesn't have an operator.\n/// This is necessary because the [`semver`] crate considers no operator to be equivalent to the\n/// \"compatible\" operator, but we want to treat it as the \"equal\" operator.\npub fn parse_version_req(version_req: &str) -> Option<VersionReq> {\n    let Ok(mut req) = version_req.parse::<VersionReq>() else {\n        debug!(version_req; \"version requirement cannot be parsed by semver\");\n        return None;\n    };\n    if req.comparators.is_empty() {\n        debug!(version_req; \"comparators list is empty (wildcard req), no further action needed\");\n        return Some(req); // wildcard/any version\n    }\n    let orig_items: Vec<_> = version_req.split(',').collect();\n    // we only perform the operator conversion if we can reference the original string, i.e. 
if the\n    // parsed result has the same number of comparators as the original string\n\n    if orig_items.len() == req.comparators.len() {\n        for (comparator, orig) in req.comparators.iter_mut().zip(orig_items) {\n            if comparator.op == semver::Op::Caret && !orig.trim_start_matches(' ').starts_with('^')\n            {\n                debug!(comparator:% = comparator; \"adding exact operator for comparator\");\n                comparator.op = semver::Op::Exact;\n            }\n        }\n    }\n    Some(req)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use mockito::{Matcher, Server};\n    use temp_env::async_with_vars;\n\n    #[tokio::test]\n    async fn test_get_dependency_url() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3391,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision-cli\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let dependency =\n            HttpDependency::builder().name(\"forge-std\").version_req(\"^1.9.0\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_dependency_url_remote(&dependency, \"1.9.2\"),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap().url,\n            
\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\"\n        );\n    }\n\n    #[tokio::test]\n    async fn test_get_dependency_url_nomatch() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision-cli\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let dependency =\n            HttpDependency::builder().name(\"forge-std\").version_req(\"^1.9.0\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_dependency_url_remote(&dependency, \"1.9.2\"),\n        )\n        .await;\n        assert!(matches!(res, Err(RegistryError::URLNotFound(_))));\n    }\n\n    #[tokio::test]\n    async fn test_get_project_id() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-02-27T19:19:23.938837Z\",\"created_by\":\"96228bb5-f777-4c19-ba72-363d14b8beed\",\"deleted\":false,\"deprecated\":false,\"description\":\"Forge Standard Library is a collection of helpful contracts and libraries for use with Forge and Foundry.\",\"downloads\":648041,\"github_url\":\"https://github.com/foundry-rs/forge-std\",\"id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"image\":\"https://soldeer-resources.s3.amazonaws.com/default_icon.png\",\"latest_version\":\"1.10.0\",\"long_description\":\"Description\",\"name\":\"forge-std\",\"organization_id\":\"ff9c0d8e-9275-4f6f-a1b7-2e822450a7ba\",\"organization_name\":\"Soldeer\",\"organization_verified\":true,\"updated_at\":\"2024-02-27T19:19:23.938837Z\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v2/project\")\n            .match_query(Matcher::Any)\n            
.with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n        let res =\n            async_with_vars([(\"SOLDEER_API_URL\", Some(server.url()))], get_project_id(\"forge-std\"))\n                .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"37adefe5-9bc6-4777-aaf2-e56277d1f30b\");\n    }\n\n    #[tokio::test]\n    async fn test_get_project_id_nomatch() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v2/project\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let res =\n            async_with_vars([(\"SOLDEER_API_URL\", Some(server.url()))], get_project_id(\"forge-std\"))\n                .await;\n        assert!(matches!(res, Err(RegistryError::ProjectNotFound(_))));\n    }\n\n    #[tokio::test]\n    async fn test_get_latest_forge_std() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3391,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let dependency =\n            
HttpDependency::builder().name(\"forge-std\").version_req(\"1.9.2\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_latest_version(\"forge-std\"),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), dependency);\n    }\n\n    #[tokio::test]\n    async fn test_get_all_versions_descending() {\n        let mut server = Server::new_async().await;\n        // data is not sorted in reverse semver order\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-07-03T14:44:58.148723Z\",\"deleted\":false,\"downloads\":21,\"id\":\"b463683a-c4b4-40bf-b707-1c4eb343c4d2\",\"internal_name\":\"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"version\":\"1.9.0\"},{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3389,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"},{\"created_at\":\"2024-07-03T14:44:59.729623Z\",\"deleted\":false,\"downloads\":5290,\"id\":\"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4\",\"internal_name\":\"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"version\":\"1.9.1\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            
.with_body(data)\n            .create_async()\n            .await;\n\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_all_versions_descending(\"forge-std\"),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            Versions::Semver(vec![\n                \"1.9.2\".parse().unwrap(),\n                \"1.9.1\".parse().unwrap(),\n                \"1.9.0\".parse().unwrap()\n            ])\n        );\n    }\n\n    #[tokio::test]\n    async fn test_get_latest_supported_version_semver() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3389,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"1.9.2\"},{\"created_at\":\"2024-07-03T14:44:59.729623Z\",\"deleted\":false,\"downloads\":5290,\"id\":\"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4\",\"internal_name\":\"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"version\":\"1.9.1\"},{\"created_at\":\"2024-07-03T14:44:58.148723Z\",\"deleted\":false,\"downloads\":21,\"id\":\"b463683a-c4b4-40bf-b707-1c4eb343c4d2\",\"internal_name\":\"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"version\":\"1.9.0\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", 
\"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"forge-std\").version_req(\"^1.9.0\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_latest_supported_version(&dependency),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"1.9.2\");\n    }\n\n    #[tokio::test]\n    async fn test_get_latest_supported_version_no_semver() {\n        let mut server = Server::new_async().await;\n        let data = r#\"{\"data\":[{\"created_at\":\"2024-08-06T17:31:25.751079Z\",\"deleted\":false,\"downloads\":3389,\"id\":\"660132e6-4902-4804-8c4b-7cae0a648054\",\"internal_name\":\"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip\",\"version\":\"2024-08\"},{\"created_at\":\"2024-07-03T14:44:59.729623Z\",\"deleted\":false,\"downloads\":5290,\"id\":\"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4\",\"internal_name\":\"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip\",\"version\":\"2024-07\"},{\"created_at\":\"2024-07-03T14:44:58.148723Z\",\"deleted\":false,\"downloads\":21,\"id\":\"b463683a-c4b4-40bf-b707-1c4eb343c4d2\",\"internal_name\":\"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"project_id\":\"37adefe5-9bc6-4777-aaf2-e56277d1f30b\",\"url\":\"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip\",\"version\":\"20
24-06\"}],\"status\":\"success\"}\"#;\n        server\n            .mock(\"GET\", \"/api/v1/revision\")\n            .match_query(Matcher::Any)\n            .with_header(\"content-type\", \"application/json\")\n            .with_body(data)\n            .create_async()\n            .await;\n\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"forge-std\").version_req(\"2024-06\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_latest_supported_version(&dependency),\n        )\n        .await;\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"2024-06\"); // should resolve to the exact match\n\n        let dependency: Dependency =\n            HttpDependency::builder().name(\"forge-std\").version_req(\"non-existant\").build().into();\n        let res = async_with_vars(\n            [(\"SOLDEER_API_URL\", Some(server.url()))],\n            get_latest_supported_version(&dependency),\n        )\n        .await;\n        assert!(matches!(res, Err(RegistryError::NoMatchingVersion { .. 
})));\n    }\n\n    #[test]\n    fn test_parse_version_req() {\n        assert_eq!(parse_version_req(\"1.9.0\"), Some(VersionReq::parse(\"=1.9.0\").unwrap()));\n        assert_eq!(parse_version_req(\"=1.9.0\"), Some(VersionReq::parse(\"=1.9.0\").unwrap()));\n        assert_eq!(parse_version_req(\"^1.9.0\"), Some(VersionReq::parse(\"^1.9.0\").unwrap()));\n        assert_eq!(\n            parse_version_req(\"^1.9.0,^1.10.0\"),\n            Some(VersionReq::parse(\"^1.9.0, ^1.10.0\").unwrap())\n        );\n        assert_eq!(\n            parse_version_req(\"1.9.0,1.10.0\"),\n            Some(VersionReq::parse(\"=1.9.0,=1.10.0\").unwrap())\n        );\n        assert_eq!(parse_version_req(\">=1.9.0\"), Some(VersionReq::parse(\">=1.9.0\").unwrap()));\n        assert_eq!(parse_version_req(\"\"), None);\n        assert_eq!(parse_version_req(\"foobar\"), None);\n        assert_eq!(parse_version_req(\"*\"), Some(VersionReq::STAR));\n    }\n}\n"
  },
  {
    "path": "crates/core/src/remappings.rs",
    "content": "//! Remappings management.\nuse crate::{\n    config::{Dependency, Paths, SoldeerConfig, read_config_deps},\n    errors::RemappingsError,\n    utils::path_matches,\n};\nuse derive_more::derive::From;\nuse log::debug;\nuse path_slash::PathExt as _;\nuse rayon::prelude::*;\nuse serde::{Deserialize, Serialize};\nuse std::{\n    fs::{self, File},\n    io::Write as _,\n    path::PathBuf,\n};\nuse toml_edit::{Array, DocumentMut, value};\n\npub type Result<T> = std::result::Result<T, RemappingsError>;\n\n/// Action to perform on the remappings.\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub enum RemappingsAction {\n    /// Add a dependency to the remappings.\n    Add(Dependency),\n\n    /// Remove a dependency from the remappings.\n    Remove(Dependency),\n\n    /// Update the remappings according to the config file.\n    Update,\n}\n\n/// Location where to store the remappings, either in `remappings.txt` or the config file\n/// (foundry/soldeer).\n#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash)]\n#[serde(rename_all = \"lowercase\")]\npub enum RemappingsLocation {\n    /// Store the remappings in a dedicated `remappings.txt` file.\n    #[default]\n    Txt,\n\n    /// Store the remappings in the `foundry.toml` config file.\n    ///\n    /// Note that remappings are never stored in the `soldeer.toml` file because foundry wouldn't\n    /// be able to read them from there.\n    Config,\n}\n\n/// Generate the remappings for storing into the `remappings.txt` file.\n///\n/// If the `remappings_regenerate` option is set to `true`, then any existing remappings are\n/// discarded and the remappings are generated from the dependencies in the config file.\n///\n/// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added\n/// or removed. 
For the [`RemappingsAction::Update`] action, the existing remappings are merged with\n/// the dependencies in the config file.\npub fn remappings_txt(\n    action: &RemappingsAction,\n    paths: &Paths,\n    soldeer_config: &SoldeerConfig,\n) -> Result<()> {\n    if soldeer_config.remappings_regenerate && paths.remappings.exists() {\n        fs::remove_file(&paths.remappings)?;\n        debug!(path:? = paths.remappings; \"removed existing remappings file\");\n    }\n    let contents = if paths.remappings.exists() {\n        debug!(path:? = paths.remappings; \"reading existing remappings from remappings.txt file\");\n        fs::read_to_string(&paths.remappings)?\n    } else {\n        String::new()\n    };\n    let existing_remappings: Vec<_> = contents.lines().filter_map(|r| r.split_once('=')).collect();\n\n    let new_remappings = generate_remappings(action, paths, soldeer_config, &existing_remappings)?;\n\n    let mut file = File::create(&paths.remappings)?;\n    for remapping in new_remappings {\n        writeln!(file, \"{remapping}\")?;\n    }\n    debug!(path:? = paths.remappings; \"updated remappings.txt file\");\n    Ok(())\n}\n\n/// Generate the remappings for storing into the `foundry.toml` config file.\n///\n/// If the `remappings_regenerate` option is set to `true`, then any existing remappings are\n/// discarded and the remappings are generated from the dependencies in the config file.\n///\n/// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added\n/// or removed. For the [`RemappingsAction::Update`] action, the existing remappings are merged with\n/// the dependencies in the config file.\n///\n/// The remappings are added to the default profile in all cases, and to any other profile that\n/// already has a `remappings key`. 
If the profile doesn't have a remappings key, it is left\n/// untouched.\npub fn remappings_foundry(\n    action: &RemappingsAction,\n    paths: &Paths,\n    soldeer_config: &SoldeerConfig,\n) -> Result<()> {\n    let contents = fs::read_to_string(&paths.config)?;\n    let mut doc: DocumentMut =\n        contents.parse::<DocumentMut>().expect(\"config file should be valid toml\");\n    let Some(profiles) = doc[\"profile\"].as_table_mut() else {\n        // we don't add remappings if there are no profiles\n        debug!(\"no config profile found, skipping remappings generation\");\n        return Ok(());\n    };\n\n    for (name, profile) in profiles.iter_mut() {\n        // we normally only edit remappings of profiles which already have a remappings key\n        match profile.get_mut(\"remappings\").map(|v| v.as_array_mut()) {\n            Some(Some(remappings)) => {\n                debug!(name:% = name; \"updating remappings for profile\");\n                let existing_remappings: Vec<_> = remappings\n                    .iter()\n                    .filter_map(|r| r.as_str())\n                    .filter_map(|r| r.split_once('='))\n                    .collect();\n                let new_remappings =\n                    generate_remappings(action, paths, soldeer_config, &existing_remappings)?;\n                remappings.clear();\n                for remapping in new_remappings {\n                    remappings.push(remapping);\n                }\n                format_array(remappings);\n            }\n            _ => {\n                if name == \"default\" {\n                    debug!(\"updating remappings for default profile\");\n                    // except the default profile, where we always add the remappings\n                    let new_remappings = generate_remappings(action, paths, soldeer_config, &[])?;\n                    let mut array = new_remappings.into_iter().collect::<Array>();\n                    format_array(&mut array);\n          
          profile[\"remappings\"] = value(array);\n                }\n            }\n        }\n    }\n\n    fs::write(&paths.config, doc.to_string())?;\n    debug!(path:? = paths.config; \"remappings updated in config file\");\n\n    Ok(())\n}\n\n/// Edit the remappings according to the action and the configuration.\n///\n/// Depending on the configuration, the remappings are either stored in a `remappings.txt` file or\n/// in the `foundry.toml` config file.\n///\n/// Note that if the config is stored in a dedicated `soldeer.toml` file, then the\n/// `remappings_location` setting is ignored and the remappings are always stored in a\n/// `remappings.txt` file.\npub fn edit_remappings(\n    action: &RemappingsAction,\n    config: &SoldeerConfig,\n    paths: &Paths,\n) -> Result<()> {\n    if config.remappings_generate {\n        if paths.config.to_string_lossy().contains(\"foundry.toml\") {\n            match config.remappings_location {\n                RemappingsLocation::Txt => {\n                    debug!(\"updating remappings.txt according to config option\");\n                    remappings_txt(action, paths, config)?;\n                }\n                RemappingsLocation::Config => {\n                    debug!(\"updating foundry.toml remappings according to config option\");\n                    remappings_foundry(action, paths, config)?;\n                }\n            }\n        } else {\n            debug!(\"updating remappings.txt because config file is soldeer.toml\");\n            remappings_txt(action, paths, config)?;\n        }\n    } else {\n        debug!(\"skipping remappings update according to config option\");\n    }\n    Ok(())\n}\n\n/// Format the default left part (alias) for a remappings entry.\n///\n/// The optional `remappings_prefix` setting is prepended to the dependency name, and the\n/// version requirement string is appended (after a hyphen) if the `remappings_version` setting is\n/// set to `true`. 
Finally, a trailing slash is added to the alias.\npub fn format_remap_name(soldeer_config: &SoldeerConfig, dependency: &Dependency) -> String {\n    let version_suffix = if soldeer_config.remappings_version {\n        &format!(\"-{}\", dependency.version_req().replace('=', \"\"))\n    } else {\n        \"\"\n    };\n    format!(\"{}{}{}/\", soldeer_config.remappings_prefix, dependency.name(), version_suffix)\n}\n\n/// Generate the remappings for a given action.\n///\n/// If the `remappings_regenerate` option is set to `true`, then any existing remappings are\n/// discarded and the remappings are generated from the dependencies in the config file.\n///\n/// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added\n/// or removed. For the [`RemappingsAction::Update`] action, the existing remappings are merged with\n/// the dependencies in the config file.\n///\n/// Dependencies are sorted alphabetically for consistency.\nfn generate_remappings(\n    action: &RemappingsAction,\n    paths: &Paths,\n    soldeer_config: &SoldeerConfig,\n    existing_remappings: &[(&str, &str)],\n) -> Result<Vec<String>> {\n    let mut new_remappings = Vec::new();\n    if soldeer_config.remappings_regenerate {\n        debug!(\"ignoring existing remappings and recreating from config\");\n        let (dependencies, _) = read_config_deps(&paths.config)?;\n        new_remappings = remappings_from_deps(&dependencies, paths, soldeer_config)?\n            .into_iter()\n            .map(|i| i.remapping_string)\n            .collect();\n    } else {\n        match &action {\n            RemappingsAction::Remove(remove_dep) => {\n                debug!(dep:% = remove_dep; \"trying to remove dependency from remappings\");\n                // only keep items not matching the dependency to remove\n                if let Ok(remove_og) = get_install_dir_relative(remove_dep, paths) {\n                    for (existing_remapped, existing_og) in existing_remappings 
{\n                        // TODO: make the detection smarter, and match on any path where the version\n                        // is semver-compatible too.\n                        if !existing_og.trim_end_matches('/').starts_with(&remove_og) {\n                            new_remappings.push(format!(\"{existing_remapped}={existing_og}\"));\n                        } else {\n                            debug!(dep:% = remove_dep; \"found existing remapping corresponding to dependency to remove\");\n                        }\n                    }\n                } else {\n                    debug!(dep:% = remove_dep; \"could not find a directory matching the dependency to remove\");\n                    for (remapped, og) in existing_remappings {\n                        new_remappings.push(format!(\"{remapped}={og}\"));\n                    }\n                }\n            }\n            RemappingsAction::Add(add_dep) => {\n                debug!(dep:% = add_dep; \"adding remapping for dependency if necessary\");\n                // we only add the remapping if it's not already existing, otherwise we keep the old\n                // remapping\n                let add_dep_remapped = format_remap_name(soldeer_config, add_dep);\n                let add_dep_og = get_install_dir_relative(add_dep, paths)?;\n                let mut found = false; // whether a remapping existed for that dep already\n                for (existing_remapped, existing_og) in existing_remappings {\n                    new_remappings.push(format!(\"{existing_remapped}={existing_og}\"));\n                    if existing_og.trim_end_matches('/').starts_with(&add_dep_og) {\n                        debug!(dep:% = add_dep; \"remapping exists already, skipping\");\n                        found = true;\n                    }\n                }\n                if !found {\n                    debug!(dep:% = add_dep; \"remapping not found, adding it\");\n                    
new_remappings.push(format!(\"{add_dep_remapped}={add_dep_og}/\"));\n                }\n            }\n            RemappingsAction::Update => {\n                // This is where we end up in the `update` command if we don't want to re-generate\n                // all remappings. We need to merge existing remappings with the full list of deps.\n                // We generate all remappings from the dependencies, then replace existing items.\n                debug!(\n                    \"updating remappings, merging existing ones with the ones generated from config\"\n                );\n                let (dependencies, _) = read_config_deps(&paths.config)?;\n                let new_remappings_info =\n                    remappings_from_deps(&dependencies, paths, soldeer_config)?;\n                if existing_remappings.is_empty() {\n                    debug!(\"no existing remappings, using the ones from config\");\n                    new_remappings =\n                        new_remappings_info.into_iter().map(|i| i.remapping_string).collect();\n                } else {\n                    let mut existing_remappings = Vec::from(existing_remappings);\n                    for RemappingInfo { remapping_string: item, dependency: dep } in\n                        new_remappings_info\n                    {\n                        debug!(dep:% = dep; \"trying to find a matching existing remapping for config item\");\n                        let (_, item_og) =\n                            item.split_once('=').expect(\"remappings should have two parts\");\n                        // try to find all existing items pointing to a matching dependency folder\n                        let mut found = false;\n                        existing_remappings.retain(|(existing_remapped, existing_og)| {\n                            // only keep the first two components of the path (`dependencies`\n                            // folder and the dependency folder)\n                    
        let path: PathBuf =\n                                PathBuf::from(existing_og).components().take(2).collect();\n                            // if path matches, we should update the item's path with the new\n                            // one and add it to the final list\n                            if path_matches(&dep, &path) {\n                                debug!(path = existing_og; \"existing remapping matches the config item\");\n                                let path: PathBuf =\n                                    PathBuf::from(existing_og).components().take(2).collect();\n                                let existing_og_updated = existing_og.replace(\n                                    path.to_slash_lossy().as_ref(),\n                                    item_og.trim_end_matches('/'),\n                                );\n                                debug!(new_path = existing_og_updated; \"updated remapping path\");\n                                new_remappings\n                                    .push(format!(\"{existing_remapped}={existing_og_updated}\"));\n                                found = true;\n                                // we remove this item from the existing remappings list as it's\n                                // been processed\n                                return false;\n                            }\n                            // keep this item to add it to the remappings again later\n                            true\n                        });\n                        if !found {\n                            debug!(dep:% = dep;\"no existing remapping found for config item, adding it\");\n                            new_remappings.push(item);\n                        }\n                    }\n                    // add extra existing remappings back\n                    for (existing_remapped, existing_og) in existing_remappings {\n                        debug!(path = existing_og; \"adding extra remapping 
which was existing but didn't match a config item\");\n                        new_remappings.push(format!(\"{existing_remapped}={existing_og}\"));\n                    }\n                }\n            }\n        }\n    }\n\n    // sort the remappings\n    new_remappings.sort_unstable();\n    Ok(new_remappings)\n}\n\n#[derive(Debug, Clone, From)]\nstruct RemappingInfo {\n    remapping_string: String,\n    dependency: Dependency,\n}\n\n/// Generate remappings from the dependencies list.\n///\n/// The remappings are generated in the form `alias/=path/`, where `alias` is the dependency name\n/// with an optional prefix and version requirement suffix, and `path` is the relative path to the\n/// dependency folder.\nfn remappings_from_deps(\n    dependencies: &[Dependency],\n    paths: &Paths,\n    soldeer_config: &SoldeerConfig,\n) -> Result<Vec<RemappingInfo>> {\n    dependencies\n        .par_iter()\n        .map(|dependency| {\n            let dependency_name_formatted = format_remap_name(soldeer_config, dependency); // contains trailing slash\n            let relative_path = get_install_dir_relative(dependency, paths)?;\n            Ok((format!(\"{dependency_name_formatted}={relative_path}/\"), dependency.clone()).into())\n        })\n        .collect::<Result<Vec<RemappingInfo>>>()\n}\n\n/// Find the install path (relative to project root) for a dependency that was already installed\n///\n/// # Errors\n/// If the there is no folder in the dependencies folder corresponding to the dependency\nfn get_install_dir_relative(dependency: &Dependency, paths: &Paths) -> Result<String> {\n    let path = dunce::canonicalize(\n        dependency\n            .install_path_sync(&paths.dependencies)\n            .ok_or(RemappingsError::DependencyNotFound(dependency.to_string()))?,\n    )?;\n    Ok(path\n        .strip_prefix(&paths.root) // already canonicalized\n        .map_err(|_| RemappingsError::DependencyNotFound(dependency.to_string()))?\n        .to_slash_lossy()\n       
 .to_string())\n}\n\n/// Format a TOML array as a multi-line array with indentation in case there is more than one\n/// element.\n///\n/// # Examples\n///\n/// ```toml\n/// [profile.default]\n/// remappings = []\n/// ```\n///\n/// ```toml\n/// [profile.default]\n/// remappings = [\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]\n/// ```\n///\n/// ```toml\n/// [profile.default]\n/// remappings = [\n///     \"lib1-1.0.0/=dependencies/lib1-1.0.0/\",\n///     \"lib2-2.0.0/=dependencies/lib2-2.0.0/\",\n/// ]\n/// ```\nfn format_array(array: &mut Array) {\n    array.fmt();\n    if (0..=1).contains(&array.len()) {\n        array.set_trailing(\"\");\n        array.set_trailing_comma(false);\n    } else {\n        for item in array.iter_mut() {\n            item.decor_mut().set_prefix(\"\\n    \");\n        }\n        array.set_trailing(\"\\n\");\n        array.set_trailing_comma(true);\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::config::{GitDependency, HttpDependency};\n    use testdir::testdir;\n\n    #[test]\n    fn test_get_install_dir_relative() {\n        let dir = testdir!();\n        fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n        let dependencies_dir = dir.join(\"dependencies\");\n        fs::create_dir_all(&dependencies_dir).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n\n        fs::create_dir_all(dependencies_dir.join(\"dep1-1.1.1\")).unwrap();\n        let dependency =\n            HttpDependency::builder().name(\"dep1\").version_req(\"^1.0.0\").build().into();\n        let res = get_install_dir_relative(&dependency, &paths);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"dependencies/dep1-1.1.1\");\n\n        fs::create_dir_all(dependencies_dir.join(\"dep2-2.0.0\")).unwrap();\n        let dependency = GitDependency::builder()\n            .name(\"dep2\")\n            .version_req(\"2.0.0\")\n            .git(\"git@github.com:test/test.git\")\n            
.build()\n            .into();\n        let res = get_install_dir_relative(&dependency, &paths);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), \"dependencies/dep2-2.0.0\");\n\n        let dependency = HttpDependency::builder().name(\"dep3\").version_req(\"3.0.0\").build().into();\n        let res = get_install_dir_relative(&dependency, &paths);\n        assert!(res.is_err(), \"{res:?}\");\n    }\n\n    #[test]\n    fn test_format_remap_name() {\n        let dependency =\n            HttpDependency::builder().name(\"dep1\").version_req(\"^1.0.0\").build().into();\n        let res = format_remap_name(\n            &SoldeerConfig {\n                remappings_version: false,\n                remappings_prefix: String::new(),\n                ..Default::default()\n            },\n            &dependency,\n        );\n        assert_eq!(res, \"dep1/\");\n        let res = format_remap_name(\n            &SoldeerConfig {\n                remappings_version: true,\n                remappings_prefix: String::new(),\n                ..Default::default()\n            },\n            &dependency,\n        );\n        assert_eq!(res, \"dep1-^1.0.0/\");\n        let res = format_remap_name(\n            &SoldeerConfig {\n                remappings_version: false,\n                remappings_prefix: \"@\".to_string(),\n                ..Default::default()\n            },\n            &dependency,\n        );\n        assert_eq!(res, \"@dep1/\");\n        let res = format_remap_name(\n            &SoldeerConfig {\n                remappings_version: true,\n                remappings_prefix: \"@\".to_string(),\n                ..Default::default()\n            },\n            &dependency,\n        );\n        assert_eq!(res, \"@dep1-^1.0.0/\");\n\n        let dependency =\n            HttpDependency::builder().name(\"dep1\").version_req(\"=1.0.0\").build().into();\n        let res = format_remap_name(\n            &SoldeerConfig {\n                
remappings_version: true,\n                remappings_prefix: String::new(),\n                ..Default::default()\n            },\n            &dependency,\n        );\n        assert_eq!(res, \"dep1-1.0.0/\");\n    }\n\n    #[test]\n    fn test_remappings_from_deps() {\n        let dir = testdir!();\n        let config = r#\"[dependencies]\ndep1 = \"^1.0.0\"\ndep2 = \"2.0.0\"\ndep3 = { version = \"foobar\", git = \"git@github.com:test/test.git\", branch = \"foobar\" }\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), config).unwrap();\n        let dependencies_dir = dir.join(\"dependencies\");\n        fs::create_dir_all(&dependencies_dir).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n\n        fs::create_dir_all(dependencies_dir.join(\"dep1-1.1.1\")).unwrap();\n        fs::create_dir_all(dependencies_dir.join(\"dep2-2.0.0\")).unwrap();\n        fs::create_dir_all(dependencies_dir.join(\"dep3-foobar\")).unwrap();\n\n        let (dependencies, _) = read_config_deps(&paths.config).unwrap();\n        let res = remappings_from_deps(&dependencies, &paths, &SoldeerConfig::default());\n        assert!(res.is_ok(), \"{res:?}\");\n        let res = res.unwrap();\n        assert_eq!(res.len(), 3);\n        assert_eq!(res[0].remapping_string, \"dep1-^1.0.0/=dependencies/dep1-1.1.1/\");\n        assert_eq!(res[1].remapping_string, \"dep2-2.0.0/=dependencies/dep2-2.0.0/\");\n        assert_eq!(res[2].remapping_string, \"dep3-foobar/=dependencies/dep3-foobar/\");\n    }\n\n    #[test]\n    fn test_generate_remappings_add() {\n        let dir = testdir!();\n        fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        // empty existing remappings\n        let existing_deps = vec![];\n        let dep = 
HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into();\n        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]);\n\n        // existing remappings not matching new one\n        let existing_deps = vec![(\"lib1-1.0.0/\", \"dependencies/lib1-1.0.0/\")];\n        fs::create_dir_all(paths.dependencies.join(\"lib2-1.1.1\")).unwrap();\n        let dep = HttpDependency::builder().name(\"lib2\").version_req(\"^1.0.0\").build().into();\n        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\", \"lib2-^1.0.0/=dependencies/lib2-1.1.1/\"]\n        );\n\n        // existing remappings matching the new one\n        let existing_deps = vec![(\"@lib1-1.0.0/foo\", \"dependencies/lib1-1.0.0/src\")];\n        let dep = HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into();\n        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), vec![\"@lib1-1.0.0/foo=dependencies/lib1-1.0.0/src\"]);\n    }\n\n    #[test]\n    fn test_generate_remappings_remove() {\n        let dir = testdir!();\n        fs::write(dir.join(\"soldeer.toml\"), \"[dependencies]\\n\").unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib2-2.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        let existing_deps = vec![\n            (\"lib1-1.0.0/\", \"dependencies/lib1-1.0.0/\"),\n            (\"lib2-2.0.0/\", 
\"dependencies/lib2-2.0.0/\"),\n        ];\n        let dep = HttpDependency::builder().name(\"lib1\").version_req(\"1.0.0\").build().into();\n        let res =\n            generate_remappings(&RemappingsAction::Remove(dep), &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(res.unwrap(), vec![\"lib2-2.0.0/=dependencies/lib2-2.0.0/\"]);\n\n        // dep does not exist, no error\n        let dep = HttpDependency::builder().name(\"lib3\").version_req(\"1.0.0\").build().into();\n        let res =\n            generate_remappings(&RemappingsAction::Remove(dep), &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\", \"lib2-2.0.0/=dependencies/lib2-2.0.0/\"]\n        );\n    }\n\n    #[test]\n    fn test_generate_remappings_update() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1.0.0\"\nlib2 = \"2.0.0\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib2-2.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        // all entries are customized\n        let existing_deps = vec![\n            (\"lib1-1.0.0/\", \"dependencies/lib1-1.0.0/src/\"),\n            (\"lib2/\", \"dependencies/lib2-2.0.0/\"),\n        ];\n        let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/src/\", \"lib2/=dependencies/lib2-2.0.0/\"]\n        );\n\n        // one entry is missing\n        let existing_deps = vec![(\"lib1-1.0.0/\", \"dependencies/lib1-1.0.0/\")];\n      
  let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\", \"lib2-2.0.0/=dependencies/lib2-2.0.0/\"]\n        );\n\n        // extra entries are kep\n        let existing_deps = vec![\n            (\"lib1-1.0.0/\", \"dependencies/lib1-1.0.0/\"),\n            (\"lib2-2.0.0/\", \"dependencies/lib2-2.0.0/\"),\n            (\"lib3/\", \"dependencies/lib3/\"),\n        ];\n        let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\n                \"lib1-1.0.0/=dependencies/lib1-1.0.0/\",\n                \"lib2-2.0.0/=dependencies/lib2-2.0.0/\",\n                \"lib3/=dependencies/lib3/\"\n            ]\n        );\n    }\n\n    #[test]\n    fn test_remappings_foundry_default_profile_empty() {\n        let dir = testdir!();\n        let contents = r#\"[profile.default]\n\n[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.config).unwrap();\n        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();\n        assert_eq!(\n            doc[\"profile\"][\"default\"][\"remappings\"]\n                .as_array()\n                .unwrap()\n                .into_iter()\n                .map(|i| i.as_str().unwrap())\n                .collect::<Vec<_>>(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]\n        );\n 
   }\n\n    #[test]\n    fn test_remappings_foundry_second_profile_empty() {\n        let dir = testdir!();\n        let contents = r#\"[profile.default]\n\n[profile.local]\n\n[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        // should only add remappings to the default profile\n        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.config).unwrap();\n        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();\n        assert_eq!(\n            doc[\"profile\"][\"default\"][\"remappings\"]\n                .as_array()\n                .unwrap()\n                .into_iter()\n                .map(|i| i.as_str().unwrap())\n                .collect::<Vec<_>>(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]\n        );\n        assert!(!doc[\"profile\"][\"local\"].as_table().unwrap().contains_key(\"remappings\"));\n    }\n\n    #[test]\n    fn test_remappings_foundry_two_profiles() {\n        let dir = testdir!();\n        let contents = r#\"[profile.default]\nremappings = []\n\n[profile.local]\nremappings = []\n\n[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.config).unwrap();\n        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();\n        assert_eq!(\n   
         doc[\"profile\"][\"default\"][\"remappings\"]\n                .as_array()\n                .unwrap()\n                .into_iter()\n                .map(|i| i.as_str().unwrap())\n                .collect::<Vec<_>>(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]\n        );\n        assert_eq!(\n            doc[\"profile\"][\"local\"][\"remappings\"]\n                .as_array()\n                .unwrap()\n                .into_iter()\n                .map(|i| i.as_str().unwrap())\n                .collect::<Vec<_>>(),\n            vec![\"lib1-1.0.0/=dependencies/lib1-1.0.0/\"]\n        );\n    }\n\n    #[test]\n    fn test_remappings_foundry_keep_existing() {\n        let dir = testdir!();\n        let contents = r#\"[profile.default]\nremappings = [\"lib1/=dependencies/lib1-1.0.0/src/\"]\n\n[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.config).unwrap();\n        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();\n        assert_eq!(\n            doc[\"profile\"][\"default\"][\"remappings\"]\n                .as_array()\n                .unwrap()\n                .into_iter()\n                .map(|i| i.as_str().unwrap())\n                .collect::<Vec<_>>(),\n            vec![\"lib1/=dependencies/lib1-1.0.0/src/\"]\n        );\n    }\n\n    #[test]\n    fn test_remappings_txt_keep() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        
fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let remappings = \"lib1/=dependencies/lib1-1.0.0/src/\\n\";\n        fs::write(dir.join(\"remappings.txt\"), remappings).unwrap();\n        let config = SoldeerConfig::default();\n        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.remappings).unwrap();\n        assert_eq!(contents, remappings);\n    }\n\n    #[test]\n    fn test_remappings_txt_regenerate() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        let remappings = \"lib1/=dependencies/lib1-1.0.0/src/\\n\";\n        fs::write(dir.join(\"remappings.txt\"), remappings).unwrap();\n        let config = SoldeerConfig { remappings_regenerate: true, ..Default::default() };\n        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.remappings).unwrap();\n        assert_eq!(contents, \"lib1-1.0.0/=dependencies/lib1-1.0.0/\\n\");\n    }\n\n    #[test]\n    fn test_remappings_txt_missing() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1.0.0\"\nlib2 = \"2.0.0\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib2-2.0.0\")).unwrap();\n        let remappings = \"lib1/=dependencies/lib1-1.0.0/src/\\n\";\n        fs::write(dir.join(\"remappings.txt\"), remappings).unwrap();\n        let config = 
SoldeerConfig::default();\n        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.remappings).unwrap();\n        assert_eq!(\n            contents,\n            \"lib1/=dependencies/lib1-1.0.0/src/\\nlib2-2.0.0/=dependencies/lib2-2.0.0/\\n\"\n        );\n    }\n\n    #[test]\n    fn test_edit_remappings_soldeer_config() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1.0.0\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.0.0\")).unwrap();\n        // the config gets ignored in this case\n        let config =\n            SoldeerConfig { remappings_location: RemappingsLocation::Config, ..Default::default() };\n        let res = edit_remappings(&RemappingsAction::Update, &config, &paths);\n        assert!(res.is_ok(), \"{res:?}\");\n        let contents = fs::read_to_string(&paths.remappings).unwrap();\n        assert_eq!(contents, \"lib1-1.0.0/=dependencies/lib1-1.0.0/\\n\");\n    }\n\n    #[test]\n    fn test_generate_remappings_update_semver_custom() {\n        let dir = testdir!();\n        let contents = r#\"[dependencies]\nlib1 = \"1\"\nlib2 = \"2\"\n\"#;\n        fs::write(dir.join(\"soldeer.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        // libs have been updated to newer versions\n        fs::create_dir_all(paths.dependencies.join(\"lib1-1.2.0\")).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"lib2-2.1.0\")).unwrap();\n        let config = SoldeerConfig::default();\n        // all entries are customized, using an old version of the libs\n        let existing_deps = vec![\n            (\"lib1-1/\", \"dependencies/lib1-1.1.1/src/\"), // customize right part\n            (\"lib2/\", 
\"dependencies/lib2-2.0.1/src/\"),   // customize both sides\n        ];\n        let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps);\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(\n            res.unwrap(),\n            vec![\"lib1-1/=dependencies/lib1-1.2.0/src/\", \"lib2/=dependencies/lib2-2.1.0/src/\"]\n        );\n    }\n\n    #[test]\n    fn test_generate_remappings_duplicates() {\n        let dir = testdir!();\n        let contents = r#\"[profile.default]\nremappings = [\n    \"@openzeppelin-contracts/=dependencies/@openzeppelin-contracts-5.0.2/\",\n    \"@openzeppelin/contracts/=dependencies/@openzeppelin-contracts-5.0.2/\",\n    \"foo/=bar/\",\n]\nlibs = [\"dependencies\"]\n\n[dependencies]\n\"@openzeppelin-contracts\" = \"5.0.2\"\n\"#;\n        fs::write(dir.join(\"foundry.toml\"), contents).unwrap();\n        let paths = Paths::from_root(&dir).unwrap();\n        fs::create_dir_all(paths.dependencies.join(\"@openzeppelin-contracts-5.0.2\")).unwrap();\n        let res = remappings_foundry(\n            &RemappingsAction::Update,\n            &paths,\n            &SoldeerConfig {\n                remappings_location: RemappingsLocation::Config,\n                ..Default::default()\n            },\n        );\n        assert!(res.is_ok(), \"{res:?}\");\n        assert_eq!(fs::read_to_string(dir.join(\"foundry.toml\")).unwrap(), contents);\n    }\n}\n"
  },
  {
    "path": "crates/core/src/update.rs",
    "content": "//! Update dependencies to the latest version.\nuse crate::{\n    config::{Dependency, GitIdentifier},\n    errors::UpdateError,\n    install::{InstallProgress, install_dependency},\n    lock::{GitLockEntry, LockEntry, format_install_path},\n    registry::get_latest_supported_version,\n    utils::run_git_command,\n};\nuse log::debug;\nuse std::path::Path;\nuse tokio::task::JoinSet;\n\npub type Result<T> = std::result::Result<T, UpdateError>;\n\n/// Update the dependencies to a new version.\n///\n/// This function spawns a task for each dependency and waits for all of them to finish.\n///\n/// For Git dependencies without a ref or with a\n/// [`GitIdentifier::Branch`] ref, the function will update\n/// the dependency to the latest commit with `git pull`.\n///\n/// For Git dependencies with a [`GitIdentifier::Rev`] or [`GitIdentifier::Tag`] ref, the function\n/// will reset the repo to the ref if the integrity check fails. An update is not really possible in\n/// this case.\n///\n/// For HTTP dependencies, the function will install the latest version of the dependency according\n/// to the version requirement in the config file. 
If the version requirement is not a semver range,\n/// the function will install the latest version from the registry.\npub async fn update_dependencies(\n    dependencies: &[Dependency],\n    locks: &[LockEntry],\n    deps_path: impl AsRef<Path>,\n    recursive_deps: bool,\n    progress: InstallProgress,\n) -> Result<Vec<LockEntry>> {\n    let mut set = JoinSet::new();\n    for dep in dependencies {\n        debug!(dep:% = dep; \"spawning task to update dependency\");\n        set.spawn({\n            let d = dep.clone();\n            let p = progress.clone();\n\n            let lock = locks.iter().find(|l| l.name() == dep.name()).cloned();\n            let paths = deps_path.as_ref().to_path_buf();\n            async move { update_dependency(&d, lock.as_ref(), &paths, recursive_deps, p).await }\n        });\n    }\n\n    let mut results = Vec::new();\n    while let Some(res) = set.join_next().await {\n        results.push(res??);\n    }\n    debug!(\"all update tasks have finished\");\n    Ok(results)\n}\n\n/// Update a single dependency to a new version.\n///\n/// For Git dependencies without a ref or with a\n/// [`GitIdentifier::Branch`] ref, the function will update\n/// the dependency to the latest commit with `git pull`.\n///\n/// For Git dependencies with a [`GitIdentifier::Rev`] or [`GitIdentifier::Tag`] ref, the function\n/// will reset the repo to the ref if the integrity check fails. An update is not really possible in\n/// this case.\n///\n/// For HTTP dependencies, the function will install the latest version of the dependency according\n/// to the version requirement in the config file. 
If the version requirement is not a semver range,\n/// the function will install the latest version from the registry.\npub async fn update_dependency(\n    dependency: &Dependency,\n    lock: Option<&LockEntry>,\n    deps: impl AsRef<Path>,\n    recursive_deps: bool,\n    progress: InstallProgress,\n) -> Result<LockEntry> {\n    match dependency {\n        Dependency::Git(dep) if matches!(dep.identifier, None | Some(GitIdentifier::Branch(_))) => {\n            // we handle the git case in a special way because we don't need to re-clone the repo\n            // update to the latest commit (git pull)\n            debug!(dep:% = dependency; \"updating git dependency based on a branch\");\n            let path = match lock {\n                Some(lock) => lock.install_path(&deps),\n                None => dependency.install_path(&deps).await.unwrap_or_else(|| {\n                    format_install_path(dependency.name(), dependency.version_req(), &deps)\n                }),\n            };\n            run_git_command(&[\"reset\", \"--hard\", \"HEAD\"], Some(&path)).await?;\n            run_git_command(&[\"clean\", \"-fd\"], Some(&path)).await?;\n            let old_commit = run_git_command(&[\"rev-parse\", \"--verify\", \"HEAD\"], Some(&path))\n                .await?\n                .trim()\n                .to_string();\n            debug!(dep:% = dependency; \"old commit was {old_commit}\");\n\n            if let Some(GitIdentifier::Branch(ref branch)) = dep.identifier {\n                // checkout the desired branch\n                debug!(dep:% = dependency, branch; \"checking out required branch\");\n                run_git_command(&[\"checkout\", branch], Some(&path)).await?;\n            } else {\n                // necessarily `None` because of the match above\n                // checkout the default branch\n                debug!(dep:% = dependency; \"checking out default branch\");\n                let branch = run_git_command(\n                    
&[\"symbolic-ref\", \"refs/remotes/origin/HEAD\", \"--short\"],\n                    Some(&path),\n                )\n                .await?\n                .trim_start_matches(\"origin/\")\n                .trim()\n                .to_string();\n                debug!(dep:% = dependency; \"default branch is {branch}\");\n                run_git_command(&[\"checkout\", &branch], Some(&path)).await?;\n            }\n            // pull the latest commits\n            debug!(dep:% = dependency; \"running git pull\");\n            run_git_command(&[\"pull\"], Some(&path)).await?;\n            let commit = run_git_command(&[\"rev-parse\", \"--verify\", \"HEAD\"], Some(&path))\n                .await?\n                .trim()\n                .to_string();\n            debug!(dep:% = dependency; \"new commit is {commit}\");\n            if commit != old_commit {\n                debug!(dep:% = dependency, old_commit, new_commit = commit; \"updated dependency\");\n                progress.log(format!(\"Updating {dependency} from {old_commit:.7} to {commit:.7}\"));\n            } else {\n                debug!(dep:% = dependency; \"there was no update available\");\n            }\n            let new_lock = GitLockEntry::builder()\n                .name(&dep.name)\n                .version(&dep.version_req)\n                .git(&dep.git)\n                .rev(commit)\n                .build()\n                .into();\n            progress.update_all(dependency.into());\n\n            Ok(new_lock)\n        }\n        Dependency::Git(dep) if dep.identifier.is_some() => {\n            // check integrity against the existing version since we can't update to a new rev\n            debug!(dep:% = dependency; \"checking git repo integrity against required rev (can't update)\");\n            let lock = match lock {\n                Some(lock) => lock,\n                None => &GitLockEntry::builder()\n                    .name(&dep.name)\n                    
.version(&dep.version_req)\n                    .git(&dep.git)\n                    .rev(dep.identifier.as_ref().expect(\"identifier should be present\").to_string())\n                    .build()\n                    .into(),\n            };\n            let new_lock =\n                install_dependency(dependency, Some(lock), &deps, None, recursive_deps, progress)\n                    .await?;\n            Ok(new_lock)\n        }\n        _ => {\n            // for http dependencies, we simply install them as if there was no lock entry\n            debug!(dep:% = dependency; \"updating http dependency\");\n\n            // to show which version we update to, we already need to know the new version, so we\n            // can pass it to `install_dependency` to spare us from another call to the\n            // registry\n            let force_version = match (dependency.url(), lock) {\n                (None, Some(lock)) => {\n                    let new_version = get_latest_supported_version(dependency).await?;\n                    if lock.version() != new_version {\n                        debug!(dep:% = dependency, old_version = lock.version(), new_version; \"dependency has a new version available\");\n                        progress.log(format!(\n                            \"Updating {} from {} to {new_version}\",\n                            dependency.name(),\n                            lock.version(),\n                        ));\n                    }\n                    Some(new_version)\n                }\n                _ => None,\n            };\n            let new_lock = install_dependency(\n                dependency,\n                None,\n                &deps,\n                force_version,\n                recursive_deps,\n                progress,\n            )\n            .await?;\n            Ok(new_lock)\n        }\n    }\n}\n"
  },
  {
    "path": "crates/core/src/utils.rs",
    "content": "//! Utility functions used throughout the codebase.\nuse crate::{\n    config::Dependency,\n    errors::{DownloadError, InstallError},\n    registry::parse_version_req,\n};\nuse derive_more::derive::{Display, From};\nuse ignore::{WalkBuilder, WalkState};\nuse log::{debug, warn};\nuse path_slash::PathExt as _;\nuse rayon::prelude::*;\nuse semver::Version;\nuse sha2::{Digest as _, Sha256};\nuse std::{\n    borrow::Cow,\n    env,\n    ffi::OsStr,\n    fs,\n    io::Read,\n    path::{Path, PathBuf},\n    sync::{Arc, mpsc},\n};\nuse tokio::process::Command;\n\n/// Newtype for the string representation of an integrity checksum (SHA256).\n#[derive(Debug, Clone, PartialEq, Eq, Hash, From, Display)]\n#[from(Cow<'static, str>, String, &'static str)]\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\npub struct IntegrityChecksum(pub String);\n\n/// Get the location where the token file is stored or read from.\n///\n/// The token file is stored in the home directory of the user, or in the current directory\n/// if the home cannot be found, in a hidden folder called `.soldeer`. 
The token file is called\n/// `.soldeer_login`.\n///\n/// The path can be overridden by setting the `SOLDEER_LOGIN_FILE` environment variable.\npub fn login_file_path() -> Result<PathBuf, std::io::Error> {\n    if let Ok(file_path) = env::var(\"SOLDEER_LOGIN_FILE\") &&\n        !file_path.is_empty()\n    {\n        debug!(\"using soldeer login file defined in environment variable\");\n        return Ok(file_path.into());\n    }\n\n    // if home dir cannot be found, use the current dir\n    let dir = home::home_dir().unwrap_or(env::current_dir()?);\n    let security_directory = dir.join(\".soldeer\");\n    if !security_directory.exists() {\n        debug!(dir:?; \".soldeer folder does not exist, creating it\");\n        fs::create_dir(&security_directory)?;\n    }\n    let login_file = security_directory.join(\".soldeer_login\");\n    debug!(login_file:?; \"path to login file\");\n    Ok(login_file)\n}\n\n/// Check if any filename in the list of paths starts with a period.\npub fn check_dotfiles(files: &[PathBuf]) -> bool {\n    files\n        .par_iter()\n        .any(|file| file.file_name().unwrap_or_default().to_string_lossy().starts_with('.'))\n}\n\n/// Sanitize a filename by replacing invalid characters with a dash.\npub fn sanitize_filename(dependency_name: &str) -> String {\n    let options =\n        sanitize_filename::Options { truncate: true, windows: cfg!(windows), replacement: \"-\" };\n\n    let filename = sanitize_filename::sanitize_with_options(dependency_name, options);\n    debug!(filename; \"sanitized filename\");\n    filename\n}\n\n/// Hash the contents of a Reader with SHA256\npub fn hash_content<R: Read>(content: &mut R) -> [u8; 32] {\n    let mut hasher = Sha256::new();\n    let mut buf = [0; 1024];\n    while let Ok(size) = content.read(&mut buf) {\n        if size == 0 {\n            break;\n        }\n        hasher.update(&buf[0..size]);\n    }\n    hasher.finalize().into()\n}\n\n/// Walk a folder and compute the SHA256 hash of all 
non-hidden and non-ignored files inside the\n/// dir, combining them into a single hash.\n///\n/// The paths of the folders and files are hashed too, so the integrity of their names and\n/// location can be checked.\npub fn hash_folder(folder_path: impl AsRef<Path>) -> Result<IntegrityChecksum, std::io::Error> {\n    debug!(path:? = folder_path.as_ref(); \"hashing folder\");\n    // a list of hashes, one for each DirEntry\n    let root_path = Arc::new(dunce::canonicalize(folder_path.as_ref())?);\n\n    let (tx, rx) = mpsc::channel::<[u8; 32]>();\n\n    // we use a parallel walker to speed things up\n    let walker = WalkBuilder::new(&folder_path)\n        .filter_entry(|entry| {\n            !(entry.path().is_dir() && entry.path().file_name().unwrap_or_default() == \".git\")\n        })\n        .hidden(false)\n        .require_git(false)\n        .parents(false)\n        .git_global(false)\n        .git_exclude(false)\n        .build_parallel();\n    walker.run(|| {\n        let tx = tx.clone();\n        let root_path = Arc::clone(&root_path);\n        // function executed for each DirEntry\n        Box::new(move |result| {\n            let Ok(entry) = result else {\n                return WalkState::Continue;\n            };\n            let path = entry.path();\n            // first hash the filename/dirname to make sure it can't be renamed or removed\n            let mut hasher = Sha256::new();\n            hasher.update(\n                path.strip_prefix(root_path.as_ref())\n                    .expect(\"path should be a child of root\")\n                    .to_slash_lossy()\n                    .as_bytes(),\n            );\n            // for files, also hash the contents\n            if let Some(true) = entry.file_type().map(|t| t.is_file()) {\n                if let Ok(file) = fs::File::open(path) {\n                    let mut reader = std::io::BufReader::new(file);\n                    let hash = hash_content(&mut reader);\n                    
hasher.update(hash);\n                } else {\n                    warn!(path:?; \"could not read file while hashing folder\");\n                }\n            }\n            // record the hash for that file/folder in the list\n            let hash: [u8; 32] = hasher.finalize().into();\n            tx.send(hash)\n                .expect(\"Channel receiver should never be dropped before end of function scope\");\n            WalkState::Continue\n        })\n    });\n    drop(tx);\n    let mut hasher = Sha256::new();\n    // this cannot happen before tx is dropped safely\n    let mut hashes = Vec::new();\n    while let Ok(msg) = rx.recv() {\n        hashes.push(msg);\n    }\n    // sort hashes\n    hashes.par_sort_unstable();\n    // hash the hashes (yo dawg...)\n    for hash in hashes.iter() {\n        hasher.update(hash);\n    }\n    let hash: [u8; 32] = hasher.finalize().into();\n    let hash = const_hex::encode(hash);\n    debug!(path:? = folder_path.as_ref(), hash; \"folder hash was computed\");\n    Ok(hash.into())\n}\n\n/// Compute the SHA256 hash of the contents of a file\npub fn hash_file(path: impl AsRef<Path>) -> Result<IntegrityChecksum, std::io::Error> {\n    debug!(path:? = path.as_ref(); \"hashing file\");\n    let file = fs::File::open(&path)?;\n    let mut reader = std::io::BufReader::new(file);\n    let bytes = hash_content(&mut reader);\n    let hash = const_hex::encode(bytes);\n    debug!(path:? 
= path.as_ref(), hash; \"file hash was computed\");\n    Ok(hash.into())\n}\n\n/// Run a `git` command with the given arguments in the given directory.\n///\n/// The function output is parsed as a UTF-8 string and returned.\npub async fn run_git_command<I, S>(\n    args: I,\n    current_dir: Option<&PathBuf>,\n) -> Result<String, DownloadError>\nwhere\n    I: IntoIterator<Item = S> + Clone,\n    S: AsRef<OsStr>,\n{\n    let mut git = Command::new(\"git\");\n    git.args(args.clone()).env(\"GIT_TERMINAL_PROMPT\", \"0\");\n    if let Some(current_dir) = current_dir {\n        git.current_dir(\n            canonicalize(current_dir)\n                .await\n                .map_err(|e| DownloadError::IOError { path: current_dir.clone(), source: e })?,\n        );\n    }\n    let git = git.output().await.map_err(|e| DownloadError::GitError {\n        message: e.to_string(),\n        args: args.clone().into_iter().map(|a| a.as_ref().to_string_lossy().into_owned()).collect(),\n    })?;\n    if !git.status.success() {\n        return Err(DownloadError::GitError {\n            message: String::from_utf8(git.stderr).unwrap_or_default(),\n            args: args.into_iter().map(|a| a.as_ref().to_string_lossy().into_owned()).collect(),\n        });\n    }\n    Ok(String::from_utf8(git.stdout).expect(\"git command output should be valid utf-8\"))\n}\n\n/// Run a `forge` command with the given arguments in the given directory.\n///\n/// The function output is parsed as a UTF-8 string and returned.\npub async fn run_forge_command<I, S>(\n    args: I,\n    current_dir: Option<&PathBuf>,\n) -> Result<String, InstallError>\nwhere\n    I: IntoIterator<Item = S>,\n    S: AsRef<OsStr>,\n{\n    let mut forge = Command::new(\"forge\");\n    forge.args(args);\n    if let Some(current_dir) = current_dir {\n        forge.current_dir(\n            canonicalize(current_dir)\n                .await\n                .map_err(|e| InstallError::IOError { path: current_dir.clone(), source: e })?,\n 
       );\n    }\n    let forge = forge.output().await.map_err(|e| InstallError::ForgeError(e.to_string()))?;\n    if !forge.status.success() {\n        return Err(InstallError::ForgeError(String::from_utf8(forge.stderr).unwrap_or_default()));\n    }\n    Ok(String::from_utf8(forge.stdout).expect(\"forge command output should be valid utf-8\"))\n}\n\n/// Remove/uninstall the `forge-std` library installed as a git submodule in a foundry project.\n///\n/// This function removes the `forge-std` submodule, the `.gitmodules` file and the `lib` directory\n/// from the project.\npub async fn remove_forge_lib(root: impl AsRef<Path>) -> Result<(), InstallError> {\n    debug!(\"removing forge-std installed as a git submodule\");\n    let gitmodules_path = root.as_ref().join(\".gitmodules\");\n    let lib_dir = root.as_ref().join(\"lib\");\n    let forge_std_dir = lib_dir.join(\"forge-std\");\n    if forge_std_dir.exists() {\n        run_git_command(\n            &[\"rm\", &forge_std_dir.to_string_lossy()],\n            Some(&root.as_ref().to_path_buf()),\n        )\n        .await?;\n        debug!(\"removed lib/forge-std\");\n    }\n    if lib_dir.exists() {\n        fs::remove_dir_all(&lib_dir)\n            .map_err(|e| InstallError::IOError { path: lib_dir.clone(), source: e })?;\n        debug!(\"removed lib dir\");\n    }\n    if gitmodules_path.exists() {\n        fs::remove_file(&gitmodules_path)\n            .map_err(|e| InstallError::IOError { path: lib_dir, source: e })?;\n        debug!(\"removed .gitmodules file\");\n    }\n    Ok(())\n}\n\n/// Canonicalize a path, resolving symlinks and relative paths.\n///\n/// This function also normalizes paths on Windows to use the MS-DOS format (as opposed to UNC)\n/// whenever possible.\npub async fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, std::io::Error> {\n    let path = path.as_ref().to_path_buf();\n    tokio::task::spawn_blocking(move || dunce::canonicalize(&path)).await?\n}\n\n/// Canonicalize a path, 
resolving symlinks and relative paths, synchronously.\n///\n/// This function also normalizes paths on Windows to use the MS-DOS format (as opposed to UNC)\n/// whenever possible.\npub fn canonicalize_sync(path: impl AsRef<Path>) -> Result<PathBuf, std::io::Error> {\n    dunce::canonicalize(path)\n}\n\n/// Check if a path corresponds to the provided dependency.\n///\n/// The folder does not need to exist. The folder name must start with the dependency name\n/// (sanitized). For dependencies with a semver-compliant version requirement, any folder with a\n/// version that matches will give a result of `true`. Otherwise, the folder name must contain the\n/// version requirement string after the dependency name.\npub fn path_matches(dependency: &Dependency, path: impl AsRef<Path>) -> bool {\n    let path = path.as_ref();\n    let Some(dir_name) = path.file_name() else {\n        return false;\n    };\n    let dir_name = dir_name.to_string_lossy();\n    let prefix = format!(\"{}-\", sanitize_filename(dependency.name()));\n    if !dir_name.starts_with(&prefix) {\n        return false;\n    }\n    match (\n        parse_version_req(dependency.version_req()),\n        Version::parse(dir_name.strip_prefix(&prefix).expect(\"prefix should be present\")),\n    ) {\n        (None, _) | (Some(_), Err(_)) => {\n            // not semver compliant\n            dir_name == format!(\"{prefix}{}\", sanitize_filename(dependency.version_req()))\n        }\n        (Some(version_req), Ok(version)) => version_req.matches(&version),\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use std::fs;\n    use testdir::testdir;\n\n    fn create_test_folder(name: Option<&str>) -> PathBuf {\n        let dir = testdir!();\n        let named_dir = match name {\n            None => dir,\n            Some(name) => {\n                let d = dir.join(name);\n                fs::create_dir(&d).unwrap();\n                d\n            }\n        };\n        
fs::write(named_dir.join(\"a.txt\"), \"this is a test file\").unwrap();\n        fs::write(named_dir.join(\"b.txt\"), \"this is a second test file\").unwrap();\n        fs::write(named_dir.join(\"ignored.txt\"), \"this file should be ignored\").unwrap();\n        fs::write(named_dir.join(\".gitignore\"), \"ignored.txt\\n\").unwrap();\n        fs::write(\n            named_dir.parent().unwrap().join(\".gitignore\"),\n            format!(\"{}/a.txt\", named_dir.file_name().unwrap().to_string_lossy()),\n        )\n        .unwrap(); // this file should be ignored because it's in the parent dir\n        dunce::canonicalize(named_dir).unwrap()\n    }\n\n    #[test]\n    fn test_hash_content() {\n        let mut content = \"this is a test file\".as_bytes();\n        let hash = hash_content(&mut content);\n        assert_eq!(\n            const_hex::encode(hash),\n            \"5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625\".to_string()\n        );\n    }\n\n    #[test]\n    fn test_hash_content_content_sensitive() {\n        let mut content = \"foobar\".as_bytes();\n        let hash = hash_content(&mut content);\n        let mut content2 = \"baz\".as_bytes();\n        let hash2 = hash_content(&mut content2);\n        assert_ne!(hash, hash2);\n    }\n\n    #[test]\n    fn test_hash_file() {\n        let path = testdir!().join(\"test.txt\");\n        fs::write(&path, \"this is a test file\").unwrap();\n        let hash = hash_file(&path).unwrap();\n        assert_eq!(hash, \"5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625\".into());\n    }\n\n    #[test]\n    fn test_hash_folder_abs_path_insensitive() {\n        let folder1 = create_test_folder(Some(\"dir1\"));\n        let folder2 = create_test_folder(Some(\"dir2\"));\n        let hash1 = hash_folder(&folder1).unwrap();\n        let hash2 = hash_folder(&folder2).unwrap();\n        assert_eq!(\n            hash1.to_string(),\n            
\"c5328a2c3db7582b9074d5f5263ef111b496bbf9cda9b6c5fb0f97f1dc17b766\"\n        );\n        assert_eq!(hash1, hash2);\n        // ignored.txt should be ignored in the checksum calculation, so removing it should yield\n        // the same checksum\n        fs::remove_file(folder1.join(\"ignored.txt\")).unwrap();\n        let hash1 = hash_folder(&folder1).unwrap();\n        assert_eq!(hash1, hash2);\n    }\n\n    #[test]\n    fn test_hash_folder_rel_path_sensitive() {\n        let folder = create_test_folder(Some(\"dir\"));\n        let hash1 = hash_folder(&folder).unwrap();\n        fs::rename(folder.join(\"a.txt\"), folder.join(\"c.txt\")).unwrap();\n        let hash2 = hash_folder(&folder).unwrap();\n        assert_ne!(hash1, hash2);\n    }\n\n    #[test]\n    fn test_hash_folder_content_sensitive() {\n        let folder = create_test_folder(Some(\"dir\"));\n        let hash1 = hash_folder(&folder).unwrap();\n        fs::create_dir(folder.join(\"test\")).unwrap();\n        let hash2 = hash_folder(&folder).unwrap();\n        assert_ne!(hash1, hash2);\n        fs::write(folder.join(\"test/c.txt\"), \"this is a third test file\").unwrap();\n        let hash3 = hash_folder(&folder).unwrap();\n        assert_ne!(hash2, hash3);\n        assert_ne!(hash1, hash3);\n    }\n}\n"
  },
  {
    "path": "flake.nix",
    "content": "{\n  inputs = {\n    nixpkgs.url = \"github:NixOS/nixpkgs/nixpkgs-unstable\";\n    fenix = {\n      url = \"github:nix-community/fenix\";\n      inputs.nixpkgs.follows = \"nixpkgs\";\n    };\n  };\n\n  outputs = { self, nixpkgs, fenix }:\n    let\n      forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed;\n    in\n    {\n      devShells = forAllSystems (system:\n        let\n          pkgs = import nixpkgs {\n            inherit system;\n            overlays = [ fenix.overlays.default ];\n          };\n          toolchain = fenix.packages.${system}.stable.withComponents [\n            \"rustc\"\n            \"cargo\"\n            \"rust-std\"\n            \"clippy-preview\"\n            \"rust-analyzer-preview\"\n            \"rust-src\"\n          ];\n          nightlyToolchain = fenix.packages.${system}.latest.withComponents [\n            \"rustfmt-preview\"\n          ];\n        in\n        {\n          default = pkgs.mkShell {\n            buildInputs = with pkgs; [\n              cargo-nextest\n              foundry\n              nightlyToolchain\n              openssl\n              pkg-config\n              toolchain\n            ];\n\n            RUST_SRC_PATH = \"${toolchain}/lib/rustlib/src/rust/library\";\n          };\n        }\n      );\n    };\n}\n"
  },
  {
    "path": "release-plz.toml",
    "content": "[workspace]\ndependencies_update = true\ngit_release_enable = false        # we only need to create a git tag for one of the crates\ngit_tag_enable = false\npublish = false                   # cargo publish will be done by hand for now\nchangelog_path = \"./CHANGELOG.md\"\n\n[[package]]\nname = \"soldeer-core\"\nversion_group = \"soldeer\"\n\n[[package]]\nname = \"soldeer-commands\"\nversion_group = \"soldeer\"\n\n[[package]]\nname = \"soldeer\"\nversion_group = \"soldeer\"\ngit_tag_name = \"v{{ version }}\"\ngit_release_name = \"v{{ version }}\"\ngit_tag_enable = true\ngit_release_enable = true\n\n[changelog]\nbody = \"\"\"\n\n## `{{ package }}` - [{{ version | trim_start_matches(pat=\"v\") }}]{%- if release_link -%}({{ release_link }}){% endif %} - {{ timestamp | date(format=\"%Y-%m-%d\") }}\n{% for group, commits in commits | group_by(attribute=\"group\") %}\n### {{ group | upper_first }}\n{% for commit in commits %}\n{%- if commit.scope -%}\n- *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}{%- if commit.links %} ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%}){% endif %}\n{% else -%}\n- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}\n{% endif -%}\n{% endfor -%}\n{% endfor -%}\n\"\"\"\n"
  },
  {
    "path": "rustfmt.toml",
    "content": "reorder_imports = true\nimports_granularity = \"Crate\"\nuse_small_heuristics = \"Max\"\ncomment_width = 100\nwrap_comments = true\nbinop_separator = \"Back\"\ntrailing_comma = \"Vertical\"\ntrailing_semicolon = false\nuse_field_init_shorthand = true\nformat_code_in_doc_comments = true\ndoc_comment_code_block_width = 100\n"
  }
]