Repository: mario-eth/soldeer Branch: main Commit: e4aac2865953 Files: 56 Total size: 455.1 KB Directory structure: gitextract_fq4q0vwy/ ├── .config/ │ └── nextest.toml ├── .github/ │ ├── CODE_OF_CONDUCT.md │ ├── ISSUE_TEMPLATE/ │ │ ├── bug_report.yml │ │ ├── config.yml │ │ ├── feature_request.yml │ │ └── registry_request.yml │ ├── PULL_REQUEST_TEMPLATE.md │ ├── dependabot.yml │ └── workflows/ │ ├── release.yml │ └── rust.yml ├── .gitignore ├── .vscode/ │ └── settings.json ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Cargo.toml ├── LICENSE ├── README.md ├── USAGE.md ├── clippy.toml ├── crates/ │ ├── cli/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── commands/ │ │ ├── Cargo.toml │ │ ├── src/ │ │ │ ├── commands/ │ │ │ │ ├── clean.rs │ │ │ │ ├── init.rs │ │ │ │ ├── install.rs │ │ │ │ ├── login.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── push.rs │ │ │ │ ├── uninstall.rs │ │ │ │ └── update.rs │ │ │ ├── lib.rs │ │ │ └── utils.rs │ │ └── tests/ │ │ ├── tests-clean.rs │ │ ├── tests-init.rs │ │ ├── tests-install.rs │ │ ├── tests-login.rs │ │ ├── tests-push.rs │ │ ├── tests-uninstall.rs │ │ └── tests-update.rs │ └── core/ │ ├── Cargo.toml │ └── src/ │ ├── auth.rs │ ├── config.rs │ ├── download.rs │ ├── errors.rs │ ├── install.rs │ ├── lib.rs │ ├── lock/ │ │ └── forge.rs │ ├── lock.rs │ ├── push.rs │ ├── registry.rs │ ├── remappings.rs │ ├── update.rs │ └── utils.rs ├── flake.nix ├── release-plz.toml └── rustfmt.toml ================================================ FILE CONTENTS ================================================ ================================================ FILE: .config/nextest.toml ================================================ [profile.default] retries = { backoff = "exponential", count = 2, delay = "2s", jitter = true } slow-timeout = { period = "1m", terminate-after = 3 } fail-fast = false ================================================ FILE: .github/CODE_OF_CONDUCT.md ================================================ The Soldeer project adheres to the 
[Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct). This code of conduct describes the minimum behavior expected from all contributors. Instances of violations of the Code of Conduct can be reported to the project maintainers on the [Contributors' Telegram Chat](https://t.me/+tn6gOCJseD83OTZk).
Observe that ... happens validations: required: false - type: textarea id: configuration attributes: label: Configuration description: Provide the relevant sections of your `foundry.toml` or `soldeer.toml` file render: toml placeholder: | [soldeer] # Insert the relevant configuration options here validations: required: false ================================================ FILE: .github/ISSUE_TEMPLATE/config.yml ================================================ blank_issues_enabled: true contact_links: - name: Soldeer Contributors Telegram url: https://t.me/+tn6gOCJseD83OTZk about: Please ask and answer questions here. ================================================ FILE: .github/ISSUE_TEMPLATE/feature_request.yml ================================================ name: 💡 Feature Request description: Suggest a feature for Soldeer labels: ['enhancement'] body: - type: markdown attributes: value: | Thanks for taking the time to suggest a feature! Please fill out the sections below to help us understand your request. - type: checkboxes attributes: label: 'I have checked the following:' options: - label: 'I have searched the issues of this repository and believe that this is not a duplicate.' required: true - type: textarea id: problem attributes: label: Problem description: What problem are you facing that you believe this feature would solve? placeholder: A clear and concise description of what the problem is. validations: required: true - type: textarea id: solution attributes: label: Solution description: Describe the solution you'd like to see. placeholder: A clear and concise description of what you want to happen. validations: required: true - type: textarea id: context attributes: label: Additional Context description: Add any other context or screenshots about the feature request here. 
validations: required: false ================================================ FILE: .github/ISSUE_TEMPLATE/registry_request.yml ================================================ name: 📦 Registry Addition description: Suggest a missing package for the Soldeer registry. labels: ['add-dependency'] assignees: ['mario-eth'] body: - type: markdown attributes: value: | Thanks for taking the time to suggest a package for the Soldeer registry! Please fill out the sections below to help us understand your request. - type: checkboxes attributes: label: 'I have checked the following:' options: - label: 'I have searched the issues of this repository and believe that this is not a duplicate.' required: true - type: input id: package-name attributes: label: Package Name description: What is the name of the package you would like to see added to the registry? placeholder: soldeer-package-name validations: required: true - type: input id: project-url attributes: label: Project URL description: Provide a link to the package repository or documentation. placeholder: https://github.com/... validations: required: true - type: textarea id: additional-context attributes: label: Additional Context description: Add any context to help us understand why this package should be added. validations: required: false ================================================ FILE: .github/PULL_REQUEST_TEMPLATE.md ================================================ ================================================ FILE: .github/dependabot.yml ================================================ version: 2 updates: - package-ecosystem: "github-actions" directory: "/" # Check for updates every Monday schedule: interval: "weekly" ================================================ FILE: .github/workflows/release.yml ================================================ name: Release permissions: pull-requests: write contents: write on: push: branches: - main jobs: # Release unpublished packages. 
release-plz-release: name: Release-plz release runs-on: ubuntu-latest permissions: contents: write steps: - name: Generate GitHub token uses: actions/create-github-app-token@v2 id: generate-token with: app-id: ${{ secrets.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Checkout repository uses: actions/checkout@v4 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} - name: Install Rust toolchain uses: dtolnay/rust-toolchain@stable - name: Run release-plz uses: release-plz/action@v0.5 with: command: release env: GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} # CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} # Create a PR with the new versions and changelog, preparing the next release. release-plz-pr: name: Release-plz PR runs-on: ubuntu-latest permissions: contents: write pull-requests: write concurrency: group: release-plz-${{ github.ref }} cancel-in-progress: false steps: - name: Generate GitHub token uses: actions/create-github-app-token@v2 id: generate-token with: app-id: ${{ secrets.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Checkout repository uses: actions/checkout@v4 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} - name: Install Rust toolchain uses: dtolnay/rust-toolchain@stable - name: Run release-plz uses: release-plz/action@v0.5 with: command: release-pr env: GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} # CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} ================================================ FILE: .github/workflows/rust.yml ================================================ name: Rust on: push: branches: ['main'] pull_request: env: CARGO_TERM_COLOR: always jobs: build-test: strategy: matrix: platform: [ubuntu-latest, windows-latest, macos-latest] runs-on: ${{ matrix.platform }} steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - uses: taiki-e/install-action@nextest - name: Install Foundry uses: 
foundry-rs/foundry-toolchain@v1 - name: Run tests run: cargo nextest run doctests: runs-on: ubuntu-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - run: cargo test --workspace --doc feature-checks: runs-on: ubuntu-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - uses: taiki-e/install-action@cargo-hack - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - name: cargo hack run: cargo hack check --feature-powerset --depth 2 clippy: runs-on: ubuntu-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable with: components: clippy - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - run: cargo clippy --workspace --all-targets --all-features env: RUSTFLAGS: -Dwarnings docs: runs-on: ubuntu-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - run: cargo doc --workspace --all-features --no-deps --document-private-items env: RUSTDOCFLAGS: '--cfg docsrs -D warnings' fmt: runs-on: ubuntu-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly with: components: rustfmt - run: cargo fmt --all --check ================================================ FILE: .gitignore ================================================ /target dependencies/ .dependency_reading.toml remappings.txt crawler/target/ *.DS_Store* package-lock.json package.json repositories.db crawler/node_modules/ crawler/zipped/* crawler/zipped/ src/soldeer.toml *soldeer.lock test/* !emptyfile !emptyfile2 test_push_sensitive test_push_skip_sensitive .soldeer/ ================================================ FILE: .vscode/settings.json ================================================ { "git.ignoreLimitWarning": true, "editor.formatOnSave": true, 
"rust-analyzer.rustfmt.extraArgs": ["+nightly"], "[rust]": { "editor.defaultFormatter": "rust-lang.rust-analyzer" }, "rust-analyzer.cargo.features": "all" } ================================================ FILE: CHANGELOG.md ================================================ # Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] ## `soldeer` - [0.11.0](https://github.com/mario-eth/soldeer/compare/v0.10.1...v0.11.0) - 2026-04-16 ### Fixed - *(commands)* do not init logging backend in the library crate ([#350](https://github.com/mario-eth/soldeer/pull/350)) ## `soldeer-commands` - [0.11.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.10.1...soldeer-commands-v0.11.0) - 2026-04-16 ### Fixed - *(commands)* do not init logging backend in the library crate ([#350](https://github.com/mario-eth/soldeer/pull/350)) ### Other - *(deps)* update dependencies ([#355](https://github.com/mario-eth/soldeer/pull/355)) ## `soldeer-core` - [0.11.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.10.1...soldeer-core-v0.11.0) - 2026-04-16 ### Other - *(deps)* update dependencies ([#355](https://github.com/mario-eth/soldeer/pull/355)) - *(install)* concurrent subdependencies install ([#352](https://github.com/mario-eth/soldeer/pull/352)) ## `soldeer-core` - [0.10.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.10.0...soldeer-core-v0.10.1) - 2026-02-16 ### Added - *(core)* support foundry.lock file ([#347](https://github.com/mario-eth/soldeer/pull/347)) ## `soldeer-core` - [0.10.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.9.0...soldeer-core-v0.10.0) - 2025-12-03 ### Added - *(config)* [**breaking**] allow to specify the project root path for dependencies ([#341](https://github.com/mario-eth/soldeer/pull/341)) ## 
`soldeer` - [0.9.0](https://github.com/mario-eth/soldeer/compare/v0.8.0...v0.9.0) - 2025-10-16 ### Other - update Cargo.lock dependencies ## `soldeer-commands` - [0.9.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.8.0...soldeer-commands-v0.9.0) - 2025-10-16 ### Added - detect project root ([#333](https://github.com/mario-eth/soldeer/pull/333)) - *(commands)* add `soldeer clean` command ([#332](https://github.com/mario-eth/soldeer/pull/332)) ## `soldeer-core` - [0.9.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.8.0...soldeer-core-v0.9.0) - 2025-10-16 ### Added - detect project root ([#333](https://github.com/mario-eth/soldeer/pull/333)) ### Other - *(deps)* update deps ([#336](https://github.com/mario-eth/soldeer/pull/336)) ## `soldeer-commands` - [0.8.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.7.1...soldeer-commands-v0.8.0) - 2025-09-29 ### Added - add support for private packages ([#327](https://github.com/mario-eth/soldeer/pull/327)) ## `soldeer-core` - [0.8.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.7.1...soldeer-core-v0.8.0) - 2025-09-29 ### Added - add support for private packages ([#327](https://github.com/mario-eth/soldeer/pull/327)) ## `soldeer-core` - [0.7.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.7.0...soldeer-core-v0.7.1) - 2025-09-19 ### Fixed - *(core)* install git submodules ([#328](https://github.com/mario-eth/soldeer/pull/328)) ## `soldeer` - [0.7.0](https://github.com/mario-eth/soldeer/compare/v0.6.1...v0.7.0) - 2025-09-02 ### Other - rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319)) ## `soldeer-commands` - [0.7.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.6.1...soldeer-commands-v0.7.0) - 2025-09-02 ### Added - *(registry)* use new API endpoints ([#318](https://github.com/mario-eth/soldeer/pull/318)) - add support for CLI tokens ([#311](https://github.com/mario-eth/soldeer/pull/311)) ### Fixed - 
*(cmd)* avoid panicking if logger was already initialized ([#312](https://github.com/mario-eth/soldeer/pull/312)) ### Other - rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319)) ## `soldeer-core` - [0.7.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.6.1...soldeer-core-v0.7.0) - 2025-09-02 ### Added - *(registry)* use new API endpoints ([#318](https://github.com/mario-eth/soldeer/pull/318)) - add support for CLI tokens ([#311](https://github.com/mario-eth/soldeer/pull/311)) ### Fixed - *(cmd)* avoid panicking if logger was already initialized ([#312](https://github.com/mario-eth/soldeer/pull/312)) ### Other - rust edition 2024 ([#319](https://github.com/mario-eth/soldeer/pull/319)) ## `soldeer-core` - [0.6.1](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.6.0...soldeer-core-v0.6.1) - 2025-07-23 ### Other - add nix flake and fix clippy ([#301](https://github.com/mario-eth/soldeer/pull/301)) - remove bzip2 support ([#298](https://github.com/mario-eth/soldeer/pull/298)) ## `soldeer` - [0.6.0](https://github.com/mario-eth/soldeer/compare/v0.5.4...v0.6.0) - 2025-07-10 ### Other - update Cargo.lock dependencies ## `soldeer-commands` - [0.6.0](https://github.com/mario-eth/soldeer/compare/soldeer-commands-v0.5.4...soldeer-commands-v0.6.0) - 2025-07-10 ### Added - *(commands)* if adding a dependency which is already present, re-install all ([#289](https://github.com/mario-eth/soldeer/pull/289)) ### Fixed - *(core)* recursive subdependencies install ([#288](https://github.com/mario-eth/soldeer/pull/288)) - *(commands)* canonicalize path in push command ([#284](https://github.com/mario-eth/soldeer/pull/284)) ## `soldeer-core` - [0.6.0](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.5.4...soldeer-core-v0.6.0) - 2025-07-10 ### Added - *(core)* remove forge requirement for recursive install ([#281](https://github.com/mario-eth/soldeer/pull/281)) ### Fixed - *(core)* recursive subdependencies install 
([#288](https://github.com/mario-eth/soldeer/pull/288)) - *(commands)* canonicalize path in push command ([#284](https://github.com/mario-eth/soldeer/pull/284)) ## `soldeer` - [0.5.4](https://github.com/mario-eth/soldeer/compare/v0.5.3...v0.5.4) - 2025-04-27 ### Other - update Cargo.lock dependencies ## `soldeer-core` - [0.5.4](https://github.com/mario-eth/soldeer/compare/soldeer-core-v0.5.3...soldeer-core-v0.5.4) - 2025-04-27 ### Fixed - *(registry)* version resolution when no SemVer ([#271](https://github.com/mario-eth/soldeer/pull/271)) ## `soldeer` - [0.5.3](https://github.com/mario-eth/soldeer/compare/v0.5.2...v0.5.3) - 2025-03-18 ### Changed - fix(core): remove hardcoded git domains by @puuuuh in https://github.com/mario-eth/soldeer/pull/244 - refactor!: logging by @beeb in https://github.com/mario-eth/soldeer/pull/242 - fix(push): ensure version is non-empty when pushing to registry by @kubkon in https://github.com/mario-eth/soldeer/pull/247 - feat!: improve toml validation by @beeb in https://github.com/mario-eth/soldeer/pull/248 - chore(deps): update deps by @beeb in https://github.com/mario-eth/soldeer/pull/257 ## `soldeer` - [0.5.2](https://github.com/mario-eth/soldeer/compare/v0.5.1...v0.5.2) - 2024-11-21 ### Changed - fix(core): gitignore config for integrity checksum by @beeb in #233 ## `soldeer` - [0.5.1](https://github.com/mario-eth/soldeer/compare/v0.5.0...v0.5.1) - 2024-11-13 ### Changed - fix(core): keep duplicate and orphan remappings by @beeb in #226 ## `soldeer` - [0.5.0](https://github.com/mario-eth/soldeer/compare/v0.4.1...v0.5.0) - 2024-11-07 ### Changed - 185 add cli args to skip interaction for all commands by @mario-eth in #218 ## `soldeer` - [0.4.1](https://github.com/mario-eth/soldeer/compare/v0.4.0...v0.4.1) - 2024-10-11 ### Changed - updated readme by @mario-eth in #209 - fix(core): all commands add the `[dependencies]` table in config if m… by @mario-eth in #214 - Add core version by @mario-eth in #210 ## `soldeer` - 
[0.4.0](https://github.com/mario-eth/soldeer/compare/v0.3.4...v0.4.0) - 2024-10-07 ### Changed - refactor!: v0.4.0 main rewrite by @beeb in #150 - docs(core): document `auth` and `config` modules by @beeb in #175 - feat: format multiline remappings array by @beeb in #174 - docs(core): add documentation by @beeb in #177 - docs(core): add documentation by @beeb in #178 - docs(core): update and utils modules by @beeb in #179 - test(commands): init integration tests by @beeb in #180 - refactor!: minor refactor and integration tests by @beeb in #186 - test(commands): add integration test (install/uninstall) by @beeb in #190 - feat(core): improve remappings matching by @beeb in #191 - fix(core): updating git dependencies by @beeb in #192 - feat(commands): update libs in foundry config during init by @beeb in #193 - refactor: remove all unwraps by @beeb in #194 - ci: speed up test by using cargo-nextest by @beeb in #196 - perf: lock-free synchronization, add rayon by @crypdoughdoteth in #198 - feat(cli): add banner by @xyizko in #199 - refactor: use new syntax for bon builders by @beeb in #200 - ci: add nextest config by @beeb in #201 - test(commands): integration tests for push by @beeb in #197 - fix(core): `path_matches` semver comparison by @beeb in #205 - fix(cli): respect environment and tty preference for color by @beeb in #206 - test(commands): fix tests when run with `cargo test` by @beeb in #207 ## `soldeer` - [0.3.4](https://github.com/mario-eth/soldeer/compare/v0.3.3...v0.3.4) - 2024-09-04 ### Changed - Moving the canonicalization to respect windows slashing by @mario-eth in #172 ## `soldeer` - [0.3.3](https://github.com/mario-eth/soldeer/compare/v0.3.2...v0.3.3) - 2024-09-04 ### Changed - chore(deps): bump zip-extract to 0.2.0 by @DaniPopes in #161 - fix(config): preserve existing remappings by @beeb in #171 ## `soldeer` - [0.3.2](https://github.com/mario-eth/soldeer/compare/v0.3.1...v0.3.2) - 2024-08-29 ### Changed - hotfix os independent bytes by @mario-eth 
in #163 - remappings_generated -> remappings_generate typo by @0xCalibur in #164 - fix(utils): always consider relative path in hashing by @beeb in #168 ## `soldeer` - [0.3.1](https://github.com/mario-eth/soldeer/compare/v0.3.0...v0.3.1) - 2024-08-27 ### Changed - Hotfix on OS independent bytes on hashing ## `soldeer` - [0.3.0](https://github.com/mario-eth/soldeer/compare/v0.2.19...v0.3.0) - 2024-08-27 ### Changed - Updated readme and version by @mario-eth in #104 - 89 add soldeer uninstall by @mario-eth in #105 - Feat/soldeer init by @Solthodox in #56 - style(fmt): update formatter configuration and improve consistency by @beeb in #111 - refactor!: cleanup, more idiomatic rust by @beeb in #113 - perf(lock): better handling of missing lockfile by @beeb in #114 - refactor!: big rewrite by @beeb in #118 - fix(config)!: fix remappings logic and logging by @beeb in #125 - chore: update deps and remove serde_derive by @beeb in #129 - Handling dependency name sanitization by @mario-eth in #127 - fix: parallel downloads order by @beeb in #133 - Recursive Dependencies by @mario-eth in #136 - Removing transform git to http by @mario-eth in #137 - Hotfixes and extra tests before 0.3.0 by @mario-eth in #139 - Hotfixes after refactor and extra tests by @mario-eth in #141 - feat: add integrity checksum to lockfile by @beeb in #132 - chore: update logo by @beeb in #143 - chore: enable some more lints by @DaniPopes in #160 - chore(deps): replace simple-home-dir with home by @DaniPopes in #157 - chore: remove unused dev dep env_logger by @DaniPopes in #159 - chore(deps): replace `once_cell` with `std::sync` by @DaniPopes in #158 - Using git branch/tag to pull dependencies by @mario-eth in #147 ================================================ FILE: CONTRIBUTING.md ================================================ ## Contributing to Soldeer Thanks for your interest in improving Soldeer! There are multiple opportunities to contribute at any level. 
It doesn't matter if you are just getting started with Rust or are the most weathered expert, we can use your help. This document will help you get started. **Do not let the document intimidate you**. It should be considered as a guide to help you navigate the process. The [Contributors' Telegram Chat][telegram] is available for any concerns you may have that are not covered in this guide. ### Code of Conduct The Soldeer project adheres to the [Rust Code of Conduct][rust-coc]. This code of conduct describes the _minimum_ behavior expected from all contributors. Instances of violations of the Code of Conduct can contact the project maintainers on the [Contributors' Telegram Chat][telegram]. ### Ways to contribute There are fundamentally three ways an individual can contribute: 1. **By opening an issue:** For example, if you believe that you have uncovered a bug in Soldeer, creating a new issue in the issue tracker is the way to report it. 2. **By adding context:** Providing additional context to existing issues, such as screenshots and code snippets, which help resolve issues. 3. **By resolving issues:** Typically this is done in the form of either demonstrating that the issue reported is not a problem after all, or more often, by opening a pull request that fixes the underlying problem, in a concrete and reviewable manner. **Anybody can participate in any stage of contribution**. We urge you to participate in the discussion around bugs and participate in reviewing PRs. ### Contributions Related to Spelling and Grammar At this time, we will not be accepting contributions that only fix spelling or grammatical errors in documentation, code or elsewhere. ### Asking for help If you have reviewed existing documentation and still have questions, or you are having problems, you can get help in the following ways: - **Asking in the support Telegram:** The [Soldeer Support Telegram][telegram] is a fast and easy way to ask questions. 
As Soldeer is still in heavy development, the documentation can be a bit scattered. ### Submitting a bug report When filing a new bug report in the issue tracker, you will be presented with a basic form to fill out. If you believe that you have uncovered a bug, please fill out the form to the best of your ability. Do not worry if you cannot answer every detail; just fill in what you can. Contributors will ask follow-up questions if something is unclear. The most important pieces of information we need in a bug report are: - The Soldeer version you are on (and that it is up to date) - The platform you are on (Windows, macOS, an M1 Mac or Linux) - Code snippets if this is happening in relation to testing or building code - Concrete steps to reproduce the bug In order to rule out the possibility of the bug being in your project, the code snippets should be as minimal as possible. It is better if you can reproduce the bug with a small snippet as opposed to an entire project! See [this guide][mcve] on how to create a minimal, complete, and verifiable example. ### Submitting a feature request When adding a feature request in the issue tracker, you will be presented with a basic form to fill out. Please include as detailed of an explanation as possible of the feature you would like, adding additional context if necessary. If you have examples of other tools that have the feature you are requesting, please include them as well. ### Resolving an issue Pull requests are the way concrete changes are made to the code, documentation, and dependencies of Soldeer. 
Please also make sure that the following commands pass if you have changed the code: ```sh cargo check --all cargo test --all --all-features cargo +nightly fmt -- --check cargo +nightly clippy --all --all-targets --all-features -- -D warnings ``` If you are working in VSCode, we recommend you install the [rust-analyzer](https://rust-analyzer.github.io/) extension, and use the following VSCode user settings: ```json "editor.formatOnSave": true, "rust-analyzer.rustfmt.extraArgs": ["+nightly"], "[rust]": { "editor.defaultFormatter": "rust-lang.rust-analyzer" } ``` #### Adding tests If the change being proposed alters code, it is either adding new functionality to Soldeer, or fixing existing, broken functionality. In both of these cases, the pull request should include one or more tests to ensure that Soldeer does not regress in the future. Types of tests include: - **Unit tests**: Functions which have very specific tasks should be unit tested. - **Integration tests**: For general purpose, far reaching functionality, integration tests should be added. The best way to add a new integration test is to look at existing ones and follow the style. #### Commits It is a recommended best practice to keep your changes as logically grouped as possible within individual commits. There is no limit to the number of commits any single pull request may have, and many contributors find it easier to review changes that are split across multiple commits. That said, if you have a number of commits that are "checkpoints" and don't represent a single logical change, please squash those together. Please adhere to the [Conventional Commits][conventional-commits] format for commit messages and PR titles. Prefer all-lowercase descriptions when possible. 
The following types should be used: - **build**: changes that affect the build system or external dependencies (example scope: cargo) - **chore**: tool configuration, metadata, manifest changes, dependencies updates, miscellaneous changes (anything that doesn't fit the other types) - **ci**: changes to the CI configuration files and scripts (GitHub Actions) - **docs**: documentation-only changes (doc comments, mdbook) - **feat**: a new feature - **fix**: a bug fix - **perf**: a code change that improves performance - **refactor**: a code change that neither fixes a bug nor adds a feature - **revert**: reverting an older commit or change - **style**: changes that do not affect the meaning of the code (whitespace, formatting, etc.) - **test**: adding or modifying tests (no change to lib/binary source code allowed) #### Opening the pull request From within GitHub, opening a new pull request will present you with a template that should be filled out. Please try your best at filling out the details, but feel free to skip parts if you're not sure what to put. Make sure to use the [Conventional Commits][conventional-commits] format described above for your PR title. #### Discuss and update You will probably get feedback or requests for changes to your pull request. This is a big part of the submission process, so don't be discouraged! Some contributors may sign off on the pull request right away, others may have more detailed comments or feedback. This is a necessary part of the process in order to evaluate whether the changes are correct and necessary. **Any community member can review a PR, so you might get conflicting feedback**. Keep an eye out for comments from code owners to provide guidance on conflicting feedback. #### Reviewing pull requests **Any Soldeer community member is welcome to review any pull request**. 
All contributors who choose to review and provide feedback on pull requests have a responsibility to both the project and individual making the contribution. Reviews and feedback must be helpful, insightful, and geared towards improving the contribution as opposed to simply blocking it. If there are reasons why you feel the PR should not be merged, explain what those are. Do not expect to be able to block a PR from advancing simply because you say "no" without giving an explanation. Be open to having your mind changed. Be open to working _with_ the contributor to make the pull request better. Reviews that are dismissive or disrespectful of the contributor or any other reviewers are strictly counter to the Code of Conduct. When reviewing a pull request, the primary goals are for the codebase to improve and for the person submitting the request to succeed. **Even if a pull request is not merged, the submitter should come away from the experience feeling like their effort was not unappreciated**. Every PR from a new contributor is an opportunity to grow the community. ##### Review a bit at a time Do not overwhelm new contributors. It is tempting to micro-optimize and make everything about relative performance, perfect grammar, or exact style matches. Do not succumb to that temptation. Focus first on the most significant aspects of the change: 1. Does this change make sense for Soldeer? 2. Does this change make Soldeer better, even if only incrementally? 3. Are there clear bugs or larger scale issues that need attending? 4. Are the commit messages readable and correct? If it contains a breaking change, is it clear enough? Note that only **incremental** improvement is needed to land a PR. This means that the PR does not need to be perfect, only better than the status quo. Follow-up PRs may be opened to continue iterating. 
When changes are necessary, _request_ them, do not _demand_ them, and **do not assume that the submitter already knows how to add a test or run a benchmark**. Specific performance optimization techniques, coding styles and conventions change over time. The first impression you give to a new contributor never does. Nits (requests for small changes that are not essential) are fine, but try to avoid stalling the pull request. Most nits can typically be fixed by the Soldeer maintainers merging the pull request, but they can also be an opportunity for the contributor to learn a bit more about the project. It is always good to clearly indicate nits when you comment, e.g.: `Nit: change foo() to bar(). But this is not blocking`. If your comments were addressed but were not folded after new commits, or if they proved to be mistaken, please, [hide them][hiding-a-comment] with the appropriate reason to keep the conversation flow concise and relevant. ##### Be aware of the person behind the code Be aware that _how_ you communicate requests and reviews in your feedback can have a significant impact on the success of the pull request. Yes, we may merge a particular change that makes Soldeer better, but the individual might just not want to have anything to do with Soldeer ever again. The goal is not just having good code. ##### Abandoned or stale pull requests If a pull request appears to be abandoned or stalled, it is polite to first check with the contributor to see if they intend to continue the work before checking if they would mind if you took it over (especially if it just has nits left). When doing so, it is courteous to give the original contributor credit for the work they started, either by preserving their name and e-mail address in the commit log, or by using the `Author: ` or `Co-authored-by: ` metadata tag in the commits. _Adapted from the [ethers-rs contributing guide](https://github.com/gakonst/ethers-rs/blob/master/CONTRIBUTING.md)_. 
[telegram]: https://t.me/+tn6gOCJseD83OTZk [rust-coc]: https://www.rust-lang.org/policies/code-of-conduct [mcve]: https://stackoverflow.com/help/mcve [hiding-a-comment]: https://help.github.com/articles/managing-disruptive-comments/#hiding-a-comment [conventional-commits]: https://www.conventionalcommits.org/en/v1.0.0 ================================================ FILE: Cargo.toml ================================================ [workspace] members = ["crates/cli", "crates/core", "crates/commands"] resolver = "2" [workspace.package] authors = ["m4rio"] categories = ["development-tools"] description = "A minimal Solidity package manager written in Rust, best used with Foundry" edition = "2024" exclude = ["tests/"] homepage = "https://soldeer.xyz" keywords = ["solidity", "package-manager", "foundry"] license = "MIT" readme = "./README.md" repository = "https://github.com/mario-eth/soldeer" rust-version = "1.88" version = "0.11.0" [workspace.lints.clippy] dbg-macro = "warn" manual-string-new = "warn" uninlined-format-args = "warn" use-self = "warn" redundant-clone = "warn" unwrap_used = "warn" [workspace.lints.rust] rust-2018-idioms = "warn" unreachable-pub = "warn" unused-must-use = "warn" redundant-lifetimes = "warn" [workspace.dependencies] bon = "3.0.0" clap = { version = "4.5.9", features = ["derive"] } cliclack = "0.5.4" derive_more = { version = "2.0.1", features = ["from", "display", "from_str"] } log = { version = "0.4.25", features = ["kv"] } mockito = "1.5.0" path-slash = "0.2.1" rayon = "1.10.0" reqwest = "0.13.2" temp-env = { version = "0.3.6", features = ["async_closure"] } testdir = "0.10.0" thiserror = "2.0.3" tokio = { version = "1.38.0", features = [ "io-util", "macros", "process", "rt-multi-thread", ] } ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2023 mario-eth Permission is hereby granted, free of charge, to any person obtaining a copy of this software 
and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ # Soldeer ![Rust][rust-badge] [![License: MIT][license-badge]][license] [rust-badge]: https://img.shields.io/badge/Built%20with%20-Rust-e43716.svg [license]: https://opensource.org/licenses/MIT [license-badge]: https://img.shields.io/badge/License-MIT-blue.svg

Soldeer is a package manager for Solidity built in Rust and integrated into Foundry. Solidity development started to become more and more complex. The need for a package manager was evident. This project was started to solve the following issues: - git submodules in Foundry are not a good solution for managing dependencies - npmjs was built for the JS ecosystem, not for Solidity - github versioning of the releases is a pain and not all the projects are using it correctly ## Installation (Foundry) Soldeer is already integrated into Foundry. You can use it by running the following command: ```bash forge soldeer [COMMAND] ``` To check which version of Soldeer is packaged with your Foundry install, run `forge soldeer version`. ## Installation (standalone) Soldeer is available on [crates.io](https://crates.io/crates/soldeer) and can be installed with: ```bash cargo install soldeer ``` ### Verify installation ```bash soldeer help ``` ## Compile from Source Clone this repository, then run `cargo build --release` inside the root. The `soldeer` binary will be located inside the `target/release/` folder. ## Usage Check out the [usage guide](https://github.com/mario-eth/soldeer/blob/main/USAGE.md) or [Foundry Book](https://book.getfoundry.sh/projects/soldeer). ## Changelog Please see the [changelog](https://github.com/mario-eth/soldeer/blob/main/CHANGELOG.md) for more information about each release. ## Contributing See the [contribution guide](https://github.com/mario-eth/soldeer/blob/main/CONTRIBUTING.md) for more information. ================================================ FILE: USAGE.md ================================================ # Usage Guide `Soldeer` is straightforward to use. It can either be invoked from the `forge` tool provided by Foundry, or installed as a standalone executable named `soldeer`. Dependencies and configuration options can be specified inside Foundry's `foundry.toml` config file, or inside a dedicated `soldeer.toml` file. 
In the following sections, commands can be prefixed with `forge` to use the built-in version packaged with Foundry. ## Initializing a New Project ```bash [forge] soldeer init [--clean] ``` The `init` command can be used to set up a project for use with Soldeer. The command will generate or modify the project's config file (`foundry.toml` or `soldeer.toml`) and perform optional removal of Foundry-style submodule dependencies with the `--clean` flag. This command automatically adds the latest `forge-std` dependency to your project. Note that Soldeer installs dependencies into a folder named `dependencies`. There is currently no way to customize this path. ## Adding Dependencies ### From the Soldeer Registry ```bash [forge] soldeer install <NAME>~<VERSION> ``` This command searches the Soldeer registry at [https://soldeer.xyz](https://soldeer.xyz) for the specified dependency by name and version. If a match is found, a ZIP file containing the package source will be downloaded and unzipped into the `dependencies` directory. The command also adds the dependency to the project's config file and creates the necessary [remappings](https://book.getfoundry.sh/projects/dependencies#remapping-dependencies) if configured to do so. #### Version Requirement The `VERSION` argument is a version requirement string and can use operators and wildcards to match a range of versions. By default, if no operator is provided, it defaults to `=` which means "exactly this version". 
Examples: ``` 1.2.3 // exactly 1.2.3, equivalent to `=1.2.3` >=1.2.3 // any version greater than or equal to 1.2.3, including any 2.x version or more ^1.2.3 // the patch and minor version can increase, but not the major 1 // any version >=1.0.0 but <2.0.0 1.2 // any version >=1.2.0 but <2.0.0 ~1.2.3 // only the patch number can increase >1.2.3,<1.4.0 // multiple requirements can be separated by a comma ``` Note that this only makes sense when used with the Soldeer registry, as it provides a list of available versions to select from. Dependencies specified with a custom URL do not use the version requirement string in this way. ### With a Custom URL #### ZIP file ```bash [forge] soldeer install <NAME>~<VERSION> --url <URL> ``` If the URL to a ZIP file is provided, the registry is not used and the file is downloaded from the URL directly. Note that a version must still be provided, but it can be freely chosen. #### Git Repository ```bash [forge] soldeer install <NAME>~<VERSION> --git <GIT_URL> ``` If the URL to a git repository is provided, then the repository will be cloned into the `dependencies` folder with the `git` CLI available on the system. HTTPS and SSH-style URLs are supported (see examples below). Cloning a specific identifier can be done with the `--rev <COMMIT>`, `--branch <BRANCH>` or `--tag <TAG>` arguments. If omitted, then the default branch is checked out. 
Some examples: ```bash [forge] soldeer install test-project~v1 --git git@github.com:test/test.git [forge] soldeer install test-project~v1 --git git@gitlab.com:test/test.git ``` ```bash [forge] soldeer install test-project~v1 --git https://github.com/test/test.git [forge] soldeer install test-project~v1 --git https://gitlab.com/test/test.git ``` ```bash [forge] soldeer install test-project~v1 --git git@github.com:test/test.git --rev 345e611cd84bfb4e62c583fa1886c1928bc1a464 [forge] soldeer install test-project~v1 --git git@github.com:test/test.git --branch dev [forge] soldeer install test-project~v1 --git git@github.com:test/test.git --tag v1 ``` Note that a version must still be provided, but it can be freely chosen. ## Installing Existing Dependencies ```bash [forge] soldeer install ``` When invoked without arguments, the `install` command installs the project's existing dependencies by looking at the configuration file (`soldeer.toml`/`foundry.toml`) and lockfile `soldeer.lock` if present. Dependencies which are already present inside the `dependencies` folder are not downloaded again. For dependencies with a version range specified in the config file, the exact version that is written in the lockfile is used, even if a newer version exists on the registry. To update the lockfile to use the latest supported version, use `soldeer update`. ### Recursive Installation With the `--recursive-deps` flag, Soldeer will install the dependencies of each installed dependency, recursively. This is done internally by running `git submodule update --init --recursive` and/or installing Soldeer dependencies inside of the dependency's folder. This behavior can also be enabled permanently via the config file. #### Specifying the Project Root for a Dependency If recursive installation is enabled, Soldeer must find a `foundry.toml` or `soldeer.toml` config file within the dependency's directory to know which subdependencies to install. 
In case that config file is not located at the root of the dependency's directory (meaning at the root of a git repository or at the root of the zip file), then the path to the folder containing that file must be specified with `project_root`: ```toml # foundry.toml [dependencies] mydep = { version = "1.0.0", project_root = "contracts" } [soldeer] recursive_deps = true ``` The path is a relative path, starting from the root of the dependency, to the folder containing the config file. You should use forward slashes (`/`) as separator on all platforms. #### Note on Sub-Dependencies Since each dependency is free to use its own remappings, their resolution might become tricky in case of conflicting versions. For example: We have a project called `my-project` with the following dependencies: - `dependency~1` - `openzeppelin~5.0.2` with remapping `@openzeppelin/contracts/=dependencies/openzeppelin-5.0.2/` A contract inside `my-project` has the following import: ```solidity @openzeppelin/contracts/token/ERC20/ERC20.sol ``` However, `dependency~1` also depends on `openzeppelin`, but it uses version `4.9.2` (with remapping `@openzeppelin/contracts/=dependencies/openzeppelin-4.9.2/`). The contract inside `dependency-1` has the same import path because they chose to use the same remappings path as `my-project`: ```solidity @openzeppelin/contracts/token/ERC20/ERC20.sol ``` This situation creates ambiguity. Furthermore, if `dependency~1` were to import a file that is no longer present in `v5`, the compiler would give an error. As such, we recommend to always include the version requirement string as part of the remappings path. The version requirement string does not need to target a specific version, but could e.g. 
target a major version: ```toml [profile.default] remappings = ["@openzeppelin-contracts-5/=dependencies/@openzeppelin-contracts-5.0.2/contracts/"] [dependencies] "@openzeppelin-contracts" = "5" ``` ```solidity import from '@openzeppelin-contracts-5/token/ERC20/ERC20.sol'; ``` This approach should ensure that the correct version (or at least a compatible version) of the included file is used. ## Updating Dependencies ```bash [forge] soldeer update ``` For dependencies from the online registry which specify a version range, the `update` command can be used to retrieve the latest version that matches the requirements. The `soldeer.lock` lockfile is then updated accordingly. Remappings are automatically updated to the new version if Soldeer is configured to generate remappings. For git dependencies which specify no identifier or a branch identifier, the `update` command checks out the latest commit on the default or specified branch. ## Removing a Dependency ```bash [forge] soldeer uninstall <DEPENDENCY> ``` The `uninstall` command removes the dependency files and entry into the config file, lockfile and remappings. ## Publishing a Package to the Repository ```bash [forge] soldeer push <PROJECT_NAME>~<VERSION> ``` In order to push a new dependency to the repository, an account must first be created at [https://soldeer.xyz](https://soldeer.xyz). Then, a project with the dependency name must be created through the website. Finally, the `[forge] soldeer login` command must be used to retrieve or provide an access token for the API. CLI tokens can be generated on soldeer.xyz and should be preferred over using the email and password in the CLI, because email login will be removed in a future version of Soldeer. Alternatively, you can provide a valid CLI token via the `SOLDEER_API_TOKEN` environment variable. Example: Create a project called `my-project` and then use the `[forge] soldeer push my-project~1.0.0`. 
This will push the project to the repository as version `1.0.0` and makes it available for anyone to use. ### Specifying a Path ```bash [forge] soldeer push ~ [PATH] ``` If the files to push are not located in the current directory, a path to the files can be provided. ### Ignoring Files If you want to ignore certain files from the published package, you need to create one or more `.soldeerignore` files that must contain the patterns that you want to ignore. These files can be at any level of your directory structure. They use the `.gitignore` syntax. Any file that matches a pattern present in `.gitignore` and `.ignore` files is also automatically excluded from the published package. ### Dry Run ```bash [forge] soldeer push ~ --dry-run ``` With the `--dry-run` flag, the `push` command only creates a ZIP file containing the published package's content, but does not upload it to the registry. The file can then be inspected to check that the contents is suitable. We recommend that everyone runs a dry-run before pushing a new dependency to avoid publishing unwanted files. **Warning** ⚠️ You are at risk to push sensitive files to the central repository that then can be seen by everyone. Make sure to exclude sensitive files in the `.soldeerignore` or `.gitignore` file. Furthermore, we've implemented a warning that gets triggered if the package contains any dotfile (a file with a name starting with `.`). This warning can be ignored with `--skip-warnings`. ## Configuration The `foundry.toml`/`soldeer.toml` file can have a `[soldeer]` section to configure the tool's behavior. 
See the default configuration below: ```toml [soldeer] # whether Soldeer manages remappings remappings_generate = true # whether Soldeer re-generates all remappings when installing, updating or uninstalling deps remappings_regenerate = false # whether to suffix the remapping with the version requirement string: `name-a.b.c` remappings_version = true # a prefix to add to the remappings ("@" would give `@name`) remappings_prefix = "" # where to store the remappings ("txt" for `remappings.txt` or "config" for `foundry.toml`) # ignored when `soldeer.toml` is used as config (uses `remappings.txt`) remappings_location = "txt" # whether to install sub-dependencies or not. If true this will install the dependencies of dependencies recursively. recursive_deps = false ``` ## List of Available Commands For more commands and their usage, see `[forge] soldeer --help` and `[forge] soldeer <COMMAND> --help`. ## Remappings Caveats If you use other dependency managers, such as git submodules or npm, ensure you don't duplicate dependencies between soldeer and the other manager. Remappings targeting dependencies installed without Soldeer are not modified or removed when using Soldeer commands, unless the `--regenerate-remappings` flag is specified or the `remappings_regenerate = true` option is set. ## Dependencies Maintenance The vision for Soldeer is that major projects such as OpenZeppelin, Solady, Uniswap would start publishing their own packages to the Soldeer registry so that the community can easily include them and get timely updates. Until this happens, the Soldeer maintenance team (currently m4rio.eth) will push the most popular dependencies to the repository by relying on their npmjs or GitHub versions. We are using [an open-source crawler tool](https://github.com/mario-eth/soldeer-crawler) to crawl and push the dependencies under the `soldeer` organization. 
For those who want an extra layer of security, the `soldeer.lock` file saves a `SHA-256` hash for each downloaded ZIP file and the corresponding unzipped folder (see `soldeer_core::utils::hash_folder` to see how it gets generated). These can be compared with the official releases to ensure the files were not manipulated. **For Project Maintainers** If you want to move your project from the Soldeer organization and take care of pushing the versions to Soldeer yourself, please open an issue on GitHub or contact m4rio.eth on [X (formerly Twitter)](https://twitter.com/m4rio_eth). ================================================ FILE: clippy.toml ================================================ allow-unwrap-in-tests = true ================================================ FILE: crates/cli/Cargo.toml ================================================ [package] name = "soldeer" description.workspace = true authors.workspace = true categories.workspace = true edition.workspace = true exclude.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true readme.workspace = true repository.workspace = true rust-version.workspace = true version.workspace = true [lints] workspace = true [[bin]] name = "soldeer" path = "src/main.rs" [dependencies] env_logger = { version = "0.11.9", features = ["unstable-kv"] } log.workspace = true soldeer-commands = { path = "../commands", version = "0.11.0" } tokio.workspace = true yansi = { version = "1.0.1", features = ["detect-tty", "detect-env"] } ================================================ FILE: crates/cli/src/main.rs ================================================ //! 
Soldeer is a package manager for Solidity projects use std::env; use log::Level; use soldeer_commands::{Args, commands::Parser as _, run}; use yansi::{Condition, Paint as _}; const HAVE_COLOR: Condition = Condition(|| { std::env::var_os("NO_COLOR").is_none() && (Condition::CLICOLOR_LIVE)() && Condition::stdouterr_are_tty_live() }); #[tokio::main] async fn main() { // disable colors if unsupported yansi::whenever(HAVE_COLOR); let args = Args::parse(); // setup logging if env::var("RUST_LOG").is_ok() { env_logger::builder().init(); } else if let Some(level) = args.verbose.log_level() && level > Level::Error { // the user requested structured logging (-v[v*]) // init logger env_logger::Builder::new().filter_level(args.verbose.log_level_filter()).init(); } if !args.verbose.is_present() { banner(); } if let Err(err) = run(args.command, args.verbose).await { eprintln!("{}", err.to_string().red()) } } /// Generate and print a banner fn banner() { println!( "{}", format!( " +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ╔═╗╔═╗╦ ╔╦╗╔═╗╔═╗╦═╗ Solidity Package Manager ╚═╗║ ║║ ║║║╣ ║╣ ╠╦╝ ╚═╝╚═╝╩═╝═╩╝╚═╝╚═╝╩╚═ github.com/mario-eth/soldeer v{} soldeer.xyz +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ", env!("CARGO_PKG_VERSION") ) .bright_cyan() ); } ================================================ FILE: crates/commands/Cargo.toml ================================================ [package] name = "soldeer-commands" description = "High-level commands for the Soldeer CLI" authors.workspace = true categories.workspace = true edition.workspace = true exclude.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true readme.workspace = true repository.workspace = true rust-version.workspace = true version.workspace = true [lints] workspace = true [dependencies] bon.workspace = true clap.workspace = true clap-verbosity-flag = "3.0.2" cliclack.workspace = true derive_more.workspace = true email-address-parser = "2.0.0" 
path-slash.workspace = true rayon.workspace = true soldeer-core = { path = "../core", version = "0.11.0" } tokio.workspace = true [dev-dependencies] mockito.workspace = true reqwest.workspace = true temp-env.workspace = true testdir.workspace = true [features] serde = ["soldeer-core/serde"] ================================================ FILE: crates/commands/src/commands/clean.rs ================================================ use crate::utils::success; use clap::Parser; use soldeer_core::{Result, config::Paths}; use std::fs; /// Clean downloaded dependencies and generated artifacts #[derive(Debug, Clone, Default, Parser, bon::Builder)] #[builder(on(String, into))] #[clap(after_help = "For more information, read the README.md")] #[non_exhaustive] pub struct Clean { // No options for basic implementation } pub(crate) fn clean_command(paths: &Paths, _cmd: &Clean) -> Result<()> { // Remove dependencies folder if it exists if paths.dependencies.exists() { fs::remove_dir_all(&paths.dependencies)?; success!("Dependencies folder removed"); } Ok(()) } ================================================ FILE: crates/commands/src/commands/init.rs ================================================ use crate::{ ConfigLocation, utils::{Progress, remark, success}, }; use clap::Parser; use soldeer_core::{ Result, config::{Paths, add_to_config, read_soldeer_config, update_config_libs}, install::{InstallProgress, ensure_dependencies_dir, install_dependency}, lock::add_to_lockfile, registry::get_latest_version, remappings::{RemappingsAction, edit_remappings}, utils::remove_forge_lib, }; use std::fs; /// Convert a Foundry project to use Soldeer #[derive(Debug, Clone, Default, Parser, bon::Builder)] #[allow(clippy::duplicated_attributes)] #[builder(on(String, into), on(ConfigLocation, into))] #[clap(after_help = "For more information, read the README.md")] #[non_exhaustive] pub struct Init { /// Clean the Foundry project by removing .gitmodules and the lib directory #[arg(long, 
default_value_t = false)] #[builder(default)] pub clean: bool, /// Specify the config location. /// /// This prevents prompting the user if the automatic detection can't determine the config /// location. #[arg(long, value_enum)] pub config_location: Option, } pub(crate) async fn init_command(paths: &Paths, cmd: Init) -> Result<()> { if cmd.clean { remark!("Flag `--clean` was set, removing `lib` dir and submodules"); remove_forge_lib(&paths.root).await?; } let config = read_soldeer_config(&paths.config)?; success!("Done reading config"); ensure_dependencies_dir(&paths.dependencies)?; let dependency = get_latest_version("forge-std").await?; let (progress, monitor) = InstallProgress::new(); let bars = Progress::new(format!("Installing {dependency}"), 1, monitor); bars.start_all(); let lock = install_dependency(&dependency, None, &paths.dependencies, None, false, progress) .await .inspect_err(|e| { bars.set_error(e); })?; bars.stop_all(); add_to_config(&dependency, &paths.config)?; let foundry_config = paths.root.join("foundry.toml"); if foundry_config.exists() { update_config_libs(foundry_config)?; } success!("Dependency added to config"); add_to_lockfile(lock, &paths.lock)?; success!("Dependency added to lockfile"); edit_remappings(&RemappingsAction::Add(dependency), &config, paths)?; success!("Dependency added to remappings"); let gitignore_path = paths.root.join(".gitignore"); if gitignore_path.exists() { let mut gitignore = fs::read_to_string(&gitignore_path)?; if !gitignore.contains("dependencies") { gitignore.push_str("\n\n# Soldeer\n/dependencies\n"); fs::write(&gitignore_path, gitignore)?; } } success!("Added `dependencies` to .gitignore"); Ok(()) } ================================================ FILE: crates/commands/src/commands/install.rs ================================================ use super::validate_dependency; use crate::{ ConfigLocation, utils::{Progress, remark, success, warning}, }; use clap::Parser; use soldeer_core::{ Result, config::{ 
Dependency, GitIdentifier, Paths, UrlType, add_to_config, read_config_deps,
        read_soldeer_config,
    },
    errors::{InstallError, LockError},
    install::{InstallProgress, ensure_dependencies_dir, install_dependencies, install_dependency},
    lock::{add_to_lockfile, generate_lockfile_contents, read_lockfile},
    remappings::{RemappingsAction, edit_remappings},
};
use std::fs;

/// Install a dependency
#[derive(Debug, Clone, Default, Parser, bon::Builder)]
#[allow(clippy::duplicated_attributes)]
#[builder(on(String, into), on(ConfigLocation, into))]
#[clap(
    long_about = "Install a dependency If used with arguments, a dependency will be added to the configuration. When used without argument, installs all dependencies that are missing. Examples: - Install all: soldeer install - Add from registry: soldeer install lib_name~2.3.0 - Add with custom URL: soldeer install lib_name~2.3.0 --url https://foo.bar/lib.zip - Add with git: soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git - Add with git (commit): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --rev 05f218fb6617932e56bf5388c3b389c3028a7b73 - Add with git (tag): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --tag v2.3.0 - Add with git (branch): soldeer install lib_name~2.3.0 --git git@github.com:foo/bar.git --branch feature/baz",
    after_help = "For more information, read the README.md"
)]
#[non_exhaustive]
// NOTE(review): several field types below appear as bare `Option` — the generic parameters
// (presumably `Option<String>`) seem to have been lost in extraction; confirm against the
// repository before relying on this listing.
pub struct Install {
    /// The dependency name and version, separated by a tilde. The version is always required.
    ///
    /// If not present, this command will install all dependencies which are missing.
    #[arg(value_parser = validate_dependency, value_name = "DEPENDENCY~VERSION")]
    pub dependency: Option,
    /// The URL to the dependency zip file.
    ///
    /// Example: https://my-domain/dep.zip
    // Mutually exclusive with `--git`; requires a dependency argument.
    #[arg(long = "url", requires = "dependency", conflicts_with = "git_url")]
    pub zip_url: Option,
    /// The URL to the dependency repository.
    ///
    /// Example: git@github.com:foo/bar.git
    // Mutually exclusive with `--url`; requires a dependency argument.
    #[arg(long = "git", requires = "dependency", conflicts_with = "zip_url")]
    pub git_url: Option,
    /// A Git commit hash
    // `rev`, `tag` and `branch` share the "identifier" group, so clap enforces that at most
    // one of them is provided; all require `--git`.
    #[arg(long, group = "identifier", requires = "git_url")]
    pub rev: Option,
    /// A Git tag
    #[arg(long, group = "identifier", requires = "git_url")]
    pub tag: Option,
    /// A Git branch
    #[arg(long, group = "identifier", requires = "git_url")]
    pub branch: Option,
    /// If set, this command will delete the existing remappings and re-create them
    #[arg(short = 'g', long, default_value_t = false)]
    #[builder(default)]
    pub regenerate_remappings: bool,
    /// If set, this command will install dependencies recursively (via git submodules or via
    /// soldeer)
    #[arg(short = 'd', long, default_value_t = false)]
    #[builder(default)]
    pub recursive_deps: bool,
    /// Perform a clean install by re-installing all dependencies
    #[arg(long, default_value_t = false)]
    #[builder(default)]
    pub clean: bool,
    /// Specify the config location without prompting.
    ///
    /// This prevents prompting the user if the automatic detection can't determine the config
    /// location.
#[arg(long, value_enum)]
    // NOTE(review): generic parameter lost in extraction; presumably `Option<ConfigLocation>`.
    pub config_location: Option,
}

/// Run the `install` command.
///
/// Without a dependency argument, installs every dependency listed in the config that is
/// missing (optionally wiping the dependencies dir first with `--clean`). With a
/// `name~version` argument, resolves and installs that single dependency and records it in
/// the config, lockfile and remappings.
pub(crate) async fn install_command(paths: &Paths, cmd: Install) -> Result<()> {
    let mut config = read_soldeer_config(&paths.config)?;
    // CLI flags override the corresponding config-file settings for this run.
    if cmd.regenerate_remappings {
        config.remappings_regenerate = true;
    }
    if cmd.recursive_deps {
        config.recursive_deps = true;
    }
    success!("Done reading config");
    ensure_dependencies_dir(&paths.dependencies)?;
    let (dependencies, warnings) = read_config_deps(&paths.config)?;
    for w in warnings {
        warning!(format!("Config warning: {w}"));
    }
    match &cmd.dependency {
        // No argument: install everything that the config lists.
        None => {
            let lockfile = read_lockfile(&paths.lock)?;
            success!("Done reading lockfile");
            // `--clean` removes the whole dependencies dir and re-creates it empty.
            if cmd.clean {
                remark!("Flag `--clean` was set, re-installing all dependencies");
                fs::remove_dir_all(&paths.dependencies).map_err(|e| InstallError::IOError {
                    path: paths.dependencies.clone(),
                    source: e,
                })?;
                ensure_dependencies_dir(&paths.dependencies)?;
            }
            let (progress, monitor) = InstallProgress::new();
            let bars = Progress::new("Installing dependencies", dependencies.len(), monitor);
            bars.start_all();
            let new_locks = install_dependencies(
                &dependencies,
                &lockfile.entries,
                &paths.dependencies,
                config.recursive_deps,
                progress,
            )
            .await?;
            bars.stop_all();
            // Warn (but do not overwrite) when an existing lockfile disagrees with the
            // freshly computed entries; only write the lockfile if none existed before.
            let new_lockfile_content = generate_lockfile_contents(new_locks);
            if !lockfile.raw.is_empty() && new_lockfile_content != lockfile.raw {
                warning!(
                    "Warning: the lock file is out of sync with the dependencies. Consider running `soldeer update` to re-generate the lockfile."
                );
            } else if lockfile.raw.is_empty() {
                fs::write(&paths.lock, new_lockfile_content).map_err(LockError::IOError)?;
            }
            edit_remappings(&RemappingsAction::Update, &config, paths)?;
            success!("Updated remappings");
        }
        // A `name~version` argument: add and install a single dependency.
        Some(dependency) => {
            // At most one of rev/branch/tag can be set (clap "identifier" group).
            let identifier = match (&cmd.rev, &cmd.branch, &cmd.tag) {
                (Some(rev), None, None) => Some(GitIdentifier::from_rev(rev)),
                (None, Some(branch), None) => Some(GitIdentifier::from_branch(branch)),
                (None, None, Some(tag)) => Some(GitIdentifier::from_tag(tag)),
                (None, None, None) => None,
                _ => unreachable!("clap should prevent this"),
            };
            // `--url` and `--git` are mutually exclusive, so at most one of these is Some.
            let url =
                cmd.zip_url.as_ref().map(UrlType::http).or(cmd.git_url.as_ref().map(UrlType::git));
            let mut dep = Dependency::from_name_version(dependency, url, identifier)?;
            // Already present in the config: fall back to a plain `install` run (recursion
            // via Box::pin because this is an async fn calling itself).
            if dependencies
                .iter()
                .any(|d| d.name() == dep.name() && d.version_req() == dep.version_req())
            {
                remark!(format!("{dep} is already installed, running `install` instead"));
                Box::pin(install_command(
                    paths,
                    Install::builder()
                        .regenerate_remappings(cmd.regenerate_remappings)
                        .recursive_deps(cmd.recursive_deps)
                        .clean(cmd.clean)
                        .maybe_config_location(cmd.config_location)
                        .build(),
                ))
                .await?;
                return Ok(());
            }
            let (progress, monitor) = InstallProgress::new();
            let bars = Progress::new(format!("Installing {dep}"), 1, monitor);
            bars.start_all();
            let lock = install_dependency(
                &dep,
                None,
                &paths.dependencies,
                None,
                config.recursive_deps,
                progress,
            )
            .await?;
            bars.stop_all();
            // for git deps, we need to add the commit hash before adding them to the
            // config, unless a branch/tag was specified
            if let Some(git_dep) = dep.as_git_mut()
                && git_dep.identifier.is_none()
            {
                git_dep.identifier = Some(GitIdentifier::from_rev(
                    &lock.as_git().expect("lock entry should be of type git").rev,
                ));
            }
            add_to_config(&dep, &paths.config)?;
            success!("Dependency added to config");
            add_to_lockfile(lock, &paths.lock)?;
            success!("Dependency added to lockfile");
            edit_remappings(&RemappingsAction::Add(dep), &config, paths)?;
            success!("Dependency added to remappings");
        }
    }
Ok(())
}

================================================ FILE: crates/commands/src/commands/login.rs ================================================
use crate::utils::{info, remark, step, success, warning};
use clap::Parser;
use email_address_parser::{EmailAddress, ParsingOptions};
use path_slash::PathBufExt as _;
use soldeer_core::{
    Result,
    auth::{Credentials, check_token, execute_login, save_token},
    errors::AuthError,
};
use std::path::PathBuf;

/// Log into the central repository to push packages
///
/// The credentials are saved by default into ~/.soldeer.
/// If you want to overwrite that location, use the SOLDEER_LOGIN_FILE env var.
#[derive(Debug, Clone, Default, Parser, bon::Builder)]
#[builder(on(String, into))]
#[clap(after_help = "For more information, read the README.md")]
#[non_exhaustive]
// NOTE(review): field types below appear as bare `Option` — generic parameters (presumably
// `Option<String>`) seem to have been lost in extraction; confirm against the repository.
pub struct Login {
    /// Specify the email without prompting.
    #[arg(long, conflicts_with = "token")]
    pub email: Option,
    /// Specify the password without prompting.
    #[arg(long, conflicts_with = "token")]
    pub password: Option,
    /// Login with a token created via soldeer.xyz.
    #[arg(long)]
    pub token: Option,
}

/// Run the `login` command.
///
/// Prefers token-based login (`--token`): the token is validated against the registry and
/// saved locally. Otherwise falls back to the deprecated email/password flow, prompting
/// interactively when the TUI is enabled and erroring out when it is not.
pub(crate) async fn login_command(cmd: Login) -> Result<()> {
    remark!("If you do not have an account, please visit soldeer.xyz to create one.");
    // Token path: validate, persist, and return early — no email/password needed.
    if let Some(token) = cmd.token {
        let token = token.trim();
        let username = check_token(token).await?;
        let token_path = save_token(token)?;
        info!(format!(
            "Token is valid for user {username} and was saved in: {}",
            PathBuf::from_slash_lossy(&token_path).to_string_lossy() /* normalize separators */
        ));
        return Ok(());
    }
    warning!(
        "The option to login via email and password will be removed in a future version of Soldeer. Please update your usage by either using `soldeer login --token [YOUR CLI TOKEN]` or passing the `SOLDEER_API_TOKEN` environment variable to the `push` command."
);
    // Resolve the email: validate a provided value, or prompt interactively (TUI only).
    let email: String = match cmd.email {
        Some(email) => {
            if EmailAddress::parse(&email, Some(ParsingOptions::default())).is_none() {
                return Err(AuthError::InvalidCredentials.into());
            }
            step!(format!("Email: {email}"));
            email
        }
        None => {
            // Prompting requires the TUI; in structured-logging mode this is an error.
            if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
                return Err(AuthError::TuiDisabled.into());
            }
            cliclack::input("Email address")
                .validate(|input: &String| {
                    if input.is_empty() {
                        Err("Email is required")
                    } else {
                        match EmailAddress::parse(input, Some(ParsingOptions::default())) {
                            None => Err("Invalid email address"),
                            Some(_) => Ok(()),
                        }
                    }
                })
                .interact()?
        }
    };
    // Resolve the password: use the provided value or prompt with a masked input (TUI only).
    let password = match cmd.password {
        Some(pw) => pw,
        None => {
            if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
                return Err(AuthError::TuiDisabled.into());
            }
            cliclack::password("Password").mask('▪').interact()?
        }
    };
    let token_path = execute_login(&Credentials { email, password }).await?;
    success!("Login successful");
    info!(format!(
        "Token saved in: {}",
        PathBuf::from_slash_lossy(&token_path).to_string_lossy() /* normalize separators */
    ));
    Ok(())
}

================================================ FILE: crates/commands/src/commands/mod.rs ================================================
pub use clap::{Parser, Subcommand};
use clap_verbosity_flag::{LogLevel, VerbosityFilter};
use derive_more::derive::From;

pub mod clean;
pub mod init;
pub mod install;
pub mod login;
pub mod push;
pub mod uninstall;
pub mod update;

/// Custom verbosity level for `clap_verbosity_flag`: errors-only by default, with
/// Soldeer-specific help text for the `-v`/`-q` flags.
#[derive(Copy, Clone, Debug, Default)]
pub struct CustomLevel;

impl LogLevel for CustomLevel {
    // Default to only showing errors when no `-v` flags are passed.
    fn default_filter() -> VerbosityFilter {
        VerbosityFilter::Error
    }

    fn verbose_help() -> Option<&'static str> {
        Some("Use structured logging and increase verbosity")
    }

    fn verbose_long_help() -> Option<&'static str> {
        Some(
            r#"Use structured logging and increase verbosity Pass multiple times to increase the logging level (e.g. -v, -vv, -vvv). If omitted, then a pretty TUI output will be used. Otherwise: - 1 (-v): print logs with level error and warning - 2 (-vv): print logs with level info - 3 (-vvv): print logs with level debug - 4 (-vvvv): print logs with level trace "#,
        )
    }

    fn quiet_help() -> Option<&'static str> {
        Some("Disable logs and output, or reduce verbosity")
    }
}

/// A minimal Solidity dependency manager
#[derive(Parser, Debug, bon::Builder)]
#[clap(name = "soldeer", author = "m4rio.eth", version)]
#[non_exhaustive]
pub struct Args {
    #[clap(subcommand)]
    pub command: Command,

    /// Test
    // NOTE(review): "Test" looks like a leftover placeholder doc comment; the actual help
    // text comes from `CustomLevel::verbose_help` — confirm before removing. The flattened
    // type is presumably `Verbosity<CustomLevel>`; the generic seems lost in extraction.
    #[command(flatten)]
    pub verbose: clap_verbosity_flag::Verbosity,
}

/// The available commands for Soldeer
#[derive(Debug, Clone, Subcommand, From)]
#[non_exhaustive]
pub enum Command {
    Init(init::Init),
    Install(install::Install),
    Update(update::Update),
    Login(login::Login),
    Push(push::Push),
    Uninstall(uninstall::Uninstall),
    Clean(clean::Clean),
    Version(Version),
}

/// Display the version of Soldeer
#[derive(Debug, Clone, Default, Parser)]
#[non_exhaustive]
pub struct Version {}

/// Clap value parser: accepts only strings containing exactly one `~` separator
/// (i.e. `name~version`) and returns the input unchanged.
// NOTE(review): the return type (presumably `Result<String, String>`) and the
// `<name>~<version>` placeholders in the error message appear to have been lost in
// extraction; confirm against the repository.
fn validate_dependency(dep: &str) -> std::result::Result {
    if dep.split('~').count() != 2 {
        return Err("The dependency should be in the format ~".to_string());
    }
    Ok(dep.to_string())
}

================================================ FILE: crates/commands/src/commands/push.rs ================================================
use super::validate_dependency;
use crate::utils::{info, remark, success, warning};
use clap::Parser;
use soldeer_core::{
    Result,
    errors::PublishError,
    push::{filter_ignored_files, push_version, validate_name, validate_version},
    utils::{canonicalize_sync, check_dotfiles},
};
use std::{env, path::PathBuf, sync::atomic::Ordering};

/// Push a dependency to the repository
#[derive(Debug, Clone, Parser, bon::Builder)]
#[allow(clippy::duplicated_attributes)]
#[builder(on(String, into), on(PathBuf, into))]
#[clap(
    long_about = "Push a dependency to the soldeer.xyz repository.
You need to be logged in first (soldeer login) or provide the `SOLDEER_API_TOKEN` environment variable with a valid CLI token generated on soldeer.xyz. Examples: - Current directory: soldeer push mypkg~0.1.0 - Custom directory: soldeer push mypkg~0.1.0 /path/to/dep - Dry run: soldeer push mypkg~0.1.0 --dry-run To ignore certain files, create a `.soldeerignore` file in the root of the project and add the files you want to ignore. The `.soldeerignore` uses the same syntax as `.gitignore`.",
    after_help = "For more information, read the README.md"
)]
#[non_exhaustive]
pub struct Push {
    /// The dependency name and version, separated by a tilde.
    ///
    /// This should always be used when you want to push a dependency to the central repository: ``.
    // NOTE(review): the next line is garbled and fields appear to be missing — `push_command`
    // reads `cmd.dependency` (a String) and `cmd.path` (presumably `Option<PathBuf>`), so
    // those field declarations were likely lost in extraction; confirm against the repository.
    #[arg(value_parser = validate_dependency, value_name = "DEPENDENCY>~,
    /// If set, does not publish the package but generates a zip file that can be inspected.
    #[arg(short, long, default_value_t = false)]
    #[builder(default)]
    pub dry_run: bool,
    /// Use this if you want to skip the warnings that can be triggered when trying to push
    /// dotfiles like .env.
    #[arg(long, default_value_t = false)]
    #[builder(default)]
    pub skip_warnings: bool,
}

/// Run the `push` command: package the project dir (minus ignored files) and publish it,
/// or just create the zip for inspection in `--dry-run` mode.
pub(crate) async fn push_command(cmd: Push) -> Result<()> {
    // Default to the current working directory when no path was given.
    let path = cmd.path.unwrap_or(env::current_dir()?);
    let path = canonicalize_sync(&path)?;
    // NOTE(review): element type lost in extraction; presumably `Vec<PathBuf>`.
    let files_to_copy: Vec = filter_ignored_files(&path);
    // Check for sensitive files or directories
    if !cmd.dry_run &&
        !cmd.skip_warnings &&
        check_dotfiles(&files_to_copy) &&
        !prompt_user_for_confirmation()?
{
        // User declined to publish sensitive files.
        return Err(PublishError::UserAborted.into());
    }
    if cmd.dry_run {
        remark!("Running in dry-run mode, a zip file will be created for inspection");
    }
    if cmd.skip_warnings {
        warning!("Sensitive file warnings are being ignored as requested");
    }
    // `validate_dependency` (the clap value parser) guarantees exactly one `~` separator.
    let (dependency_name, dependency_version) =
        cmd.dependency.split_once('~').expect("dependency string should have name and version");
    validate_name(dependency_name)?;
    validate_version(dependency_version)?;
    // `push_version` returns Some(zip path) in dry-run mode and None after a real publish.
    if let Some(zip_path) =
        push_version(dependency_name, dependency_version, path, &files_to_copy, cmd.dry_run).await?
    {
        info!(format!("Zip file created at {}", zip_path.to_string_lossy()));
    } else {
        success!("Pushed to repository!");
    }
    Ok(())
}

/// Ask the user to confirm publishing sensitive (dot)files.
///
/// Returns `Ok(true)` unconditionally when the TUI is disabled, since no prompt can be shown.
// NOTE(review): return type presumably `Result<bool>`; the generic seems lost in extraction.
// Also note the missing space in "`--dry-run`and" in the message below — runtime string left
// untouched here, but worth fixing at the source.
fn prompt_user_for_confirmation() -> Result {
    remark!("You are about to include some sensitive files in this version");
    info!(
        "If you are not sure which files will be included, you can run the command with `--dry-run`and inspect the generated zip file."
    );
    if crate::TUI_ENABLED.load(Ordering::Relaxed) {
        cliclack::confirm("Do you want to continue?")
            .interact()
            .map_err(|e| PublishError::IOError { path: PathBuf::new(), source: e }.into())
    } else {
        Ok(true)
    }
}

================================================ FILE: crates/commands/src/commands/uninstall.rs ================================================
use crate::utils::success;
use clap::Parser;
use soldeer_core::{
    Result, SoldeerError,
    config::{Paths, delete_from_config, read_soldeer_config},
    download::delete_dependency_files_sync,
    lock::remove_lock,
    remappings::{RemappingsAction, edit_remappings},
};

/// Uninstall a dependency
#[derive(Debug, Clone, Parser, bon::Builder)]
#[builder(on(String, into))]
#[clap(after_help = "For more information, read the README.md")]
#[non_exhaustive]
pub struct Uninstall {
    /// The dependency name. Specifying a version is not necessary.
pub dependency: String,
}

/// Run the `uninstall` command: remove a dependency from the config, remappings, disk and
/// lockfile, in that order.
pub(crate) fn uninstall_command(paths: &Paths, cmd: &Uninstall) -> Result<()> {
    let config = read_soldeer_config(&paths.config)?;
    success!("Done reading config");
    // delete from the config file and return the dependency
    let dependency = delete_from_config(&cmd.dependency, &paths.config)?;
    success!("Dependency removed from config file");
    edit_remappings(&RemappingsAction::Remove(dependency.clone()), &config, paths)?;
    success!("Dependency removed from remappings");
    // deleting the files
    delete_dependency_files_sync(&dependency, &paths.dependencies)
        .map_err(|e| SoldeerError::DownloadError { dep: dependency.to_string(), source: e })?;
    success!("Dependency removed from disk");
    remove_lock(&dependency, &paths.lock)?;
    success!("Dependency removed from lockfile");
    Ok(())
}

================================================ FILE: crates/commands/src/commands/update.rs ================================================
use crate::{
    ConfigLocation,
    utils::{Progress, success, warning},
};
use clap::Parser;
use soldeer_core::{
    Result,
    config::{Paths, read_config_deps, read_soldeer_config},
    errors::LockError,
    install::{InstallProgress, ensure_dependencies_dir},
    lock::{generate_lockfile_contents, read_lockfile},
    remappings::{RemappingsAction, edit_remappings},
    update::update_dependencies,
};
use std::fs;

/// Update dependencies by reading the config file
#[derive(Debug, Clone, Default, Parser, bon::Builder)]
#[allow(clippy::duplicated_attributes)]
#[builder(on(String, into), on(ConfigLocation, into))]
#[clap(after_help = "For more information, read the README.md")]
#[non_exhaustive]
pub struct Update {
    /// If set, this command will delete the existing remappings and re-create them
    #[arg(short = 'g', long, default_value_t = false)]
    #[builder(default)]
    pub regenerate_remappings: bool,
    /// If set, this command will install the dependencies recursively (via submodules or via
    /// soldeer)
    #[arg(short = 'd', long, default_value_t = false)]
    #[builder(default)]
    pub
recursive_deps: bool,
    /// Specify the config location without prompting.
    ///
    /// This prevents prompting the user if the automatic detection can't determine the config
    /// location.
    #[arg(long, value_enum)]
    // NOTE(review): generic parameter lost in extraction; presumably `Option<ConfigLocation>`.
    pub config_location: Option,
}

// TODO: add a parameter for a dependency name, where we would only update that particular
// dependency
/// Run the `update` command: re-resolve every configured dependency, rewrite the lockfile
/// with the new entries, and refresh the remappings.
pub(crate) async fn update_command(paths: &Paths, cmd: Update) -> Result<()> {
    let mut config = read_soldeer_config(&paths.config)?;
    // CLI flags override the corresponding config-file settings for this run.
    if cmd.regenerate_remappings {
        config.remappings_regenerate = true;
    }
    if cmd.recursive_deps {
        config.recursive_deps = true;
    }
    success!("Done reading config");
    ensure_dependencies_dir(&paths.dependencies)?;
    let (dependencies, warnings) = read_config_deps(&paths.config)?;
    for w in warnings {
        warning!(format!("Config warning: {w}"));
    }
    let lockfile = read_lockfile(&paths.lock)?;
    success!("Done reading lockfile");
    let (progress, monitor) = InstallProgress::new();
    let bars = Progress::new("Updating dependencies", dependencies.len(), monitor);
    bars.start_all();
    let new_locks = update_dependencies(
        &dependencies,
        &lockfile.entries,
        &paths.dependencies,
        config.recursive_deps,
        progress,
    )
    .await?;
    bars.stop_all();
    // Unlike `install`, `update` always overwrites the lockfile with the new entries.
    let new_lockfile_content = generate_lockfile_contents(new_locks);
    fs::write(&paths.lock, new_lockfile_content).map_err(LockError::IOError)?;
    success!("Updated lockfile");
    edit_remappings(&RemappingsAction::Update, &config, paths)?;
    success!("Updated remappings");
    Ok(())
}

================================================ FILE: crates/commands/src/lib.rs ================================================
//!
//! High-level commands for the Soldeer CLI
#![cfg_attr(docsrs, feature(doc_cfg))]

pub use crate::commands::{Args, Command};
use clap::builder::PossibleValue;
pub use clap_verbosity_flag::Verbosity;
use clap_verbosity_flag::log::Level;
use commands::CustomLevel;
use derive_more::derive::FromStr;
use soldeer_core::{Result, config::Paths};
use std::{
    env,
    path::PathBuf,
    sync::atomic::{AtomicBool, Ordering},
};
use utils::{get_config_location, intro, outro, outro_cancel, step};

pub mod commands;
pub mod utils;

// Global switch: when true, the cliclack TUI macros in `utils` actually render output.
// Set by `run` based on the verbosity flags and the RUST_LOG env var.
static TUI_ENABLED: AtomicBool = AtomicBool::new(true);

/// The location where the Soldeer config should be stored.
///
/// This is a new type so we can implement the `ValueEnum` trait for it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, FromStr)]
pub struct ConfigLocation(soldeer_core::config::ConfigLocation);

impl clap::ValueEnum for ConfigLocation {
    // The two supported config locations, in the order clap should list them.
    fn value_variants<'a>() -> &'a [Self] {
        &[
            Self(soldeer_core::config::ConfigLocation::Foundry),
            Self(soldeer_core::config::ConfigLocation::Soldeer),
        ]
    }

    // Map each variant to its CLI spelling ("foundry" / "soldeer").
    // NOTE(review): return type presumably `Option<PossibleValue>`; generic lost in extraction.
    fn to_possible_value(&self) -> Option {
        Some(match self.0 {
            soldeer_core::config::ConfigLocation::Foundry => PossibleValue::new("foundry"),
            soldeer_core::config::ConfigLocation::Soldeer => PossibleValue::new("soldeer"),
        })
    }
}

// NOTE(review): the `From` impls below appear as bare `impl From for ...` — the source type
// parameters (`From<ConfigLocation>` / `From<soldeer_core::config::ConfigLocation>`) seem to
// have been lost in extraction; confirm against the repository.
impl From for soldeer_core::config::ConfigLocation {
    fn from(value: ConfigLocation) -> Self {
        value.0
    }
}

impl From for ConfigLocation {
    fn from(value: soldeer_core::config::ConfigLocation) -> Self {
        Self(value)
    }
}

/// Entry point for all Soldeer subcommands.
///
/// Decides between TUI and structured-logging output based on the verbosity flags and the
/// `RUST_LOG` env var, resolves the project root and config location, then dispatches to the
/// matching command module.
pub async fn run(command: Command, verbosity: Verbosity) -> Result<()> {
    if let Some(level) = verbosity.log_level()
        && level <= Level::Error
        && env::var("RUST_LOG").is_err()
    {
        // enable TUI if no `-v` flag and no RUST_LOG is provided
        TUI_ENABLED.store(true, Ordering::Relaxed);
    } else {
        TUI_ENABLED.store(false, Ordering::Relaxed);
    }
    match command {
        Command::Init(cmd) => {
            intro!("🦌 Soldeer Init 🦌");
            step!("Initialize Foundry project to use Soldeer");
            // for init, we always use the current dir as root, unless specified by env
            let root = env::var("SOLDEER_PROJECT_ROOT")
                .ok()
                .filter(|p| !p.is_empty())
                .map_or(env::current_dir()?, PathBuf::from);
            let paths = Paths::with_root_and_config(
                &root,
                Some(get_config_location(&root, cmd.config_location)?),
            )?;
            commands::init::init_command(&paths, cmd).await.inspect_err(|_| {
                outro_cancel!("An error occurred during initialization");
            })?;
            outro!("Done initializing!");
        }
        Command::Install(cmd) => {
            intro!("🦌 Soldeer Install 🦌");
            let root = Paths::get_root_path();
            let paths = Paths::with_root_and_config(
                &root,
                Some(get_config_location(&root, cmd.config_location)?),
            )?;
            commands::install::install_command(&paths, cmd).await.inspect_err(|_| {
                outro_cancel!("An error occurred during install");
            })?;
            outro!("Done installing!");
        }
        Command::Update(cmd) => {
            intro!("🦌 Soldeer Update 🦌");
            let root = Paths::get_root_path();
            let paths = Paths::with_root_and_config(
                &root,
                Some(get_config_location(&root, cmd.config_location)?),
            )?;
            commands::update::update_command(&paths, cmd).await.inspect_err(|_| {
                outro_cancel!("An error occurred during the update");
            })?;
            outro!("Done updating!");
        }
        Command::Uninstall(cmd) => {
            intro!("🦌 Soldeer Uninstall 🦌");
            let root = Paths::get_root_path();
            let paths = Paths::with_root_and_config(&root, Some(get_config_location(&root, None)?))?;
            commands::uninstall::uninstall_command(&paths, &cmd).inspect_err(|_| {
                outro_cancel!("An error occurred during uninstall");
            })?;
            outro!("Done uninstalling!");
        }
        Command::Clean(cmd) => {
            intro!("🦌 Soldeer Clean 🦌");
            let root = Paths::get_root_path();
            let paths = Paths::with_root_and_config(&root, Some(get_config_location(&root, None)?))?;
            commands::clean::clean_command(&paths, &cmd).inspect_err(|_| {
                outro_cancel!("An error occurred during clean");
            })?;
            outro!("Done cleaning!");
        }
        Command::Login(cmd) => {
            intro!("🦌 Soldeer Login 🦌");
            commands::login::login_command(cmd).await.inspect_err(|_| {
                outro_cancel!("An error occurred during login");
            })?;
            outro!("Done logging in!");
        }
        Command::Push(cmd) => {
            intro!("🦌 Soldeer Push 🦌");
            commands::push::push_command(cmd).await.inspect_err(|_| {
                outro_cancel!("An error occurred during push");
            })?;
            outro!("Done!");
        }
        Command::Version(_) => {
            const VERSION: &str = env!("CARGO_PKG_VERSION");
            println!("soldeer {VERSION}");
        }
    }
    Ok(())
}

================================================ FILE: crates/commands/src/utils.rs ================================================
#![allow(unused_macros)]
//! Utils for the commands crate
use std::{fmt, path::Path};

use crate::ConfigLocation;
use cliclack::{MultiProgress, ProgressBar, multi_progress, progress_bar, select};
use soldeer_core::{Result, config::detect_config_location, install::InstallMonitoring};

/// Template for the progress bars.
pub const PROGRESS_TEMPLATE: &str = "[{elapsed_precise}] {bar:30.magenta} ({pos}/{len}) {msg}";

/// A collection of progress bars for the installation/update process.
// NOTE(review): field types appear as bare `Option` — presumably `Option<MultiProgress>` for
// `multi` and `Option<ProgressBar>` for the rest; generics seem lost in extraction.
#[derive(Clone, Default)]
pub struct Progress {
    multi: Option,
    versions: Option,
    downloads: Option,
    unzip: Option,
    subdependencies: Option,
    integrity: Option,
}

impl Progress {
    /// Create a new progress bar object.
    ///
    /// A title and the total number of dependencies to install must be passed as an argument.
pub fn new(title: impl fmt::Display, total: usize, mut monitor: InstallMonitoring) -> Self {
        // When the TUI is disabled, no bars are drawn; the monitor channels are still
        // drained in background tasks so senders never block, and a `Default` (all-None)
        // Progress is returned.
        if !crate::TUI_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            tokio::task::spawn(async move { while (monitor.logs.recv().await).is_some() {} });
            tokio::task::spawn(async move { while (monitor.versions.recv().await).is_some() {} });
            tokio::task::spawn(async move { while (monitor.downloads.recv().await).is_some() {} });
            tokio::task::spawn(async move { while (monitor.unzip.recv().await).is_some() {} });
            tokio::task::spawn(
                async move { while (monitor.subdependencies.recv().await).is_some() {} },
            );
            tokio::task::spawn(async move { while (monitor.integrity.recv().await).is_some() {} });
            return Self::default();
        }
        // One bar per installation phase, all sharing the same template and total.
        let multi = multi_progress(title);
        let versions = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));
        let downloads = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));
        let unzip = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));
        let subdependencies = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));
        let integrity = multi.add(progress_bar(total as u64).with_template(PROGRESS_TEMPLATE));
        // Each background task forwards events from one monitor channel to its bar.
        tokio::task::spawn({
            let multi = multi.clone();
            async move {
                while let Some(log) = monitor.logs.recv().await {
                    multi.println(log);
                }
            }
        });
        tokio::task::spawn({
            let versions = versions.clone();
            async move {
                while let Some(dep) = monitor.versions.recv().await {
                    versions.inc(1);
                    versions.set_message(format!("Got version for {dep}"));
                }
            }
        });
        tokio::task::spawn({
            let downloads = downloads.clone();
            async move {
                while let Some(dep) = monitor.downloads.recv().await {
                    downloads.inc(1);
                    downloads.set_message(format!("Downloaded {dep}"));
                }
            }
        });
        tokio::task::spawn({
            let unzip = unzip.clone();
            async move {
                while let Some(dep) = monitor.unzip.recv().await {
                    unzip.inc(1);
                    unzip.set_message(format!("Unzipped {dep}"));
                }
            }
        });
        tokio::task::spawn({
            let subdependencies = subdependencies.clone();
            async move {
                while let Some(dep) = monitor.subdependencies.recv().await {
                    subdependencies.inc(1);
                    subdependencies.set_message(format!("Installed subdeps for {dep}"));
                }
            }
        });
        tokio::task::spawn({
            let integrity = integrity.clone();
            async move {
                while let Some(dep) = monitor.integrity.recv().await {
                    integrity.inc(1);
                    integrity.set_message(format!("Checked integrity of {dep}"));
                }
            }
        });
        Self {
            multi: Some(multi),
            versions: Some(versions),
            downloads: Some(downloads),
            unzip: Some(unzip),
            subdependencies: Some(subdependencies),
            integrity: Some(integrity),
        }
    }

    /// Start all progress bars.
    // Each bar is Option: no-op when the TUI is disabled (all fields None).
    pub fn start_all(&self) {
        self.versions.as_ref().inspect(|p| p.start("Retrieving versions..."));
        self.downloads.as_ref().inspect(|p| p.start("Downloading dependencies..."));
        self.unzip.as_ref().inspect(|p| p.start("Unzipping dependencies..."));
        self.subdependencies.as_ref().inspect(|p| p.start("Installing subdependencies..."));
        self.integrity.as_ref().inspect(|p| p.start("Checking integrity..."));
    }

    /// Stop all progress bars.
    pub fn stop_all(&self) {
        self.versions.as_ref().inspect(|p| p.stop("Done retrieving versions"));
        self.downloads.as_ref().inspect(|p| p.stop("Done downloading dependencies"));
        self.unzip.as_ref().inspect(|p| p.stop("Done unzipping dependencies"));
        self.subdependencies.as_ref().inspect(|p| p.stop("Done installing subdependencies"));
        self.integrity.as_ref().inspect(|p| p.stop("Done checking integrity"));
        self.multi.as_ref().inspect(|p| p.stop());
    }

    /// Display an error on the multi-progress container (no-op when TUI is disabled).
    pub fn set_error(&self, error: impl fmt::Display) {
        self.multi.as_ref().inspect(|m| m.error(error));
    }
}

/// Auto-detect config location or prompt the user for preference.
// NOTE(review): the parameter/return generics (presumably `AsRef<Path>`,
// `Option<ConfigLocation>` and `Result<ConfigLocation>`) appear lost in extraction.
pub fn get_config_location(
    root: impl AsRef,
    arg: Option,
) -> Result {
    Ok(match arg {
        // An explicit CLI argument always wins.
        Some(loc) => loc.into(),
        // Otherwise try auto-detection, and fall back to an interactive prompt.
        None => match detect_config_location(root) {
            Some(loc) => loc,
            None => prompt_config_location()?.into(),
        },
    })
}

/// Prompt the user for their desired config location in case it cannot be auto-detected.
// NOTE(review): return type presumably `Result<ConfigLocation>`; generic lost in extraction.
pub fn prompt_config_location() -> Result {
    // The selected item value ("foundry"/"soldeer") round-trips through FromStr on the
    // ConfigLocation newtype; both values are valid variants, hence the expect.
    Ok(select("Select how you want to configure Soldeer")
        .initial_value("foundry")
        .item("foundry", "Using foundry.toml", "recommended")
        .item("soldeer", "Using soldeer.toml", "for non-foundry projects")
        .interact()?
        .parse()
        .expect("all options should be valid variants of the ConfigLocation enum"))
}

// Generates a crate-local macro wrapping one cliclack logging function; the wrapper is a
// no-op when the TUI is disabled (structured-logging mode).
macro_rules! define_cliclack_macro {
    ($name:ident, $path:path) => {
        macro_rules! $name {
            ($expression:expr) => {
                if $crate::TUI_ENABLED.load(::std::sync::atomic::Ordering::Relaxed) {
                    $path($expression).ok();
                }
            };
        }
    };
}

define_cliclack_macro!(intro, ::cliclack::intro);
define_cliclack_macro!(note, ::cliclack::note);
define_cliclack_macro!(outro, ::cliclack::outro);
define_cliclack_macro!(outro_cancel, ::cliclack::outro_cancel);
define_cliclack_macro!(outro_note, ::cliclack::outro_note);
define_cliclack_macro!(error, ::cliclack::log::error);
define_cliclack_macro!(info, ::cliclack::log::info);
define_cliclack_macro!(remark, ::cliclack::log::remark);
define_cliclack_macro!(step, ::cliclack::log::step);
define_cliclack_macro!(success, ::cliclack::log::success);
define_cliclack_macro!(warning, ::cliclack::log::warning);

#[allow(unused_imports)]
pub(crate) use error;
pub(crate) use info;
pub(crate) use intro;
#[allow(unused_imports)]
pub(crate) use note;
pub(crate) use outro;
pub(crate) use outro_cancel;
#[allow(unused_imports)]
pub(crate) use outro_note;
pub(crate) use remark;
pub(crate) use step;
pub(crate) use success;
pub(crate) use warning;

================================================ FILE: crates/commands/tests/tests-clean.rs ================================================
use soldeer_commands::{
    Command, Verbosity,
    commands::{clean::Clean, install::Install},
    run,
};
use soldeer_core::{
    config::read_config_deps,
    lock::{SOLDEER_LOCK, read_lockfile},
};
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    fs,
    path::{Path, PathBuf},
};
use temp_env::async_with_vars;
use testdir::testdir;
/// Assert the post-`clean` state: dependencies dir gone, config preserved with both deps.
#[allow(clippy::unwrap_used)]
fn check_clean_success(dir: &Path, config_filename: &str) {
    assert!(!dir.join("dependencies").exists(), "Dependencies folder should be removed");
    let config_path = dir.join(config_filename);
    assert!(config_path.exists(), "Config file should be preserved");
    let (deps, _) = read_config_deps(&config_path).unwrap();
    assert_eq!(deps.len(), 2, "Config should still have 2 dependencies");
    assert_eq!(deps[0].name(), "@openzeppelin-contracts");
    assert_eq!(deps[1].name(), "solady");
}

/// Assert the post-`install` state: dependencies dir and lockfile present and populated.
#[allow(clippy::unwrap_used)]
fn check_artifacts_exist(dir: &Path) {
    assert!(dir.join("dependencies").exists(), "Dependencies folder should exist");
    assert!(dir.join(SOLDEER_LOCK).exists(), "Lock file should exist");
    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();
    assert_eq!(lock.entries.len(), 2, "Lock file should have 2 entries");
    let deps_dir = dir.join("dependencies");
    // NOTE(review): turbofish garbled by extraction; presumably `collect::<Result<Vec<_>, _>>()`.
    let entries: Vec<_> = fs::read_dir(&deps_dir).unwrap().collect::, _>>().unwrap();
    assert!(!entries.is_empty(), "Dependencies directory should have content");
}

/// Create a temp project with two registry dependencies and run `install`.
///
/// Returns the project root dir. `SOLDEER_PROJECT_ROOT` scopes the run to the temp dir.
// NOTE(review): raw-string config contents below are reproduced as they appear in this
// extraction (space-joined); the original file most likely uses real newlines.
#[allow(clippy::unwrap_used)]
async fn setup_project_with_dependencies(config_filename: &str) -> PathBuf {
    let dir = testdir!();
    let mut contents = r#"[dependencies] "@openzeppelin-contracts" = "5.0.2" solady = "0.0.238" "# .to_string();
    // foundry.toml additionally needs a libs setting pointing at the dependencies dir.
    if config_filename == "foundry.toml" {
        contents = format!( r#"[profile.default] libs = ["dependencies"] {contents}"# );
    }
    fs::write(dir.join(config_filename), contents).unwrap();
    let cmd: Command = Install::default().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    dir
}

// Clean after install on a soldeer.toml project removes artifacts but keeps the config.
#[tokio::test]
async fn test_clean_basic() {
    let dir = setup_project_with_dependencies("soldeer.toml").await;
    assert!(dir.join("dependencies").exists());
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    check_clean_success(&dir, "soldeer.toml");
}

// Same as the basic test, but with the config stored in foundry.toml.
#[tokio::test]
async fn test_clean_foundry_config() {
    let dir = setup_project_with_dependencies("foundry.toml").await;
    check_artifacts_exist(&dir);
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    check_clean_success(&dir, "foundry.toml");
}

// Clean on a project with no dependencies dir or lockfile should be a silent no-op.
#[tokio::test]
async fn test_clean_no_artifacts() {
    let dir = testdir!();
    fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
    // Run clean on empty project (no dependencies folder or lock file)
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    // Should succeed silently
    assert!(res.is_ok(), "{res:?}");
}

// After clean, a subsequent install restores the dependencies dir; the lockfile survives clean.
#[tokio::test]
async fn test_clean_restores_with_install() {
    let dir = setup_project_with_dependencies("soldeer.toml").await;
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(!dir.join("dependencies").exists());
    assert!(dir.join(SOLDEER_LOCK).exists(), "Lock file should remain after clean");
    let cmd: Command = Install::default().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(dir.join("dependencies").exists());
    let dependencies_dir = dir.join("dependencies");
    // NOTE(review): turbofish garbled by extraction; presumably `collect::<Result<Vec<_>, _>>()`.
    let entries: Vec<_> = fs::read_dir(dependencies_dir).unwrap().collect::, _>>().unwrap();
    assert!(!entries.is_empty(), "Dependencies should be installed");
}

// Clean handles nested dirs, symlinks (Unix) and a large file without error.
#[tokio::test]
async fn test_clean_with_complex_file_structure() {
    let dir = setup_project_with_dependencies("soldeer.toml").await;
    let complex_path = dir.join("dependencies").join("nested").join("deep").join("structure");
    fs::create_dir_all(&complex_path).unwrap();
    fs::write(complex_path.join("test.txt"), "nested content").unwrap();
    // Create symlink (Unix only)
    #[cfg(unix)]
    {
        use std::os::unix::fs::symlink;
        let _ = symlink(dir.join("soldeer.toml"), dir.join("dependencies").join("config_link"));
    }
    // Create large file to test performance
    let large_content = "x".repeat(1024 * 1024); // 1MB
    fs::write(dir.join("dependencies").join("large_file.txt"), large_content).unwrap();
    let cmd: Command = Clean::builder().build().into();
    let res: Result<(), soldeer_core::SoldeerError> = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    check_clean_success(&dir, "soldeer.toml");
}

// Clean fails when the dependencies dir is read-only (Unix); permissions restored afterwards.
#[tokio::test]
async fn test_clean_permission_error() {
    let dir = setup_project_with_dependencies("soldeer.toml").await;
    #[cfg(unix)]
    {
        let deps_path = dir.join("dependencies");
        let mut perms = fs::metadata(&deps_path).unwrap().permissions();
        perms.set_mode(0o444); // Read-only
        fs::set_permissions(&deps_path, perms).unwrap();
        let cmd: Command = Clean::builder().build().into();
        let res: Result<(), soldeer_core::SoldeerError> = async_with_vars(
            [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
            run(cmd, Verbosity::default()),
        )
        .await;
        // Should fail due to permission error
        assert!(res.is_err(), "Clean should fail with permission error");
        // Restore permissions so the temp dir can be removed.
        let mut perms = fs::metadata(&deps_path).unwrap().permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&deps_path, perms).unwrap();
    }
    #[cfg(not(unix))]
    {
        // On non-Unix systems, just run a successful clean
        let cmd: Command = Clean::builder().build().into();
        let res = async_with_vars(
            [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
            run(cmd, Verbosity::default()),
        )
        .await;
        assert!(res.is_ok(), "{res:?}");
    }
}

// Clean must not touch custom [soldeer] settings in the config file.
#[tokio::test]
async fn test_clean_with_soldeer_config_variations() {
    let dir = testdir!();
    // NOTE(review): raw-string contents reproduced as space-joined per this extraction;
    // the original file most likely uses real newlines between TOML entries.
    let contents = r#"[soldeer] remappings_generate = false remappings_regenerate = true remappings_location = "config" [dependencies] "@openzeppelin-contracts" = "5.0.2" solady = "0.0.238" "#;
    fs::write(dir.join("soldeer.toml"), contents).unwrap();
    let cmd: Command = Install::default().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    check_artifacts_exist(&dir);
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    check_clean_success(&dir, "soldeer.toml");
    // Verify custom config is preserved
    let config_content = fs::read_to_string(dir.join("soldeer.toml")).unwrap();
    assert!(config_content.contains("remappings_generate = false"));
    assert!(config_content.contains("remappings_location = \"config\""));
}

// Running clean repeatedly must be idempotent.
#[tokio::test]
async fn test_clean_multiple_times() {
    let dir = setup_project_with_dependencies("soldeer.toml").await;
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    let cmd: Command = Clean::builder().build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    // Verify final state
    check_clean_success(&dir, "soldeer.toml");
}
================================================
FILE: crates/commands/tests/tests-init.rs
================================================
use soldeer_commands::{Command, Verbosity, commands::init::Init, run};
use soldeer_core::{
    config::{ConfigLocation, read_config_deps},
    lock::{SOLDEER_LOCK, read_lockfile},
    registry::get_latest_version,
    utils::run_git_command,
};
use std::{fs, path::Path};
use temp_env::async_with_vars;
use testdir::testdir;

/// Shared post-`init` assertions (previously duplicated verbatim in
/// `test_init_clean` and `test_init_no_clean`): dependencies installed,
/// config and lockfile list `forge-std`, remappings generated, `.gitignore`
/// and `foundry.toml` updated.
fn check_init_artifacts(dir: &Path) {
    assert!(dir.join("dependencies").exists());
    let (deps, _) = read_config_deps(dir.join("soldeer.toml")).unwrap();
    assert_eq!(deps.first().unwrap().name(), "forge-std");
    let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap();
    assert_eq!(lock.entries.first().unwrap().name(), "forge-std");
    let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap();
    assert!(remappings.contains("forge-std"));
    let gitignore = fs::read_to_string(dir.join(".gitignore")).unwrap();
    assert!(gitignore.contains("/dependencies"));
    let foundry_config = fs::read_to_string(dir.join("foundry.toml")).unwrap();
    assert!(foundry_config.contains("libs = [\"dependencies\"]"));
}

/// `init --clean` on a cloned forge template removes the submodule setup
/// (`lib/`, `.gitmodules`) and installs via soldeer instead.
#[tokio::test]
async fn test_init_clean() {
    let dir = testdir!();
    run_git_command(
        ["clone", "--recursive", "https://github.com/foundry-rs/forge-template.git", "."],
        Some(&dir),
    )
    .await
    .unwrap();
    fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
    let cmd: Command =
        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(!dir.join("lib").exists());
    assert!(!dir.join(".gitmodules").exists());
    check_init_artifacts(&dir);
}

/// `init` without `--clean` leaves the submodule setup in place while still
/// installing the soldeer artifacts.
#[tokio::test]
async fn test_init_no_clean() {
    let dir = testdir!();
    run_git_command(
        ["clone", "--recursive", "https://github.com/foundry-rs/forge-template.git", "."],
        Some(&dir),
    )
    .await
    .unwrap();
    fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
    let cmd: Command = Init::builder().config_location(ConfigLocation::Soldeer).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(dir.join("lib").exists());
    assert!(dir.join(".gitmodules").exists());
    check_init_artifacts(&dir);
}

/// `remappings_generate = false` must suppress remappings.txt creation.
#[tokio::test]
async fn test_init_no_remappings() {
    let dir = testdir!();
    run_git_command(
        ["clone", "--recursive", "https://github.com/foundry-rs/forge-template.git", "."],
        Some(&dir),
    )
    .await
    .unwrap();
    let contents = r"[soldeer]
remappings_generate = false

[dependencies]
";
    fs::write(dir.join("soldeer.toml"), contents).unwrap();
    let cmd: Command =
        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(!dir.join("remappings.txt").exists());
}

/// Init must not create a `.gitignore` when the project has none.
#[tokio::test]
async fn test_init_no_gitignore() {
    let dir = testdir!();
    run_git_command(
        ["clone", "--recursive", "https://github.com/foundry-rs/forge-template.git", "."],
        Some(&dir),
    )
    .await
    .unwrap();
    fs::remove_file(dir.join(".gitignore")).unwrap();
    fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
    let cmd: Command =
        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    assert!(!dir.join(".gitignore").exists());
}

/// Init into an empty dir with the foundry config location writes the
/// dependency into `foundry.toml`.
#[tokio::test]
async fn test_init_select_foundry_location() {
    let dir = testdir!();
    let cmd: Command =
        Init::builder().clean(true).config_location(ConfigLocation::Foundry).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    let forge_std = get_latest_version("forge-std").await.unwrap();
    let config_path = dir.join("foundry.toml");
    assert!(config_path.exists());
    // NOTE(review): blank-line placement inside this expected string was
    // reconstructed from a whitespace-mangled source — verify against the
    // actual `init` output if this exact-equality assert fails.
    let contents = format!(
        r#"[profile.default]
src = "src"
out = "out"
libs = ["dependencies"]

[dependencies]
forge-std = "{}"

# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#,
        forge_std.version_req()
    );
    assert_eq!(fs::read_to_string(config_path).unwrap(), contents);
}

/// Init into an empty dir with the soldeer config location writes the
/// dependency into `soldeer.toml`.
#[tokio::test]
async fn test_init_select_soldeer_location() {
    let dir = testdir!();
    let cmd: Command =
        Init::builder().clean(true).config_location(ConfigLocation::Soldeer).build().into();
    let res = async_with_vars(
        [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    let forge_std = get_latest_version("forge-std").await.unwrap();
    let config_path = dir.join("soldeer.toml");
    assert!(config_path.exists());
    let contents = format!(
        r#"[dependencies]
forge-std = "{}"
"#,
        forge_std.version_req()
    );
    assert_eq!(fs::read_to_string(config_path).unwrap(), contents);
}

================================================
FILE: crates/commands/tests/tests-install.rs
================================================
#![allow(clippy::unwrap_used)]
use mockito::Matcher;
use
soldeer_commands::{Command, Verbosity, commands::install::Install, run}; use soldeer_core::{ SoldeerError, config::{ConfigLocation, read_config_deps}, download::download_file, errors::InstallError, lock::{SOLDEER_LOCK, read_lockfile}, push::zip_file, utils::hash_file, }; use std::{ fs::{self}, path::{Path, PathBuf}, }; use temp_env::async_with_vars; use testdir::testdir; fn check_install(dir: &Path, name: &str, version_req: &str) { assert!(dir.join("dependencies").exists()); let mut config_path = dir.join("soldeer.toml"); if !config_path.exists() { config_path = dir.join("foundry.toml"); } let (deps, _) = read_config_deps(config_path).unwrap(); assert_eq!(deps.first().unwrap().name(), name); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert!(remappings.contains(name)); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!(lock.entries.first().unwrap().name(), name); let version = lock.entries.first().unwrap().version(); assert!(version.starts_with(version_req)); assert!(dir.join("dependencies").join(format!("{name}-{version}")).exists()); } fn create_zip_monorepo(testdir: &Path) -> PathBuf { let root = testdir.join("monorepo"); fs::create_dir(&root).unwrap(); let contracts = root.join("contracts"); fs::create_dir(&contracts).unwrap(); let mut files = Vec::new(); files.push(root.join("README.md")); fs::write( files.last().unwrap(), "Root of the repo is here, foundry project is under `contracts`", ) .unwrap(); files.push(contracts.join("foundry.toml")); fs::write( files.last().unwrap(), r#"[profile.default] libs = ["dependencies"] remappings = ["forge-std/=dependencies/forge-std-1.11.0/src/"] [dependencies] forge-std = "1.11.0" [soldeer] remappings_location = "config" recursive_deps = true"#, ) .unwrap(); zip_file(&root, &files, "test").unwrap() // zip is inside the `monorepo` folder } fn create_zip_with_foundry_lock(testdir: &Path, branch: Option<&str>) -> PathBuf { let root = testdir.join("foundry_lock_project"); 
fs::create_dir(&root).unwrap(); let lib = root.join("lib"); fs::create_dir(&lib).unwrap(); let mut files = Vec::new(); files.push(root.join("foundry.toml")); fs::write( files.last().unwrap(), r#"[profile.default] src = "src" out = "out" libs = ["lib"] "#, ) .unwrap(); files.push(root.join(".gitmodules")); let gitmodules_content = if let Some(branch) = branch { format!( r#"[submodule "lib/forge-std"] path = lib/forge-std url = https://github.com/foundry-rs/forge-std branch = {branch} "# ) } else { r#"[submodule "lib/forge-std"] path = lib/forge-std url = https://github.com/foundry-rs/forge-std "# .to_string() }; fs::write(files.last().unwrap(), gitmodules_content).unwrap(); files.push(root.join("foundry.lock")); let foundry_lock_content = if let Some(branch) = branch { format!( r#"{{ "lib/forge-std": {{ "branch": {{ "name": "{branch}", "rev": "c29afdd40a82db50a3d3709d324416be50050e5e" }} }} }}"# ) } else { r#"{ "lib/forge-std": { "rev": "c29afdd40a82db50a3d3709d324416be50050e5e" } }"# .to_string() }; fs::write(files.last().unwrap(), foundry_lock_content).unwrap(); zip_file(&root, &files, "test").unwrap() } #[tokio::test] async fn test_install_registry_any_version() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder().dependency("@openzeppelin-contracts~5").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "@openzeppelin-contracts", "5"); } #[tokio::test] async fn test_install_registry_wildcard() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder().dependency("solady~*").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); 
check_install(&dir, "solady", ""); } #[tokio::test] async fn test_install_registry_specific_version() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder().dependency("@openzeppelin-contracts~4.9.5").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "@openzeppelin-contracts", "4.9.5"); } #[tokio::test] async fn test_install_custom_http() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder().dependency("mylib~1.0.0") .zip_url("https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip") .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "1.0.0"); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lock.entries.first().unwrap().as_http().unwrap().url, "https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip" ); assert!(&dir.join("dependencies").join("mylib-1.0.0").join("README.md").exists()); } #[tokio::test] async fn test_install_git_main() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder() .dependency("mylib~0.1.0") .git_url("https://github.com/beeb/test-repo.git") .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "0.1.0"); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lock.entries.first().unwrap().as_git().unwrap().rev, "d5d72fa135d28b2e8307650b3ea79115183f2406" 
); assert!(&dir.join("dependencies").join("mylib-0.1.0").join("foo.txt").exists()); } #[tokio::test] async fn test_install_git_commit() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder() .dependency("mylib~0.1.0") .git_url("https://github.com/beeb/test-repo.git") .rev("78c2f6a1a54db26bab6c3f501854a1564eb3707f") .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "0.1.0"); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lock.entries.first().unwrap().as_git().unwrap().rev, "78c2f6a1a54db26bab6c3f501854a1564eb3707f" ); assert!(!&dir.join("dependencies").join("mylib-1.0.0").join("foo.txt").exists()); } #[tokio::test] async fn test_install_git_tag() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder() .dependency("mylib~0.1.0") .git_url("https://github.com/beeb/test-repo.git") .tag("v0.1.0") .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "0.1.0"); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lock.entries.first().unwrap().as_git().unwrap().rev, "78c2f6a1a54db26bab6c3f501854a1564eb3707f" ); assert!(!&dir.join("dependencies").join("mylib-1.0.0").join("foo.txt").exists()); } #[tokio::test] async fn test_install_git_branch() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder() .dependency("mylib~dev") .git_url("https://github.com/beeb/test-repo.git") .branch("dev") .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], 
run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "dev"); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lock.entries.first().unwrap().as_git().unwrap().rev, "8d903e557e8f1b6e62bde768aa456d4ddfca72c4" ); assert!(!&dir.join("dependencies").join("mylib-1.0.0").join("test.txt").exists()); } #[tokio::test] async fn test_install_foundry_config() { let dir = testdir!(); fs::write(dir.join("foundry.toml"), "[dependencies]\n").unwrap(); let cmd: Command = Install::builder().dependency("@openzeppelin-contracts~5").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "@openzeppelin-contracts", "5"); } #[tokio::test] async fn test_install_foundry_remappings() { let dir = testdir!(); let contents = r#"[profile.default] [soldeer] remappings_location = "config" [dependencies] "@openzeppelin-contracts" = "5.1.0" "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("foundry.toml")).unwrap(); assert!(config.contains( "remappings = [\"@openzeppelin-contracts-5.1.0/=dependencies/@openzeppelin-contracts-5.1.0/\"]" )); } #[tokio::test] async fn test_install_missing_no_lock() { let dir = testdir!(); let contents = r#"[dependencies] "@openzeppelin-contracts" = "5.0.2" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "@openzeppelin-contracts", 
"5.0.2"); } #[tokio::test] async fn test_install_missing_with_lock() { let dir = testdir!(); let contents = r#"[dependencies] mylib = "1.1" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lock = r#"[[dependencies]] name = "mylib" version = "1.1.0" url = "https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip" checksum = "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" integrity = "f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2" "#; fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); check_install(&dir, "mylib", "1.1"); } #[tokio::test] async fn test_install_second_time() { let dir = testdir!(); let contents = r#"[dependencies] mylib = "1.1" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); // get zip file locally for mock let zip_file = download_file( "https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip", &dir, "tmp", ) .await .unwrap(); // serve the file with mock server let mut server = mockito::Server::new_async().await; let mock = server.mock("GET", "/file.zip").with_body_from_file(zip_file).create_async().await; let mock = mock.expect(1); // download link should be called exactly once let lock = format!( r#"[[dependencies]] name = "mylib" version = "1.1.0" url = "{}/file.zip" checksum = "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" integrity = "f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2" "#, server.url() ); fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), 
"{res:?}"); mock.assert(); // download link was called // second install let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); mock.assert(); // download link was not called a second time } #[tokio::test] async fn test_install_private_second_time() { let dir = testdir!(); let contents = r#"[dependencies] test-private = "0.1.0" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); // get zip file locally for mock let zip_file = download_file( "https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip", &dir, "tmp", ) .await .unwrap(); // serve the file with mock server let mut server = mockito::Server::new_async().await; let data = format!( r#"{{"data":[{{"created_at":"2025-09-28T12:36:09.526660Z","deleted":false,"id":"0440c261-8cdf-4738-9139-c4dc7b0c7f3e","internal_name":"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip","private":true,"project_id":"14f419e7-2d64-49e4-86b9-b44b36627786","url":"{}/file.zip","version":"0.1.0"}}],"status":"success"}}"#, server.url() ); server.mock("GET", "/file.zip").with_body_from_file(zip_file).create_async().await; server .mock("GET", "/api/v1/revision-cli") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let lock = r#"[[dependencies]] name = "test-private" version = "0.1.0" checksum = "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" integrity = "f3c628f3e9eae4db14fe14f9ab29e49a0107c47b8ee956e4cee57b616b493fc2" "#; fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [ ("SOLDEER_API_URL", Some(server.url().as_str())), ("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref())), ], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // second install let res = async_with_vars( [ 
("SOLDEER_API_URL", Some(server.url().as_str())), ("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref())), ], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); } #[tokio::test] async fn test_install_add_existing_reinstall() { let dir = testdir!(); let contents = r#"[dependencies] "@openzeppelin-contracts" = "5.0.2" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok()); // remove dependencies folder and lockfile fs::remove_dir_all(dir.join("dependencies")).unwrap(); fs::remove_file(dir.join(SOLDEER_LOCK)).unwrap(); // re-add the same dep, should re-install it let cmd: Command = Install::builder().dependency("@openzeppelin-contracts~5.0.2").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok()); let dep_path = dir.join("dependencies").join("@openzeppelin-contracts-5.0.2"); assert!(dep_path.exists()); } #[tokio::test] async fn test_install_clean() { let dir = testdir!(); let contents = r#"[dependencies] "@openzeppelin-contracts" = "5.0.2" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let test_path = dir.join("dependencies").join("foo"); fs::create_dir_all(&test_path).unwrap(); fs::write(test_path.join("foo.txt"), "test").unwrap(); let cmd: Command = Install::builder().clean(true).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); assert!(!test_path.exists()); } #[tokio::test] async fn test_install_recursive_deps() { let dir = testdir!(); let contents = r#"[dependencies] foo = { version = "0.1.0", git = "https://github.com/foundry-rs/forge-template.git" } "#; 
fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder().recursive_deps(true).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let dep_path = dir.join("dependencies").join("foo-0.1.0"); assert!(dep_path.exists()); let sub_dirs_path = dep_path.join("lib"); assert!(sub_dirs_path.exists()); assert!(sub_dirs_path.join("forge-std").join("src").exists()); } #[tokio::test] async fn test_install_recursive_deps_soldeer() { let dir = testdir!(); // this template uses soldeer to install forge-std let contents = r#"[dependencies] foo = { version = "0.1.0", git = "https://github.com/beeb/forge-template.git" } "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder().recursive_deps(true).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let dep_path = dir.join("dependencies").join("foo-0.1.0"); assert!(dep_path.exists()); let sub_dirs_path = dep_path.join("dependencies"); assert!(sub_dirs_path.exists()); assert!(sub_dirs_path.join("forge-std-1.9.7").join("src").exists()); } #[tokio::test] async fn test_install_recursive_deps_nested() { let dir = testdir!(); let contents = r#"[dependencies] "@uniswap-permit2" = { version = "1.0.0", url = "https://github.com/Uniswap/permit2/archive/cc56ad0f3439c502c246fc5cfcc3db92bb8b7219.zip" } "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder().recursive_deps(true).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let paths = [ "@uniswap-permit2-1.0.0/lib/forge-std/src", 
"@uniswap-permit2-1.0.0/lib/forge-gas-snapshot/dependencies/forge-std-1.9.2/src", "@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/erc4626-tests/ERC4626.test.sol", "@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/forge-std/src", "@uniswap-permit2-1.0.0/lib/openzeppelin-contracts/lib/halmos-cheatcodes/src", "@uniswap-permit2-1.0.0/lib/solmate/lib/ds-test/src", ]; for path in paths { let dep_path = dir.join("dependencies").join(path); assert!(dep_path.exists()); } } #[tokio::test] async fn test_install_recursive_project_root() { let dir = testdir!(); let zip_path = create_zip_monorepo(&dir); let checksum = hash_file(&zip_path).unwrap(); let contents = r#"[dependencies] mylib = { version = "1.0.0", project_root = "contracts" } [soldeer] recursive_deps = true "#; // serve the dependency which uses foundry in a `contracts` subfolder let mut server = mockito::Server::new_async().await; server.mock("GET", "/file.zip").with_body_from_file(zip_path).create_async().await; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lock = format!( r#"[[dependencies]] name = "mylib" version = "1.0.0" url = "{}/file.zip" checksum = "{checksum}" integrity = "e629088e5b74df78f116a24c328a64fd002b4e42449607b6ca78f9afb799374d" "#, server.url() ); fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // check that we recursively installed all deps assert!(dir.join("dependencies/mylib-1.0.0/contracts/dependencies/forge-std-1.11.0").is_dir()); } #[tokio::test] async fn test_install_recursive_project_root_invalid_path() { let dir = testdir!(); let zip_path = create_zip_monorepo(&dir); let checksum = hash_file(&zip_path).unwrap(); // directory traversal is forbidden let contents = r#"[dependencies] mylib = { version = "1.0.0", project_root = 
"../../../contracts" } [soldeer] recursive_deps = true "#; // serve the dependency which uses foundry in a `contracts` subfolder let mut server = mockito::Server::new_async().await; server.mock("GET", "/file.zip").with_body_from_file(zip_path).create_async().await; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lock = format!( r#"[[dependencies]] name = "mylib" version = "1.0.0" url = "{}/file.zip" checksum = "{checksum}" integrity = "e629088e5b74df78f116a24c328a64fd002b4e42449607b6ca78f9afb799374d" "#, server.url() ); fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(matches!( res.unwrap_err(), SoldeerError::InstallError(InstallError::ConfigError( soldeer_core::errors::ConfigError::InvalidProjectRoot { .. } )) )); } #[tokio::test] async fn test_install_regenerate_remappings() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); fs::write(dir.join("remappings.txt"), "foo=bar").unwrap(); let cmd: Command = Install::builder() .dependency("@openzeppelin-contracts~5") .regenerate_remappings(true) .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert!(!remappings.contains("foo=bar")); assert!(remappings.contains("@openzeppelin-contracts")); } #[tokio::test] async fn test_add_remappings() { let dir = testdir!(); let contents = r#"[profile.default] src = "src" out = "out" libs = ["dependencies"] # See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options [soldeer] remappings_generate = true remappings_prefix = "@custom-f@" remappings_location = "config" 
remappings_regenerate = true [dependencies] "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Install::builder().dependency("forge-std~1.8.1").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let updated_contents = r#"[profile.default] src = "src" out = "out" libs = ["dependencies"] remappings = ["@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/"] # See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options [soldeer] remappings_generate = true remappings_prefix = "@custom-f@" remappings_location = "config" remappings_regenerate = true [dependencies] forge-std = "1.8.1" "#; assert_eq!(updated_contents, fs::read_to_string(dir.join("foundry.toml")).unwrap()); } #[tokio::test] async fn test_modifying_remappings_prefix_config() { let dir = testdir!(); let contents = r#"[profile.default] libs = ["dependencies"] remappings = ["@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/"] [soldeer] remappings_prefix = "!custom-f!" remappings_regenerate = true remappings_location = "config" [dependencies] "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Install::builder().dependency("forge-std~1.8.1").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let expected = r#"[profile.default] libs = ["dependencies"] remappings = ["!custom-f!forge-std-1.8.1/=dependencies/forge-std-1.8.1/"] [soldeer] remappings_prefix = "!custom-f!" 
remappings_regenerate = true remappings_location = "config" [dependencies] forge-std = "1.8.1" "#; assert_eq!(expected, fs::read_to_string(dir.join("foundry.toml")).unwrap()); } #[tokio::test] async fn test_modifying_remappings_prefix_txt() { let dir = testdir!(); let contents = r#"[profile.default] [soldeer] remappings_prefix = "!custom-f!" remappings_regenerate = true [dependencies] "#; fs::write( dir.join("remappings.txt"), "@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/", ) .unwrap(); fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Install::builder().dependency("forge-std~1.8.1").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let updated_contents = r#"!custom-f!forge-std-1.8.1/=dependencies/forge-std-1.8.1/ "#; assert_eq!(updated_contents, fs::read_to_string(dir.join("remappings.txt")).unwrap()); } #[tokio::test] async fn test_install_new_foundry_no_dependency_tag() { let dir = testdir!(); let contents = r#"[profile.default] libs = ["lib"] "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Install::builder() .dependency("@openzeppelin-contracts~5") .config_location(ConfigLocation::Foundry) .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("foundry.toml")).unwrap(); let content = r#"[profile.default] libs = ["lib", "dependencies"] [dependencies] "@openzeppelin-contracts" = "5" "#; assert_eq!(config, content); } #[tokio::test] async fn test_install_new_soldeer_no_soldeer_toml() { let dir = testdir!(); let cmd: Command = Install::builder() .dependency("@openzeppelin-contracts~5") .config_location(ConfigLocation::Soldeer) .build() .into(); let res = async_with_vars( 
[("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("soldeer.toml")).unwrap(); let content = r#"[dependencies] "@openzeppelin-contracts" = "5" "#; assert_eq!(config, content); } #[tokio::test] async fn test_install_new_soldeer_no_dependency_tag() { let dir = testdir!(); let contents = r#"[soldeer] "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Install::builder() .dependency("@openzeppelin-contracts~5") .config_location(ConfigLocation::Soldeer) .build() .into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("soldeer.toml")).unwrap(); let content = r#"[soldeer] [dependencies] "@openzeppelin-contracts" = "5" "#; assert_eq!(config, content); } #[tokio::test] async fn test_install_recursive_deps_with_foundry_lock() { let dir = testdir!(); let zip_path = create_zip_with_foundry_lock(&dir, None); let checksum = hash_file(&zip_path).unwrap(); let contents = r#"[dependencies] mylib = "1.0.0" [soldeer] recursive_deps = true "#; // Serve the dependency via mock server let mut server = mockito::Server::new_async().await; server.mock("GET", "/file.zip").with_body_from_file(&zip_path).create_async().await; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lock = format!( r#"[[dependencies]] name = "mylib" version = "1.0.0" url = "{}/file.zip" checksum = "{checksum}" integrity = "placeholder" "#, server.url() ); fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // Verify the submodule exists let forge_std_path = 
dir.join("dependencies/mylib-1.0.0/lib/forge-std"); assert!(forge_std_path.exists()); // Verify it's checked out at the specific revision from foundry.lock let output = std::process::Command::new("git") .args(["rev-parse", "HEAD"]) .current_dir(&forge_std_path) .output() .expect("failed to run git rev-parse"); let current_rev = String::from_utf8_lossy(&output.stdout).trim().to_string(); assert_eq!(current_rev, "c29afdd40a82db50a3d3709d324416be50050e5e"); } #[tokio::test] async fn test_install_recursive_deps_with_foundry_lock_branch() { let dir = testdir!(); let zip_path = create_zip_with_foundry_lock(&dir, Some("master")); let checksum = hash_file(&zip_path).unwrap(); let contents = r#"[dependencies] mylib = "1.0.0" [soldeer] recursive_deps = true "#; // Serve the dependency via mock server let mut server = mockito::Server::new_async().await; server.mock("GET", "/file.zip").with_body_from_file(&zip_path).create_async().await; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lock = format!( r#"[[dependencies]] name = "mylib" version = "1.0.0" url = "{}/file.zip" checksum = "{checksum}" integrity = "placeholder" "#, server.url() ); fs::write(dir.join(SOLDEER_LOCK), lock).unwrap(); let cmd: Command = Install::builder().build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd.clone(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // Verify the submodule exists let forge_std_path = dir.join("dependencies/mylib-1.0.0/lib/forge-std"); assert!(forge_std_path.exists()); // Verify it's checked out at the specific revision from foundry.lock let output = std::process::Command::new("git") .args(["rev-parse", "HEAD"]) .current_dir(&forge_std_path) .output() .expect("failed to run git rev-parse"); let current_rev = String::from_utf8_lossy(&output.stdout).trim().to_string(); assert_eq!(current_rev, "c29afdd40a82db50a3d3709d324416be50050e5e",); } 
================================================
FILE: crates/commands/tests/tests-login.rs
================================================

use std::{fs, path::PathBuf};

use mockito::{Matcher, Mock, ServerGuard};
use soldeer_commands::{Command, Verbosity, commands::login::Login, run};
use temp_env::async_with_vars;
use testdir::testdir;

/// Spin up a mock registry whose login endpoint answers with a fixed JWT.
///
/// The `ServerGuard` is returned alongside the `Mock` because the mock server
/// shuts down when the guard is dropped.
async fn mock_api_server() -> (ServerGuard, Mock) {
    let mut server = mockito::Server::new_async().await;
    let body = r#"{"status":"success","token": "example_token_jwt"}"#;
    let mock = server
        .mock("POST", "/api/v1/auth/login")
        .match_query(Matcher::Any)
        .with_header("content-type", "application/json")
        .with_body(body)
        .create_async()
        .await;
    (server, mock)
}

/// Mock registry whose CLI-token validation endpoint answers with a complete,
/// valid user profile payload.
async fn mock_api_server_token() -> (ServerGuard, Mock) {
    let mut server = mockito::Server::new_async().await;
    let body = r#"{"status":"success","data":{"created_at": "2024-08-04T14:21:31.622589Z","email": "test@test.net","id": "b6d56bf0-00a5-474f-b732-f416bef53e92","organization": "test","role": "owner","updated_at": "2024-08-04T14:21:31.622589Z","username": "test","verified": true}}"#;
    let mock = server
        .mock("GET", "/api/v1/auth/validate-cli-token")
        .match_query(Matcher::Any)
        .with_header("content-type", "application/json")
        .with_body(body)
        .create_async()
        .await;
    (server, mock)
}

// No SOLDEER_API_URL override here, so this hits the real default API with
// bogus credentials and asserts the exact user-facing error string.
// NOTE(review): this test requires network access to api.soldeer.xyz — confirm
// it is acceptable in CI.
#[tokio::test]
async fn test_login_without_prompt_err_400() {
    let cmd: Command = Login::builder().email("test@test.com").password("111111").build().into();
    let res = run(cmd, Verbosity::default()).await;
    assert_eq!(
        res.unwrap_err().to_string(),
        "error during login: http error during login: HTTP status client error (400 Bad Request) for url (https://api.soldeer.xyz/api/v1/auth/login)"
    );
}

// Successful email/password login must persist the returned JWT into the file
// pointed at by SOLDEER_LOGIN_FILE.
#[tokio::test]
async fn test_login_without_prompt_success() {
    let (server, mock) = mock_api_server().await;
    let dir = testdir!();
    let login_file: PathBuf = dir.join("test_save_jwt");
    let cmd: Command = Login::builder().email("test@test.com").password("111111").build().into();
    let res = async_with_vars(
        [
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok());
    assert!(login_file.exists());
    assert_eq!(fs::read_to_string(login_file).unwrap(), "example_token_jwt");
    // NOTE(review): `expect` only records the expected hit count; without a
    // subsequent `mock.assert()`/`assert_async()` it presumably performs no
    // verification — confirm against the mockito API.
    mock.expect(1);
}

// Token-based login: the token is validated against the API and, on success,
// written to the login file verbatim.
#[tokio::test]
async fn test_login_token_success() {
    let (server, mock) = mock_api_server_token().await;
    let dir = testdir!();
    let login_file: PathBuf = dir.join("test_save_jwt");
    let cmd: Command = Login::builder().token("example_token_jwt").build().into();
    let res = async_with_vars(
        [
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(cmd, Verbosity::default()),
    )
    .await;
    assert!(res.is_ok());
    assert!(login_file.exists());
    assert_eq!(fs::read_to_string(login_file).unwrap(), "example_token_jwt");
    mock.expect(1);
}

// An invalid token is rejected with the exact "invalid token" error message.
#[tokio::test]
async fn test_login_token_failure() {
    let cmd: Command = Login::builder().token("asdf").build().into();
    let res = run(cmd, Verbosity::default()).await;
    assert_eq!(res.unwrap_err().to_string(), "error during login: login error: invalid token");
}

================================================
FILE: crates/commands/tests/tests-push.rs
================================================

use mockito::{Matcher, Mock, ServerGuard};
use reqwest::StatusCode;
use soldeer_commands::{Verbosity, commands::push::Push, run};
use soldeer_core::{SoldeerError, errors::PublishError};
use std::{env, fs, path::PathBuf};
use temp_env::async_with_vars;
use testdir::testdir;

/// Create a fake package project (`mypkg` with a minimal `foundry.toml`) and a
/// login file containing a JWT; returns `(login_file, project_path)`.
///
/// When `dotfile` is true, a `.env` file is placed inside the package so that
/// push-warning behavior can be exercised.
#[allow(clippy::unwrap_used)]
fn setup_project(dotfile: bool) -> (PathBuf, PathBuf) {
    let dir = testdir!();
    let login_file: PathBuf = dir.join("test_save_jwt");
    fs::write(&login_file, "jwt_token_example").unwrap();
    let project_path = dir.join("mypkg");
    fs::create_dir(&project_path).unwrap();
    fs::write(project_path.join("foundry.toml"), "[dependencies]\n").unwrap();
    if dotfile {
        fs::write(project_path.join(".env"),
"super-secret-stuff").unwrap(); } (login_file, project_path) } async fn mock_api_server(status_code: Option) -> (ServerGuard, Mock) { let mut server = mockito::Server::new_async().await; let body = r#"{"data":[{"created_at":"2024-02-27T19:19:23.938837Z","created_by":"96228bb5-f777-4c19-ba72-363d14b8beed","deleted":false,"deprecated":false,"description":"","downloads":648041,"github_url":"","id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","image":"","latest_version":"1.10.0","long_description":"","name":"mock","organization_id":"ff9c0d8e-9275-4f6f-a1b7-2e822450a7ba","organization_name":"","organization_verified":true,"updated_at":"2024-02-27T19:19:23.938837Z"}],"status":"success"}"#; server .mock("GET", "/api/v2/project") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(body) .create_async() .await; let mock = match status_code { Some(status_code) => { server .mock("POST", "/api/v1/revision/upload") .with_header("content-type", "application/json") .with_status(status_code.as_u16() as usize) .with_body(r#"{"status":"fail","message": "failure"}"#) .create_async() .await } None => { server .mock("POST", "/api/v1/revision/upload") .with_header("content-type", "application/json") .with_body(r#"{"status":"success","data":{"data":{"project_id":"mock"}}}"#) .create_async() .await } }; (server, mock) } #[tokio::test] async fn test_push_success() { let (login_file, project_path) = setup_project(false); let (server, mock) = mock_api_server(None).await; env::set_current_dir(&project_path).unwrap(); let res = async_with_vars( [ ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())), ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())), ], run(Push::builder().dependency("mypkg~0.1.0").build().into(), Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); mock.expect(1); } #[tokio::test] async fn test_push_other_dir_success() { let dir = testdir!(); 
fs::write(dir.join("foundry.toml"), "[dependencies]\n").unwrap(); let login_file = dir.join("test_save_jwt"); fs::write(&login_file, "jwt_token_example").unwrap(); let project_path = dir.join("mypkg"); fs::create_dir(&project_path).unwrap(); fs::write(project_path.join("test.sol"), "contract Foo {}\n").unwrap(); let (server, mock) = mock_api_server(None).await; let res = async_with_vars( [ ("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().to_string())), ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())), ], run( Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(), Verbosity::default(), ), ) .await; assert!(res.is_ok(), "{res:?}"); mock.expect(1); } #[tokio::test] async fn test_push_not_found() { let (login_file, project_path) = setup_project(false); let (server, mock) = mock_api_server(Some(StatusCode::NO_CONTENT)).await; let res = async_with_vars( [ ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())), ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())), ], run( Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(), Verbosity::default(), ), ) .await; assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::ProjectNotFound)))); mock.expect(1); } #[tokio::test] async fn test_push_already_exists() { let (login_file, project_path) = setup_project(false); let (server, mock) = mock_api_server(Some(StatusCode::ALREADY_REPORTED)).await; let res = async_with_vars( [ ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())), ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())), ], run( Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(), Verbosity::default(), ), ) .await; assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::AlreadyExists)))); mock.expect(1); } 
// A 401 from the upload endpoint surfaces as a publish auth error.
#[tokio::test]
async fn test_push_unauthorized() {
    let (login_file, project_path) = setup_project(false);
    let (server, mock) = mock_api_server(Some(StatusCode::UNAUTHORIZED)).await;
    let res = async_with_vars(
        [
            ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())),
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(
            Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(),
            Verbosity::default(),
        ),
    )
    .await;
    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::AuthError(_)))));
    // NOTE(review): `expect` without a later `assert()` presumably does not
    // verify the hit count — confirm against the mockito API.
    mock.expect(1);
}

// A 413 from the upload endpoint maps to the dedicated payload-too-large error.
#[tokio::test]
async fn test_push_payload_too_large() {
    let (login_file, project_path) = setup_project(false);
    let (server, mock) = mock_api_server(Some(StatusCode::PAYLOAD_TOO_LARGE)).await;
    let res = async_with_vars(
        [
            ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())),
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(
            Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(),
            Verbosity::default(),
        ),
    )
    .await;
    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::PayloadTooLarge))));
    mock.expect(1);
}

// Any other error status (here: 500) falls through to the generic HTTP error.
#[tokio::test]
async fn test_push_other_error() {
    let (login_file, project_path) = setup_project(false);
    let (server, mock) = mock_api_server(Some(StatusCode::INTERNAL_SERVER_ERROR)).await;
    let res = async_with_vars(
        [
            ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())),
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(
            Push::builder().dependency("mypkg~0.1.0").path(project_path).build().into(),
            Verbosity::default(),
        ),
    )
    .await;
    assert!(matches!(res, Err(SoldeerError::PublishError(PublishError::HttpError(_)))));
    mock.expect(1);
}

// Dry-run produces the zip archive locally but must not hit the upload
// endpoint (expected hit count 0).
#[tokio::test]
async fn test_push_dry_run() {
    let (login_file, project_path) = setup_project(true); // insert a .env file
    let (server, mock) = mock_api_server(None).await;
    let res = async_with_vars(
        [
            ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())),
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(
            Push::builder()
                .dependency("mypkg~0.1.0")
                .path(&project_path)
                .dry_run(true)
                .build()
                .into(),
            Verbosity::default(),
        ),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    mock.expect(0);
    assert!(project_path.join("mypkg.zip").exists());
}

// With warnings skipped, the `.env` dotfile does not block the upload.
#[tokio::test]
async fn test_push_skip_warnings() {
    let (login_file, project_path) = setup_project(true); // insert a .env file
    let (server, mock) = mock_api_server(None).await;
    let res = async_with_vars(
        [
            ("SOLDEER_PROJECT_ROOT", Some(project_path.to_string_lossy().to_string())),
            ("SOLDEER_API_URL", Some(server.url())),
            ("SOLDEER_LOGIN_FILE", Some(login_file.to_string_lossy().to_string())),
        ],
        run(
            Push::builder()
                .dependency("mypkg~0.1.0")
                .path(&project_path)
                .skip_warnings(true)
                .build()
                .into(),
            Verbosity::default(),
        ),
    )
    .await;
    assert!(res.is_ok(), "{res:?}");
    mock.expect(1);
}

================================================
FILE: crates/commands/tests/tests-uninstall.rs
================================================

use soldeer_commands::{
    Command, Verbosity,
    commands::{install::Install, uninstall::Uninstall},
    run,
};
use soldeer_core::{
    config::read_config_deps,
    lock::{SOLDEER_LOCK, read_lockfile},
};
use std::{fs, path::PathBuf};
use temp_env::async_with_vars;
use testdir::testdir;

/// Install two registry dependencies into a fresh project so the uninstall
/// tests have something to remove; returns the project directory.
///
/// When the config file is `foundry.toml`, remappings are stored in the config
/// itself rather than in `remappings.txt`.
#[allow(clippy::unwrap_used)]
async fn setup(config_filename: &str) -> PathBuf {
    let dir = testdir!();
    let mut contents = r#"[dependencies]
"@openzeppelin-contracts" = "5.0.2"
solady = "0.0.238"
"#
    .to_string();
    if config_filename == "foundry.toml" {
        contents = format!(
            r#"[profile.default]

[soldeer]
remappings_location = "config"

{contents}"#
        );
    }
    fs::write(dir.join(config_filename), contents).unwrap();
    let cmd: Command = Install::default().into();
    let res =
async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); dir } #[tokio::test] async fn test_uninstall_one() { let dir = setup("soldeer.toml").await; let cmd: Command = Uninstall::builder().dependency("solady").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let (deps, _) = read_config_deps(dir.join("soldeer.toml")).unwrap(); assert!(!deps.iter().any(|d| d.name() == "solady")); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert!(!remappings.contains("solady")); let lock = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert!(!lock.entries.iter().any(|d| d.name() == "solady")); assert!(!dir.join("dependencies").join("solady-0.0.238").exists()); } #[tokio::test] async fn test_uninstall_all() { let dir = setup("soldeer.toml").await; let cmd: Command = Uninstall::builder().dependency("solady").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let cmd: Command = Uninstall::builder().dependency("@openzeppelin-contracts").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let (deps, _) = read_config_deps(dir.join("soldeer.toml")).unwrap(); assert!(deps.is_empty()); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert_eq!(remappings, ""); assert!(!dir.join(SOLDEER_LOCK).exists()); } #[tokio::test] async fn test_uninstall_foundry_config() { let dir = setup("foundry.toml").await; let cmd: Command = Uninstall::builder().dependency("solady").build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", 
Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let (deps, _) = read_config_deps(dir.join("foundry.toml")).unwrap(); assert!(!deps.iter().any(|d| d.name() == "solady")); let config = fs::read_to_string(dir.join("foundry.toml")).unwrap(); assert!(!config.contains("solady")); } ================================================ FILE: crates/commands/tests/tests-update.rs ================================================ use soldeer_commands::{ Command, Verbosity, commands::{install::Install, update::Update}, run, }; use soldeer_core::{ config::ConfigLocation, lock::{SOLDEER_LOCK, read_lockfile}, }; use std::{fs, path::PathBuf}; use temp_env::async_with_vars; use testdir::testdir; #[allow(clippy::unwrap_used)] async fn setup(config_filename: &str) -> PathBuf { // install v1.9.0 of forge-std (faking an old install) let dir = testdir!(); let mut contents = r#"[dependencies] forge-std = "1.9.0" "# .to_string(); if config_filename == "foundry.toml" { contents = format!( r#"[profile.default] [soldeer] remappings_location = "config" {contents}"# ); } fs::write(dir.join(config_filename), &contents).unwrap(); let cmd: Command = Install::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // change install requirement to forge-std ^1.0.0 (making the current install outdated) contents = contents.replace("1.9.0", "1"); fs::write(dir.join(config_filename), &contents).unwrap(); // update remappings accordingly fs::write(dir.join("remappings.txt"), "forge-std-1/=dependencies/forge-std-1.9.0/\n").unwrap(); dir } #[tokio::test] async fn test_update_existing() { let dir = setup("soldeer.toml").await; let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; 
assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); let version = lockfile.entries.first().unwrap().version(); assert_ne!(version, "1.9.0"); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert_eq!(remappings, format!("forge-std-1/=dependencies/forge-std-{version}/\n")); assert!(dir.join("dependencies").join(format!("forge-std-{version}")).exists()); } #[tokio::test] async fn test_update_foundry_config() { let dir = setup("foundry.toml").await; let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); let version = lockfile.entries.first().unwrap().version(); assert_ne!(version, "1.9.0"); assert!(dir.join("dependencies").join(format!("forge-std-{version}")).exists()); } #[tokio::test] async fn test_update_missing() { let dir = testdir!(); let contents = r#"[dependencies] forge-std = "1" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); let version = lockfile.entries.first().unwrap().version(); assert!(dir.join("dependencies").join(format!("forge-std-{version}")).exists()); } #[tokio::test] async fn test_update_custom_remappings() { let dir = setup("soldeer.toml").await; // customize remappings before update fs::write(dir.join("remappings.txt"), "forge-std/=dependencies/forge-std-1.9.0/src/\n") .unwrap(); let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; 
assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); let version = lockfile.entries.first().unwrap().version(); let remappings = fs::read_to_string(dir.join("remappings.txt")).unwrap(); assert_eq!(remappings, format!("forge-std/=dependencies/forge-std-{version}/src/\n")); } #[tokio::test] async fn test_update_git_main() { let dir = testdir!(); // install older commit in "main" branch let contents = r#"[dependencies] my-lib = { version = "branch-main", git = "https://github.com/beeb/test-repo.git" } "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lockfile = r#"[[dependencies]] name = "my-lib" version = "branch-main" git = "https://github.com/beeb/test-repo.git" rev = "78c2f6a1a54db26bab6c3f501854a1564eb3707f" "#; fs::write(dir.join(SOLDEER_LOCK), lockfile).unwrap(); let cmd: Command = Install::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // update to latest commit in "main" branch let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lockfile.entries.first().unwrap().as_git().unwrap().rev, "d5d72fa135d28b2e8307650b3ea79115183f2406" ); } #[tokio::test] async fn test_update_git_branch() { let dir = testdir!(); // install older commit in "dev" branch let contents = r#"[dependencies] my-lib = { version = "branch-dev", git = "https://github.com/beeb/test-repo.git", branch = "dev" } "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let lockfile = r#"[[dependencies]] name = "my-lib" version = "branch-dev" git = "https://github.com/beeb/test-repo.git" rev = "78c2f6a1a54db26bab6c3f501854a1564eb3707f" "#; fs::write(dir.join(SOLDEER_LOCK), 
lockfile).unwrap(); let cmd: Command = Install::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); // update to latest commit in "dev" branch let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(dir.join(SOLDEER_LOCK)).unwrap(); assert_eq!( lockfile.entries.first().unwrap().as_git().unwrap().rev, "8d903e557e8f1b6e62bde768aa456d4ddfca72c4" ); } #[tokio::test] async fn test_update_foundry_config_multi_dep() { let dir = testdir!(); let contents = r#"[profile.default] [dependencies] "@tt" = {version = "1.6.1", url = "https://soldeer-revisions.s3.amazonaws.com/@openzeppelin-contracts/3_3_0-rc_2_22-01-2024_13:12:57_contracts.zip"} forge-std = { version = "1.8.1" } solmate = "6.7.0" mario = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", rev = "22868f426bd4dd0e682b5ec5f9bd55507664240c" } mario-custom-tag = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-tag" } mario-custom-branch = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-branch" } [soldeer] remappings_location = "config" "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let cmd: Command = Update::default().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let deps = dir.join("dependencies"); assert!(deps.join("@tt-1.6.1").exists()); assert!(deps.join("forge-std-1.8.1").exists()); assert!(deps.join("solmate-6.7.0").exists()); assert!(deps.join("mario-1.0").exists()); assert!(deps.join("mario-custom-tag-1.0").exists()); 
assert!(deps.join("mario-custom-branch-1.0").exists()); } #[tokio::test] async fn test_install_new_foundry_no_foundry_toml() { let dir = testdir!(); let cmd: Command = Update::builder().config_location(ConfigLocation::Foundry).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("foundry.toml")).unwrap(); let expected = r#"[profile.default] src = "src" out = "out" libs = ["dependencies"] [dependencies] # See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options "#; assert_eq!(config, expected); } #[tokio::test] async fn test_install_new_soldeer_no_soldeer_toml() { let dir = testdir!(); let cmd: Command = Update::builder().config_location(ConfigLocation::Soldeer).build().into(); let res = async_with_vars( [("SOLDEER_PROJECT_ROOT", Some(dir.to_string_lossy().as_ref()))], run(cmd, Verbosity::default()), ) .await; assert!(res.is_ok(), "{res:?}"); let config = fs::read_to_string(dir.join("soldeer.toml")).unwrap(); let content = "[dependencies]\n"; assert_eq!(config, content); } ================================================ FILE: crates/core/Cargo.toml ================================================ [package] name = "soldeer-core" description = "Core functionality for Soldeer" authors.workspace = true categories.workspace = true edition.workspace = true exclude.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true readme.workspace = true repository.workspace = true rust-version.workspace = true version.workspace = true [lints] workspace = true [dependencies] bon.workspace = true chrono = { version = "0.4.38", default-features = false, features = [ "serde", "std", ] } const-hex = "1.12.0" derive_more.workspace = true dunce = "1.0.5" home = "0.5.9" ignore = { version = "0.4.24", features = ["simd-accel"] } log 
= { workspace = true, features = ["kv_std"] } path-slash.workspace = true rayon.workspace = true regex = "1.10.5" reqwest = { workspace = true, features = ["json", "multipart", "stream"] } sanitize-filename = "0.6.0" semver = "1.0.23" serde = { version = "1.0.204", features = ["derive"] } serde_json = "1.0.120" sha2 = "0.10.8" thiserror.workspace = true tokio.workspace = true toml_edit = { version = "0.25.11", features = ["serde"] } uuid = { version = "1.10.0", features = ["serde", "v4"] } zip = { version = "4.0.0", default-features = false, features = ["deflate"] } zip-extract = { version = "0.4.0", default-features = false, features = [ "deflate", ] } [dev-dependencies] mockito.workspace = true temp-env.workspace = true testdir.workspace = true [features] serde = [] ================================================ FILE: crates/core/src/auth.rs ================================================ //! Registry authentication use crate::{errors::AuthError, registry::api_url, utils::login_file_path}; use log::{debug, info, warn}; use reqwest::{ Client, StatusCode, header::{AUTHORIZATION, HeaderMap, HeaderValue}, }; use serde::{Deserialize, Serialize}; use std::{env, fs, path::PathBuf}; pub type Result = std::result::Result; /// Credentials to be used for login #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct Credentials { pub email: String, pub password: String, } /// Response from the login endpoint #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct LoginResponse { pub status: String, /// JWT token pub token: String, } /// Get the JWT token from the environment or from the login file /// /// Precedence is given to the `SOLDEER_API_TOKEN` environment variable. 
// NOTE(review): the generic type parameters of these signatures appear to have
// been stripped in transit (`-> Result {`); they are restored here from each
// function's `Ok(...)` value. `Result` is assumed to be this module's alias
// with `E = AuthError` — confirm against the alias declaration.
pub fn get_token() -> Result<String> {
    // The environment variable takes precedence over the login file.
    if let Ok(token) = env::var("SOLDEER_API_TOKEN")
        && !token.is_empty()
    {
        return Ok(token)
    }
    let token_path = login_file_path()?;
    // Any read failure (missing file, permissions, …) is reported uniformly as
    // a missing token.
    let jwt =
        fs::read_to_string(&token_path).map_err(|_| AuthError::MissingToken)?.trim().to_string();
    if jwt.is_empty() {
        debug!(token_path:?; "token file exists but is empty");
        return Err(AuthError::MissingToken);
    }
    debug!(token_path:?; "token retrieved from file");
    Ok(jwt)
}

/// Get a header map with the bearer token set up if it exists
///
/// A missing token is not an error here: the returned map is simply empty and
/// the request proceeds unauthenticated.
pub fn get_auth_headers() -> Result<HeaderMap> {
    let mut headers: HeaderMap = HeaderMap::new();
    let Ok(token) = get_token() else {
        return Ok(headers);
    };
    // An unrepresentable header value (non-ASCII token) maps to InvalidToken.
    let header_value =
        HeaderValue::from_str(&format!("Bearer {token}")).map_err(|_| AuthError::InvalidToken)?;
    headers.insert(AUTHORIZATION, header_value);
    Ok(headers)
}

/// Save an access token in the login file, returning the path written to.
pub fn save_token(token: &str) -> Result<PathBuf> {
    let token_path = login_file_path()?;
    fs::write(&token_path, token)?;
    Ok(token_path)
}

/// Retrieve user profile for the token to check its validity, returning the username
pub async fn check_token(token: &str) -> Result<String> {
    let client = Client::new();
    let url = api_url("v1", "auth/validate-cli-token", &[]);
    let mut headers: HeaderMap = HeaderMap::new();
    let header_value =
        HeaderValue::from_str(&format!("Bearer {token}")).map_err(|_| AuthError::InvalidToken)?;
    headers.insert(AUTHORIZATION, header_value);
    let response = client.get(url).headers(headers).send().await?;
    match response.status() {
        s if s.is_success() => {
            // Minimal projection of the profile payload: only the fields that
            // are logged or returned get deserialized.
            #[derive(Deserialize)]
            struct User {
                id: String,
                username: String,
            }
            #[derive(Deserialize)]
            struct UserResponse {
                data: User,
            }
            let res: UserResponse = response.json().await?;
            debug!("token is valid for user {} with ID {}", res.data.username, res.data.id);
            Ok(res.data.username)
        }
        StatusCode::UNAUTHORIZED => Err(AuthError::InvalidToken),
        // Other non-success statuses are wrapped as generic HTTP errors.
        _ => Err(AuthError::HttpError(
            response.error_for_status().expect_err("result should be an error"),
        )),
    }
}

/// Execute the login request and
store the JWT token in the login file pub async fn execute_login(login: &Credentials) -> Result { warn!( "the option to login via email and password will be removed in a future version of Soldeer. Please update your usage by either using `soldeer login --token [YOUR CLI TOKEN]` or passing the `SOLDEER_API_TOKEN` environment variable to the `push` command." ); let token_path = login_file_path()?; let url = api_url("v1", "auth/login", &[]); let client = Client::new(); let res = client.post(url).json(login).send().await?; match res.status() { s if s.is_success() => { debug!("login request completed"); let response: LoginResponse = res.json().await?; fs::write(&token_path, response.token)?; info!(token_path:?; "login successful"); Ok(token_path) } StatusCode::UNAUTHORIZED => Err(AuthError::InvalidCredentials), _ => Err(AuthError::HttpError( res.error_for_status().expect_err("result should be an error"), )), } } #[cfg(test)] mod tests { use super::*; use temp_env::{async_with_vars, with_var}; use testdir::testdir; #[tokio::test] async fn test_login_success() { let mut server = mockito::Server::new_async().await; server .mock("POST", "/api/v1/auth/login") .with_status(201) .with_header("content-type", "application/json") .with_body(r#"{"status":"200","token":"jwt_token_example"}"#) .create_async() .await; let test_file = testdir!().join("test_save_jwt"); let res = async_with_vars( [ ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(test_file.to_string_lossy().to_string())), ], execute_login(&Credentials { email: "test@test.com".to_string(), password: "1234".to_string(), }), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(fs::canonicalize(res.unwrap()).unwrap(), fs::canonicalize(&test_file).unwrap()); assert_eq!(fs::read_to_string(test_file).unwrap(), "jwt_token_example"); } #[tokio::test] async fn test_login_401() { let mut server = mockito::Server::new_async().await; server .mock("POST", "/api/v1/auth/login") .with_status(401) 
.with_header("content-type", "application/json") .with_body(r#"{"status":"401"}"#) .create_async() .await; let test_file = testdir!().join("test_save_jwt"); let res = async_with_vars( [ ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(test_file.to_string_lossy().to_string())), ], execute_login(&Credentials { email: "test@test.com".to_string(), password: "1234".to_string(), }), ) .await; assert!(matches!(res, Err(AuthError::InvalidCredentials)), "{res:?}"); } #[tokio::test] async fn test_login_500() { let mut server = mockito::Server::new_async().await; server .mock("POST", "/api/v1/auth/login") .with_status(500) .with_header("content-type", "application/json") .with_body(r#"{"status":"500"}"#) .create_async() .await; let test_file = testdir!().join("test_save_jwt"); let res = async_with_vars( [ ("SOLDEER_API_URL", Some(server.url())), ("SOLDEER_LOGIN_FILE", Some(test_file.to_string_lossy().to_string())), ], execute_login(&Credentials { email: "test@test.com".to_string(), password: "1234".to_string(), }), ) .await; assert!(matches!(res, Err(AuthError::HttpError(_))), "{res:?}"); } #[tokio::test] async fn test_check_token_success() { let mut server = mockito::Server::new_async().await; server .mock("GET", "/api/v1/auth/validate-cli-token") .with_status(200) .with_header("content-type", "application/json") .with_body( r#"{"status":"success","data":{"created_at": "2024-08-04T14:21:31.622589Z","email": "test@test.net","id": "b6d56bf0-00a5-474f-b732-f416bef53e92","organization": "test","role": "owner","updated_at": "2024-08-04T14:21:31.622589Z","username": "test","verified": true}}"#, ) .create_async() .await; let res = async_with_vars([("SOLDEER_API_URL", Some(server.url()))], check_token("eyJ0...")) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "test"); } #[tokio::test] async fn test_check_token_failure() { let mut server = mockito::Server::new_async().await; server .mock("GET", "/api/v1/auth/validate-cli-token") 
.with_status(401) .with_header("content-type", "application/json") .with_body(r#"{"status":"fail","message":"Invalid token"}"#) .create_async() .await; let res = async_with_vars([("SOLDEER_API_URL", Some(server.url()))], check_token("foobar")).await; assert!(res.is_err(), "{res:?}"); } #[test] fn test_get_token_env() { let res = with_var("SOLDEER_API_TOKEN", Some("test"), get_token); assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "test"); } } ================================================ FILE: crates/core/src/config.rs ================================================ //! Manage the Soldeer configuration and dependencies list. use crate::{ download::{find_install_path, find_install_path_sync}, errors::ConfigError, lock::SOLDEER_LOCK, remappings::RemappingsLocation, }; use derive_more::derive::{Display, From, FromStr}; use log::{debug, warn}; use serde::Deserialize; use std::{ env, fmt, fs, path::{Path, PathBuf}, }; use toml_edit::{Array, DocumentMut, InlineTable, Item, Table, value}; pub type Result = std::result::Result; #[derive(Debug, Clone, PartialEq, Eq, Hash, Display)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum UrlType { Git(String), Http(String), } impl UrlType { pub fn git(url: impl Into) -> Self { Self::Git(url.into()) } pub fn http(url: impl Into) -> Self { Self::Http(url.into()) } } /// The paths used by Soldeer. /// /// The paths are canonicalized on creation of the object. /// /// To create this object, the [`Paths::new`] and [`Paths::from_root`] methods can be used. 
/// /// # Examples /// /// ``` /// # use soldeer_core::config::Paths; /// # let dir = testdir::testdir!(); /// # std::env::set_current_dir(&dir).unwrap(); /// # std::fs::write("foundry.toml", "[dependencies]\n").unwrap(); /// let paths = Paths::new().unwrap(); // foundry.toml exists in the current path /// assert_eq!(paths.root, std::env::current_dir().unwrap()); /// assert_eq!(paths.config, std::env::current_dir().unwrap().join("foundry.toml")); /// /// let paths = Paths::from_root(&dir).unwrap(); // root is the given path /// assert_eq!(paths.root, dir); /// ``` #[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] // making sure the struct is not constructible from the outside without using the new/from methods #[non_exhaustive] pub struct Paths { /// The root directory of the project. /// /// At the moment, the current directory or the path given by the `SOLDEER_PROJECT_ROOT` /// environment variable. pub root: PathBuf, /// The path to the config file. /// /// `foundry.toml` if it contains a `[dependencies]` table, otherwise `soldeer.toml` if it /// exists. Otherwise, the `foundry.toml` file is used by default. When the config file does /// not exist, a new one is created with default contents. pub config: PathBuf, /// The path to the dependencies folder (does not need to exist). /// /// This is `/dependencies` inside the root directory. pub dependencies: PathBuf, /// The path to the lockfile (does not need to exist). /// /// This is `/soldeer.lock` inside the root directory. pub lock: PathBuf, /// The path to the remappings file (does not need to exist). /// /// This path gets ignored if the remappings should be generated in the `foundry.toml` file. /// This is `/remappings.txt` inside the root directory. pub remappings: PathBuf, } impl Paths { /// Instantiate all the paths needed for Soldeer. 
/// /// The root path defaults to the current directory but can be overridden with the /// `SOLDEER_PROJECT_ROOT` environment variable. /// /// The paths are canonicalized. pub fn new() -> Result { Self::with_config(None) } /// Instantiate all the paths needed for Soldeer. /// /// The root path is automatically detected (by traversing the path) but can be overridden with /// the `SOLDEER_PROJECT_ROOT` environment variable. /// Alternatively, the [`Paths::with_root_and_config`] constructor can be used. /// /// If a config location is provided, it bypasses auto-detection and uses that. If `None`, then /// the location is auto-detected or if impossible, the `foundry.toml` file is used. If the /// config file does not exist yet, it gets created with default content. /// /// The paths are canonicalized. pub fn with_config(config_location: Option) -> Result { let root = dunce::canonicalize(Self::get_root_path())?; Self::with_root_and_config(root, config_location) } /// Instantiate all the paths needed for Soldeer. /// /// If a config location is provided, it bypasses auto-detection and uses that. If `None`, then /// the location is auto-detected or if impossible, the `foundry.toml` file is used. If the /// config file does not exist yet, it gets created with default content. /// /// The paths are canonicalized. pub fn with_root_and_config( root: impl AsRef, config_location: Option, ) -> Result { let root = root.as_ref(); let config = Self::get_config_path(root, config_location)?; let dependencies = root.join("dependencies"); let lock = root.join(SOLDEER_LOCK); let remappings = root.join("remappings.txt"); Ok(Self { root: root.to_path_buf(), config, dependencies, lock, remappings }) } /// Generate the paths object from a known root directory. /// /// The `SOLDEER_PROJECT_ROOT` environment variable is ignored. /// /// The paths are canonicalized. 
pub fn from_root(root: impl AsRef<Path>) -> Result<Self> {
    let root = dunce::canonicalize(root.as_ref())?;
    let config = Self::get_config_path(&root, None)?;
    let dependencies = root.join("dependencies");
    let lock = root.join(SOLDEER_LOCK);
    let remappings = root.join("remappings.txt");
    Ok(Self { root, config, dependencies, lock, remappings })
}

/// Get the root directory path.
///
/// If `SOLDEER_PROJECT_ROOT` is present in the environment, this is the returned value. Else,
/// we search for the root of the project with `find_project_root`.
pub fn get_root_path() -> PathBuf {
    let res = env::var("SOLDEER_PROJECT_ROOT").map_or_else(
        |_| {
            debug!("SOLDEER_PROJECT_ROOT not defined, searching for project root");
            find_project_root(None::<PathBuf>).expect("could not find project root")
        },
        |p| {
            // an empty env var is treated the same as an unset one
            if p.is_empty() {
                debug!("SOLDEER_PROJECT_ROOT exists but is empty, searching for project root");
                find_project_root(None::<PathBuf>).expect("could not find project root")
            } else {
                debug!(path = p; "root set by SOLDEER_PROJECT_ROOT");
                PathBuf::from(p)
            }
        },
    );
    debug!(path:? = res; "found project root");
    res
}

/// Get the path to the config file.
///
/// If a parameter is given for `config_location`, it will be used. Otherwise, the function will
/// try to auto-detect the location based on the existence of the `dependencies` entry in
/// the foundry config file, or the existence of a `soldeer.toml` file. If no config can be
/// found, `foundry.toml` is used by default.
fn get_config_path( root: impl AsRef, config_location: Option, ) -> Result { let foundry_path = root.as_ref().join("foundry.toml"); let soldeer_path = root.as_ref().join("soldeer.toml"); // use the user preference if available let location = config_location.or_else(|| { debug!("no preferred config location, trying to detect automatically"); detect_config_location(root) }).unwrap_or_else(|| { warn!("config file location could not be determined automatically, using foundry by default"); ConfigLocation::Foundry }); debug!("using config location {location:?}"); create_or_modify_config(location, &foundry_path, &soldeer_path) } /// Default Foundry config file path pub fn foundry_default() -> PathBuf { let root: PathBuf = dunce::canonicalize(Self::get_root_path()).expect("could not get the root"); root.join("foundry.toml") } /// Default Soldeer config file path pub fn soldeer_default() -> PathBuf { let root: PathBuf = dunce::canonicalize(Self::get_root_path()).expect("could not get the root"); root.join("soldeer.toml") } } /// For clap fn default_true() -> bool { true } /// The Soldeer config options. #[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] pub struct SoldeerConfig { /// Whether to generate remappings or completely leave them untouched. /// /// Defaults to `true`. #[serde(default = "default_true")] pub remappings_generate: bool, /// Whether to regenerate the remappings every time and ignore existing content. /// /// Defaults to `false`. #[serde(default)] pub remappings_regenerate: bool, /// Whether to include the version requirement string in the left part of the remappings. /// /// Defaults to `true`. #[serde(default = "default_true")] pub remappings_version: bool, /// A prefix to add to each dependency name in the left part of the remappings. /// /// None by default. #[serde(default)] pub remappings_prefix: String, /// The location where the remappings file should be generated. 
/// /// Either inside the `foundry.toml` config file or as a separate `remappings.txt` file. /// This gets ignored if the config file is `soldeer.toml`, in which case the remappings /// are always generated in a separate file. /// /// Defaults to [`RemappingsLocation::Txt`]. #[serde(default)] pub remappings_location: RemappingsLocation, /// Whether to include dependencies from dependencies. /// /// For dependencies which use soldeer, the `soldeer install` command will be invoked. /// Git dependencies which have submodules will see their submodules cloned as well. /// /// Defaults to `false`. #[serde(default)] pub recursive_deps: bool, } impl Default for SoldeerConfig { fn default() -> Self { Self { remappings_generate: true, remappings_regenerate: false, remappings_version: true, remappings_prefix: String::new(), remappings_location: RemappingsLocation::default(), recursive_deps: false, } } } /// A git identifier used to specify a revision, branch or tag. /// /// # Examples /// /// ``` /// # use soldeer_core::config::GitIdentifier; /// let rev = GitIdentifier::from_rev("082692fcb6b5b1ab8f856914897f7f2b46b84fd2"); /// let branch = GitIdentifier::from_branch("feature/foo"); /// let tag = GitIdentifier::from_tag("v1.0.0"); /// ``` #[derive(Debug, Clone, PartialEq, Eq, Hash, Display)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub enum GitIdentifier { /// A commit hash Rev(String), /// A branch name Branch(String), /// A tag name Tag(String), } impl GitIdentifier { /// Create a new git identifier from a revision hash. pub fn from_rev(rev: impl Into) -> Self { let rev: String = rev.into(); Self::Rev(rev) } /// Create a new git identifier from a branch name. pub fn from_branch(branch: impl Into) -> Self { let branch: String = branch.into(); Self::Branch(branch) } /// Create a new git identifier from a tag name. pub fn from_tag(tag: impl Into) -> Self { let tag: String = tag.into(); Self::Tag(tag) } } /// A git dependency config item. 
/// /// This struct is used to represent a git dependency from the config file. #[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)] #[allow(clippy::duplicated_attributes)] #[builder(on(String, into), on(PathBuf, into))] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub struct GitDependency { /// The name of the dependency (user-defined). pub name: String, /// The version requirement string (semver). /// /// Example: `>=1.9.3 || ^2.0.0` /// /// When no operator is used before the version number, it defaults to `=` which pins the /// version. #[cfg_attr(feature = "serde", serde(rename = "version"))] pub version_req: String, /// The git URL, must end with `.git`. pub git: String, /// The git identifier (revision, branch or tag). /// /// If omitted, the default branch is used. pub identifier: Option, /// An optional relative path to the project's root within the repository. /// /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided, /// then the repo's root must contain a Soldeer config. pub project_root: Option, } impl fmt::Display for GitDependency { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{}~{}", self.name, self.version_req) } } /// An HTTP dependency config item. /// /// This struct is used to represent an HTTP dependency from the config file. #[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)] #[allow(clippy::duplicated_attributes)] #[builder(on(String, into), on(PathBuf, into))] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub struct HttpDependency { /// The name of the dependency (user-defined). pub name: String, /// The version requirement string (semver). /// /// Example: `>=1.9.3 || ^2.0.0` /// /// When no operator is used before the version number, it defaults to `=` which pins the /// version. #[cfg_attr(feature = "serde", serde(rename = "version"))] pub version_req: String, /// The URL to the dependency. 
/// /// If omitted, the registry will be contacted to get the download URL for that dependency (by /// name). pub url: Option, /// An optional relative path to the project's root within the zip file. /// /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided, /// then the zip's root must contain a Soldeer config. pub project_root: Option, } impl fmt::Display for HttpDependency { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{}~{}", self.name, self.version_req) } } /// A git or HTTP dependency config item. /// /// A builder can be used to create the underlying [`HttpDependency`] or [`GitDependency`] and then /// converted into this type with `.into()`. /// /// # Examples /// /// ``` /// # use soldeer_core::config::{Dependency, HttpDependency}; /// let dep: Dependency = HttpDependency::builder() /// .name("my-dep") /// .version_req("^1.0.0") /// .url("https://...") /// .build() /// .into(); /// ``` #[derive(Debug, Clone, PartialEq, Eq, Hash, Display, From)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub enum Dependency { #[from(HttpDependency)] Http(HttpDependency), #[from(GitDependency)] Git(GitDependency), } impl Dependency { /// Create a new dependency from a name and version requirement string. /// /// The string should be in the format `name~version_req`. /// /// The version requirement string can use the semver format. /// /// Example: `dependency~^1.0.0` /// /// If a custom URL is provided, then the version requirement string /// cannot contain the `=` character, as it would break the remappings. 
/// /// # Examples /// /// ``` /// # use soldeer_core::config::{Dependency, HttpDependency, GitDependency, GitIdentifier, UrlType}; /// assert_eq!( /// Dependency::from_name_version("my-lib~^1.0.0", Some(UrlType::http("https://foo.bar/zip.zip")), None) /// .unwrap(), /// HttpDependency::builder() /// .name("my-lib") /// .version_req("^1.0.0") /// .url("https://foo.bar/zip.zip") /// .build() /// .into() /// ); /// assert_eq!( /// Dependency::from_name_version( /// "my-lib~^1.0.0", /// Some(UrlType::git("git@github.com:foo/bar.git")), /// Some(GitIdentifier::from_tag("v1.0.0")), /// ) /// .unwrap(), /// GitDependency::builder() /// .name("my-lib") /// .version_req("^1.0.0") /// .git("git@github.com:foo/bar.git") /// .identifier(GitIdentifier::from_tag("v1.0.0")) /// .build() /// .into() /// ); /// ``` pub fn from_name_version( name_version: &str, custom_url: Option, identifier: Option, ) -> Result { let (dependency_name, dependency_version_req) = name_version .split_once('~') .ok_or(ConfigError::InvalidNameAndVersion(name_version.to_string()))?; if dependency_version_req.is_empty() { return Err(ConfigError::EmptyVersion(dependency_name.to_string())); } Ok(match custom_url { Some(url) => { // in this case (custom url or git dependency), the version requirement string is // going to be used as part of the folder name inside the // dependencies folder. As such, it's not allowed to contain the "=" // character, because that would break the remappings. 
if dependency_version_req.contains('=') { return Err(ConfigError::InvalidVersionReq(dependency_name.to_string())); } debug!(url:% = url; "using custom url"); match url { UrlType::Git(url) => GitDependency { name: dependency_name.to_string(), version_req: dependency_version_req.to_string(), git: url, identifier, project_root: None, } .into(), UrlType::Http(url) => HttpDependency { name: dependency_name.to_string(), version_req: dependency_version_req.to_string(), url: Some(url), project_root: None, } .into(), } } None => HttpDependency { name: dependency_name.to_string(), version_req: dependency_version_req.to_string(), url: None, project_root: None, } .into(), }) } /// Get the name of the dependency. pub fn name(&self) -> &str { match self { Self::Http(dep) => &dep.name, Self::Git(dep) => &dep.name, } } /// Get the version requirement string of the dependency. pub fn version_req(&self) -> &str { match self { Self::Http(dep) => &dep.version_req, Self::Git(dep) => &dep.version_req, } } /// Get the URL of the dependency. pub fn url(&self) -> Option<&String> { match self { Self::Http(dep) => dep.url.as_ref(), Self::Git(dep) => Some(&dep.git), } } /// Get the install path of the dependency (must exist already). pub fn install_path_sync(&self, deps: impl AsRef) -> Option { debug!(dep:% = self; "trying to find installation path of dependency (sync)"); find_install_path_sync(self, deps) } /// Get the install path of the dependency in an async way (must exist already). pub async fn install_path(&self, deps: impl AsRef) -> Option { debug!(dep:% = self; "trying to find installation path of dependency (async)"); find_install_path(self, deps).await } /// Get the relative path to the project root (config file location). pub fn project_root(&self) -> Option { match self { Self::Http(dep) => dep.project_root.clone(), Self::Git(dep) => dep.project_root.clone(), } } /// Convert the dependency to a TOML value for saving to the config file. 
pub fn to_toml_value(&self) -> (String, Item) {
    match self {
        Self::Http(dep) => (
            dep.name.clone(),
            match &dep.url {
                Some(url) => {
                    // custom URL: serialize as an inline table with `version`, `url` and
                    // optionally `project_root`
                    let mut table = InlineTable::new();
                    table.insert(
                        "version",
                        value(&dep.version_req)
                            .into_value()
                            .expect("version should be a valid toml value"),
                    );
                    table.insert(
                        "url",
                        value(url).into_value().expect("url should be a valid toml value"),
                    );
                    if let Some(path) = dep.project_root.as_ref() {
                        table.insert(
                            "project_root",
                            value(path.to_string_lossy().into_owned())
                                .into_value()
                                .expect("project_root should be a valid toml value"),
                        );
                    }
                    value(table)
                }
                // registry dependency: a plain version-requirement string is enough
                None => value(&dep.version_req),
            },
        ),
        Self::Git(dep) => {
            let mut table = InlineTable::new();
            table.insert(
                "version",
                value(&dep.version_req)
                    .into_value()
                    .expect("version should be a valid toml value"),
            );
            table.insert(
                "git",
                value(&dep.git).into_value().expect("git URL should be a valid toml value"),
            );
            // at most one of `rev`/`branch`/`tag` is emitted, mirroring the identifier variant
            match &dep.identifier {
                Some(GitIdentifier::Rev(rev)) => {
                    table.insert(
                        "rev",
                        value(rev).into_value().expect("rev should be a valid toml value"),
                    );
                }
                Some(GitIdentifier::Branch(branch)) => {
                    table.insert(
                        "branch",
                        value(branch)
                            .into_value()
                            .expect("branch should be a valid toml value"),
                    );
                }
                Some(GitIdentifier::Tag(tag)) => {
                    table.insert(
                        "tag",
                        value(tag).into_value().expect("tag should be a valid toml value"),
                    );
                }
                None => {}
            }
            if let Some(path) = dep.project_root.as_ref() {
                table.insert(
                    "project_root",
                    value(path.to_string_lossy().into_owned())
                        .into_value()
                        .expect("project_root should be a valid toml value"),
                );
            }
            (dep.name.clone(), value(table))
        }
    }
}

/// Check if the dependency is an HTTP dependency.
pub fn is_http(&self) -> bool {
    matches!(self, Self::Http(_))
}

/// Cast to a HTTP dependency if it is one.
pub fn as_http(&self) -> Option<&HttpDependency> {
    if let Self::Http(v) = self { Some(v) } else { None }
}

/// Cast to a mutable HTTP dependency if it is one.
pub fn as_http_mut(&mut self) -> Option<&mut HttpDependency> { if let Self::Http(v) = self { Some(v) } else { None } } /// Check if the dependency is a git dependency. pub fn is_git(&self) -> bool { matches!(self, Self::Git(_)) } /// Cast to a git dependency if it is one. pub fn as_git(&self) -> Option<&GitDependency> { if let Self::Git(v) = self { Some(v) } else { None } } /// Cast to a mutable git dependency if it is one. pub fn as_git_mut(&mut self) -> Option<&mut GitDependency> { if let Self::Git(v) = self { Some(v) } else { None } } } impl From<&HttpDependency> for Dependency { fn from(dep: &HttpDependency) -> Self { Self::Http(dep.clone()) } } impl From<&GitDependency> for Dependency { fn from(dep: &GitDependency) -> Self { Self::Git(dep.clone()) } } /// The location where the Soldeer config should be stored. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, FromStr)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub enum ConfigLocation { /// The `foundry.toml` file. Foundry, /// The `soldeer.toml` file. Soldeer, } impl From for PathBuf { fn from(value: ConfigLocation) -> Self { match value { ConfigLocation::Foundry => Paths::foundry_default(), ConfigLocation::Soldeer => Paths::soldeer_default(), } } } /// A warning generated during parsing of a dependency from the config file. #[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub struct ParsingWarning { dependency_name: String, message: String, } impl fmt::Display for ParsingWarning { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}: {}", self.dependency_name, self.message) } } /// The result of parsing a dependency from the config file. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, Deserialize))] pub struct ParsingResult { pub dependency: Dependency, pub warnings: Vec, } impl ParsingResult { /// Whether the parsing result contains one or more warnings. pub fn has_warnings(&self) -> bool { !self.warnings.is_empty() } } impl From for ParsingResult { fn from(value: HttpDependency) -> Self { Self { dependency: value.into(), warnings: Vec::default() } } } impl From for ParsingResult { fn from(value: GitDependency) -> Self { Self { dependency: value.into(), warnings: Vec::default() } } } impl From for ParsingResult { fn from(value: Dependency) -> Self { Self { dependency: value, warnings: Vec::default() } } } /// Detect the location of the config file in case no user preference is available. /// /// The function will try to auto-detect the location based on the existence of the /// `dependencies` entry in the foundry config file, or the existence of a `soldeer.toml` file. /// If no config can be found, `None` is returned. pub fn detect_config_location(root: impl AsRef) -> Option { let foundry_path = root.as_ref().join("foundry.toml"); let soldeer_path = root.as_ref().join("soldeer.toml"); if let Ok(contents) = fs::read_to_string(&foundry_path) { debug!(path:? = foundry_path; "found foundry.toml file"); if let Ok(doc) = contents.parse::() { if doc.contains_table("dependencies") { debug!("found `dependencies` table in foundry.toml, so using that file for config"); return Some(ConfigLocation::Foundry); } else { debug!("foundry.toml does not contain `dependencies`, trying to use soldeer.toml"); } } else { warn!(path:? = foundry_path; "foundry.toml could not be parsed a toml"); } } else if soldeer_path.exists() { debug!(path:? = soldeer_path; "soldeer.toml exists, using that file for config"); return Some(ConfigLocation::Soldeer); } debug!("could not determine existing config file location"); None } /// Read the list of dependencies from the config file. 
///
/// Dependencies are stored in a TOML table under the `dependencies` key.
/// Each key inside of the table is the name of the dependency and the value can be:
/// - a string representing the version requirement
/// - a table with the following fields:
///   - `version` (required): the version requirement string
///   - `url` (optional): the URL to the dependency's zip file
///   - `git` (optional): the git URL for git dependencies
///   - `rev` (optional): the revision hash for git dependencies
///   - `branch` (optional): the branch name for git dependencies
///   - `tag` (optional): the tag name for git dependencies
///   - `project_root` (optional): relative path to the folder containing the config file
pub fn read_config_deps(
    path: impl AsRef<Path>,
) -> Result<(Vec<Dependency>, Vec<ParsingWarning>)> {
    let contents = fs::read_to_string(&path)?;
    let doc: DocumentMut = contents.parse::<DocumentMut>()?;
    // a missing `dependencies` table is not an error, just an empty list
    let Some(Some(data)) = doc.get("dependencies").map(|v| v.as_table()) else {
        warn!("no `dependencies` table in config file");
        return Ok(Default::default());
    };
    let mut dependencies: Vec<Dependency> = Vec::new();
    let mut warnings: Vec<ParsingWarning> = Vec::new();
    for (name, v) in data {
        let mut res = parse_dependency(name, v)?;
        dependencies.push(res.dependency);
        warnings.append(&mut res.warnings);
    }
    debug!(path:? = path.as_ref(); "found {} dependencies in config file", dependencies.len());
    Ok((dependencies, warnings))
}

/// Read the Soldeer config from the config file.
pub fn read_soldeer_config(path: impl AsRef<Path>) -> Result<SoldeerConfig> {
    // wrapper matching the TOML layout: the settings live under a `[soldeer]` table
    #[derive(Deserialize)]
    struct SoldeerConfigParsed {
        #[serde(default)]
        soldeer: SoldeerConfig,
    }
    let contents = fs::read_to_string(&path)?;
    let config: SoldeerConfigParsed = toml_edit::de::from_str(&contents)?;
    debug!(path:? = path.as_ref(); "parsed soldeer config from file");
    Ok(config.soldeer)
}

/// Add a dependency to the config file.
pub fn add_to_config(dependency: &Dependency, config_path: impl AsRef) -> Result<()> { let contents = fs::read_to_string(&config_path)?; let mut doc: DocumentMut = contents.parse::()?; // in case we don't have the dependencies section defined in the config file, we add it if !doc.contains_table("dependencies") { debug!("`dependencies` table added to config file because it was missing"); doc.insert("dependencies", Item::Table(Table::default())); } let (name, value) = dependency.to_toml_value(); doc["dependencies"] .as_table_mut() .expect("dependencies should be a table") .insert(&name, value); fs::write(&config_path, doc.to_string())?; debug!(dep:% = dependency, path:? = config_path.as_ref(); "added dependency to config file"); Ok(()) } /// Delete a dependency from the config file. pub fn delete_from_config(dependency_name: &str, path: impl AsRef) -> Result { let contents = fs::read_to_string(&path)?; let mut doc: DocumentMut = contents.parse::().expect("invalid doc"); let Some(dependencies) = doc["dependencies"].as_table_mut() else { debug!("no `dependencies` table in config file"); return Err(ConfigError::MissingDependency(dependency_name.to_string())); }; let Some(item_removed) = dependencies.remove(dependency_name) else { debug!("dependency not present in config file"); return Err(ConfigError::MissingDependency(dependency_name.to_string())); }; let dependency = parse_dependency(dependency_name, &item_removed)?; fs::write(&path, doc.to_string())?; debug!(dep = dependency_name, path:? = path.as_ref(); "removed dependency from config file"); Ok(dependency.dependency) } /// Update the config file to add the `dependencies` folder as a source for libraries and the /// `[dependencies]` table if necessary. 
pub fn update_config_libs(foundry_config: impl AsRef) -> Result<()> { let contents = fs::read_to_string(&foundry_config)?; let mut doc: DocumentMut = contents.parse::()?; if !doc.contains_key("profile") { debug!("missing `profile` in config file, adding it"); let mut profile = Table::default(); profile["default"] = Item::Table(Table::default()); profile.set_implicit(true); doc["profile"] = Item::Table(profile); } let profile = doc["profile"].as_table_mut().expect("profile should be a table"); if !profile.contains_key("default") { debug!("missing `default` profile in config file, adding it"); profile["default"] = Item::Table(Table::default()); } let default_profile = profile["default"].as_table_mut().expect("default profile should be a table"); if !default_profile.contains_key("libs") { debug!("missing `libs` array in config file, adding it"); default_profile["libs"] = value(Array::from_iter(&["dependencies".to_string()])); } let libs = default_profile["libs"].as_array_mut().expect("libs should be an array"); if !libs.iter().any(|v| v.as_str() == Some("dependencies")) { debug!("adding `dependencies` folder to `libs` array"); libs.push("dependencies"); } // in case we don't have the dependencies section defined in the config file, we add it if !doc.contains_table("dependencies") { debug!("adding `dependencies` table in config file"); doc.insert("dependencies", Item::Table(Table::default())); } fs::write(&foundry_config, doc.to_string())?; debug!(path:? = foundry_config.as_ref(); "config file updated"); Ok(()) } /// Find the top-level directory of the working git tree. /// /// If no `.git` folder is found in the ancestors, `None` is returned. fn find_git_root(relative_to: impl AsRef) -> Result> { let root = dunce::canonicalize(relative_to)?; Ok(root.ancestors().find(|p| p.join(".git").is_dir()).map(Path::to_path_buf)) } /// Find the root of the project at the current directory or path specified by `cwd`. 
///
/// Looks for a file named `foundry.toml` or `soldeer.toml` in the ancestors of the optional path
/// passed as argument. If `None` is given, then the current directory is retrieved from the
/// environment and used as the start point for the search.
///
/// The search is bounded by the root of the working git tree, so as to avoid false positives for
/// nested dependencies. If no config file is found, but a `.git` folder is found, then the
/// top-level directory of the working git tree will be returned. If the git root cannot be found,
/// then the start point of the search is returned (current dir or given path).
///
/// This function is not meant to be used directly, instead use [`Paths::get_root_path`] which
/// honors environment variables.
fn find_project_root(cwd: Option<impl AsRef<Path>>) -> Result<PathBuf> {
    let cwd = match cwd {
        Some(path) => dunce::canonicalize(path)?,
        None => env::current_dir()?,
    };
    let boundary = find_git_root(&cwd)?;
    let found = cwd
        .ancestors()
        // never walk above the git root (when there is one)
        .take_while(|p| boundary.as_ref().map(|b| p.starts_with(b)).unwrap_or(true))
        .find(|p| p.join("foundry.toml").is_file() || p.join("soldeer.toml").is_file())
        .map(Path::to_path_buf);
    // fall back to the git root, then to the starting directory itself
    Ok(found.or(boundary).unwrap_or(cwd))
}

/// Parse a dependency from a TOML value.
///
/// The value can be a string (version requirement) or a table.
/// The table can have the following fields:
/// - `version` (required): the version requirement string
/// - `url` (optional): the URL to the dependency's zip file
/// - `git` (optional): the git URL for git dependencies
/// - `rev` (optional): the revision hash for git dependencies
/// - `branch` (optional): the branch name for git dependencies
/// - `tag` (optional): the tag name for git dependencies
/// - `project_root` (optional): relative path to the folder containing the config file
///
/// Note that the version requirement string cannot contain the `=` symbol for git dependencies
/// and HTTP dependencies with a custom URL.
fn parse_dependency(name: impl Into, value: &Item) -> Result { let name: String = name.into(); if let Some(version_req) = value.as_str() { if version_req.is_empty() { return Err(ConfigError::EmptyVersion(name)); } // this function does not retrieve the url return Ok(HttpDependency { name, version_req: version_req.to_string(), url: None, project_root: None, } .into()); } // we should have a table or inline table let table = { match value.as_inline_table() { Some(table) => table, None => match value.as_table() { // we normalize to inline table Some(table) => &table.clone().into_inline_table(), None => { debug!(dep = name; "dependency config entry could not be parsed as a table"); return Err(ConfigError::InvalidDependency(name)); } }, } }; let mut warnings = Vec::new(); // check for unsupported fields warnings.extend(table.iter().filter_map(|(k, _)| { if !["version", "url", "git", "rev", "branch", "tag", "project_root"].contains(&k) { warn!(dependency = name; "toml parsing: `{k}` is not a valid dependency option"); Some(ParsingWarning { dependency_name: name.clone(), message: format!("`{k}` is not a valid dependency option"), }) } else { None } })); // version is needed in both cases let version_req = match table.get("version").map(|v| v.as_str()) { Some(None) => { debug!(dep = name; "dependency's `version` field is not a string"); return Err(ConfigError::InvalidField { field: "version".to_string(), dep: name }); } None => { return Err(ConfigError::MissingField { field: "version".to_string(), dep: name }); } Some(Some(version_req)) => version_req.to_string(), }; if version_req.is_empty() { return Err(ConfigError::EmptyVersion(name)); } // both types of dependency definition can have the `project_root` field. 
let project_root = match table.get("project_root").map(|v| v.as_str()) { Some(Some(path)) => Some(path.into()), Some(None) => { debug!(dep = name; "dependency's `project_root` field is not a string"); return Err(ConfigError::InvalidField { field: "project_root".to_string(), dep: name }); } None => None, }; // check if it's a git dependency match table.get("git").map(|v| v.as_str()) { Some(None) => { debug!(dep = name; "dependency's `git` field is not a string"); return Err(ConfigError::InvalidField { field: "git".to_string(), dep: name }); } Some(Some(git)) => { // we can't have an http url if we have a git url if table.get("url").is_some() { return Err(ConfigError::FieldConflict { field: "url".to_string(), conflicts_with: "git".to_string(), dep: name, }); } // for git dependencies, the version requirement string is going to be used as part of // the folder name inside the dependencies folder. As such, it's not allowed to contain // the "=" character, because that would break the remappings. 
if version_req.contains('=') { return Err(ConfigError::InvalidVersionReq(name)); } // rev/branch/tag fields are optional but need to be a string if present let rev = match table.get("rev").map(|v| v.as_str()) { Some(Some(rev)) => Some(rev.to_string()), Some(None) => { debug!(dep = name; "dependency's `rev` field is not a string"); return Err(ConfigError::InvalidField { field: "rev".to_string(), dep: name }); } None => None, }; let branch = match table.get("branch").map(|v| v.as_str()) { Some(Some(tag)) => Some(tag.to_string()), Some(None) => { debug!(dep = name; "dependency's `branch` field is not a string"); return Err(ConfigError::InvalidField { field: "branch".to_string(), dep: name, }); } None => None, }; let tag = match table.get("tag").map(|v| v.as_str()) { Some(Some(tag)) => Some(tag.to_string()), Some(None) => { debug!(dep = name; "dependency's `tag` field is not a string"); return Err(ConfigError::InvalidField { field: "tag".to_string(), dep: name }); } None => None, }; let identifier = match (rev, branch, tag) { (Some(rev), None, None) => Some(GitIdentifier::from_rev(rev)), (None, Some(branch), None) => Some(GitIdentifier::from_branch(branch)), (None, None, Some(tag)) => Some(GitIdentifier::from_tag(tag)), (None, None, None) => None, _ => { return Err(ConfigError::GitIdentifierConflict(name)); } }; return Ok(ParsingResult { dependency: GitDependency { name, git: git.to_string(), version_req, identifier, project_root, } .into(), warnings, }); } None => {} } // we should have a HTTP dependency, // check for extra fields in the HTTP context warnings.extend(table.iter().filter_map(|(k, _)| { if ["rev", "branch", "tag"].contains(&k) { warn!(dependency = name; "toml parsing: `{k}` is ignored if no `git` URL is provided"); Some(ParsingWarning { dependency_name: name.clone(), message: format!("`{k}` is ignored if no `git` URL is provided"), }) } else { None } })); match table.get("url").map(|v| v.as_str()) { Some(None) => { debug!(dep = name; "dependency's `url` 
field is not a string"); Err(ConfigError::InvalidField { field: "url".to_string(), dep: name }) } None => Ok(ParsingResult { dependency: HttpDependency { name, version_req, url: None, project_root }.into(), warnings, }), Some(Some(url)) => { // for HTTP dependencies with custom URL, the version requirement string is going to be // used as part of the folder name inside the dependencies folder. As such, // it's not allowed to contain the "=" character, because that would break // the remappings. if version_req.contains('=') { return Err(ConfigError::InvalidVersionReq(name)); } Ok(ParsingResult { dependency: HttpDependency { name, version_req, url: Some(url.to_string()), project_root, } .into(), warnings, }) } } } /// Create a basic config file with default contents if it doesn't exist, otherwise add /// `[dependencies]` if necessary. fn create_or_modify_config( location: ConfigLocation, foundry_path: impl AsRef, soldeer_path: impl AsRef, ) -> Result { match location { ConfigLocation::Foundry => { let foundry_path = foundry_path.as_ref(); if foundry_path.exists() { update_config_libs(foundry_path)?; return Ok(foundry_path.to_path_buf()); } debug!(path:? = foundry_path; "foundry.toml does not exist, creating it"); let contents = r#"[profile.default] src = "src" out = "out" libs = ["dependencies"] [dependencies] # See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options "#; fs::write(foundry_path, contents)?; Ok(foundry_path.to_path_buf()) } ConfigLocation::Soldeer => { let soldeer_path = soldeer_path.as_ref(); if soldeer_path.exists() { return Ok(soldeer_path.to_path_buf()); } debug!(path:? 
= soldeer_path; "soldeer.toml does not exist, creating it");
            fs::write(soldeer_path, "[dependencies]\n")?;
            Ok(soldeer_path.to_path_buf())
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::errors::ConfigError;
    use path_slash::PathBufExt;
    use std::{fs, path::PathBuf};
    use temp_env::with_var;
    use testdir::testdir;

    /// Write `content` to a file named `filename` inside a per-test temp dir.
    fn write_to_config(content: &str, filename: &str) -> PathBuf {
        let path = testdir!().join(filename);
        fs::write(&path, content).unwrap();
        path
    }

    /// Dependency entries shared by the `read_config_deps` tests.
    const DEPS_TOML: &str = r#""lib1" = "1.0.0"
"lib2" = { version = "2.0.0" }
"lib3" = { version = "3.0.0", url = "https://example.com" }
"lib4" = { version = "4.0.0", git = "https://example.com/repo.git" }
"lib5" = { version = "5.0.0", git = "https://example.com/repo.git", rev = "123456" }
"lib6" = { version = "6.0.0", git = "https://example.com/repo.git", branch = "dev" }
"lib7" = { version = "7.0.0", git = "https://example.com/repo.git", tag = "v7.0.0" }
"lib8" = { version = "8.0.0", url = "https://example.com", project_root = "foo/bar" }
"lib9" = { version = "9.0.0", git = "https://example.com/repo.git", project_root = "test/test2" }
"#;

    /// The dependencies [`DEPS_TOML`] should parse into, in declaration order.
    fn expected_deps() -> Vec<Dependency> {
        vec![
            HttpDependency::builder().name("lib1").version_req("1.0.0").build().into(),
            HttpDependency::builder().name("lib2").version_req("2.0.0").build().into(),
            HttpDependency::builder()
                .name("lib3")
                .version_req("3.0.0")
                .url("https://example.com")
                .build()
                .into(),
            GitDependency::builder()
                .name("lib4")
                .version_req("4.0.0")
                .git("https://example.com/repo.git")
                .build()
                .into(),
            GitDependency::builder()
                .name("lib5")
                .version_req("5.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_rev("123456"))
                .build()
                .into(),
            GitDependency::builder()
                .name("lib6")
                .version_req("6.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_branch("dev"))
                .build()
                .into(),
            GitDependency::builder()
                .name("lib7")
                .version_req("7.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_tag("v7.0.0"))
                .build()
                .into(),
            HttpDependency::builder()
                .name("lib8")
                .version_req("8.0.0")
                .url("https://example.com")
                .project_root("foo/bar")
                .build()
                .into(),
            GitDependency::builder()
                .name("lib9")
                .version_req("9.0.0")
                .git("https://example.com/repo.git")
                .project_root("test/test2")
                .build()
                .into(),
        ]
    }

    #[test]
    fn test_paths_config_soldeer() {
        let config_path = write_to_config("[dependencies]\n", "soldeer.toml");
        with_var(
            "SOLDEER_PROJECT_ROOT",
            Some(config_path.parent().unwrap().to_string_lossy().to_string()),
            || {
                let res = Paths::new();
                assert!(res.is_ok(), "{res:?}");
                assert_eq!(res.unwrap().config.to_slash_lossy(), config_path.to_slash_lossy());
            },
        );
    }

    #[test]
    fn test_paths_config_foundry() {
        let config_contents = r#"[profile.default]
libs = ["dependencies"]

[dependencies]
"#;
        let config_path = write_to_config(config_contents, "foundry.toml");
        with_var(
            "SOLDEER_PROJECT_ROOT",
            Some(config_path.parent().unwrap().to_string_lossy().to_string()),
            || {
                let res = Paths::new();
                assert!(res.is_ok(), "{res:?}");
                assert_eq!(res.unwrap().config, config_path);
            },
        );
    }

    #[test]
    fn test_paths_from_root() {
        let config_path = write_to_config("[dependencies]\n", "soldeer.toml");
        let root = config_path.parent().unwrap();
        let res = Paths::from_root(root);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap().root, root);
    }

    #[test]
    fn test_from_name_version_no_url() {
        let res = Dependency::from_name_version("dependency~1.0.0", None, None);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            HttpDependency::builder().name("dependency").version_req("1.0.0").build().into()
        );
    }

    #[test]
    fn test_from_name_version_with_http_url() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::http("https://github.com/user/repo/archive/123.zip")),
            None,
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            HttpDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .url("https://github.com/user/repo/archive/123.zip")
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_url() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("https://github.com/user/repo.git")),
            None,
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("https://github.com/user/repo.git")
                .build()
                .into()
        );
        // URLs with embedded credentials must be preserved verbatim
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("https://test:test@gitlab.com/user/repo.git")),
            None,
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("https://test:test@gitlab.com/user/repo.git")
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_url_rev() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("https://github.com/user/repo.git")),
            Some(GitIdentifier::from_rev("123456")),
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("https://github.com/user/repo.git")
                .identifier(GitIdentifier::from_rev("123456"))
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_url_branch() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("https://github.com/user/repo.git")),
            Some(GitIdentifier::from_branch("dev")),
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("https://github.com/user/repo.git")
                .identifier(GitIdentifier::from_branch("dev"))
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_url_tag() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("https://github.com/user/repo.git")),
            Some(GitIdentifier::from_tag("v1.0.0")),
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("https://github.com/user/repo.git")
                .identifier(GitIdentifier::from_tag("v1.0.0"))
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_ssh() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("git@github.com:user/repo.git")),
            None,
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("git@github.com:user/repo.git")
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_with_git_ssh_rev() {
        let res = Dependency::from_name_version(
            "dependency~1.0.0",
            Some(UrlType::git("git@github.com:user/repo.git")),
            Some(GitIdentifier::from_rev("123456")),
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            GitDependency::builder()
                .name("dependency")
                .version_req("1.0.0")
                .git("git@github.com:user/repo.git")
                .identifier(GitIdentifier::from_rev("123456"))
                .build()
                .into()
        );
    }

    #[test]
    fn test_from_name_version_empty_version() {
        let res = Dependency::from_name_version("dependency~", None, None);
        assert!(matches!(res, Err(ConfigError::EmptyVersion(_))), "{res:?}");
    }

    #[test]
    fn test_from_name_version_invalid_version() {
        // for http deps, having the "=" character in the version requirement is ok
        let res = Dependency::from_name_version("dependency~asdf=", None, None);
        assert!(res.is_ok(), "{res:?}");
        let res = Dependency::from_name_version(
            "dependency~asdf=",
            Some(UrlType::http("https://example.com")),
            None,
        );
        assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), "{res:?}");
        let res = Dependency::from_name_version(
            "dependency~asdf=",
            Some(UrlType::git("git@github.com:user/repo.git")),
            None,
        );
        assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), "{res:?}");
    }

    #[test]
    fn test_read_soldeer_config_default() {
        let config_contents = r#"[profile.default]
libs = ["dependencies"]
"#;
        let config_path = write_to_config(config_contents, "foundry.toml");
        let res = read_soldeer_config(config_path);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap(), SoldeerConfig::default());
    }

    #[test]
    fn test_read_soldeer_config() {
        let config_contents = r#"[soldeer]
remappings_generate = false
remappings_regenerate = true
remappings_version = false
remappings_prefix = "@"
remappings_location = "config"
recursive_deps = true
"#;
        let expected = SoldeerConfig {
            remappings_generate: false,
            remappings_regenerate: true,
            remappings_version: false,
            remappings_prefix: "@".to_string(),
            remappings_location: RemappingsLocation::Config,
            recursive_deps: true,
        };
        // the `[soldeer]` section is honored in both config file flavors
        let config_path = write_to_config(config_contents, "soldeer.toml");
        let res = read_soldeer_config(config_path);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap(), expected);
        let config_path = write_to_config(config_contents, "foundry.toml");
        let res = read_soldeer_config(config_path);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap(), expected);
    }

    #[test]
    fn test_read_foundry_config_deps() {
        let config_contents =
            format!("[profile.default]\nlibs = [\"dependencies\"]\n\n[dependencies]\n{DEPS_TOML}");
        let config_path = write_to_config(&config_contents, "foundry.toml");
        let res = read_config_deps(config_path);
        assert!(res.is_ok(), "{res:?}");
        let (result, _) = res.unwrap();
        assert_eq!(result, expected_deps());
    }

    #[test]
    fn test_read_soldeer_config_deps() {
        let config_contents = format!("[dependencies]\n{DEPS_TOML}");
        let config_path = write_to_config(&config_contents, "soldeer.toml");
        let res = read_config_deps(config_path);
        assert!(res.is_ok(), "{res:?}");
        let (result, _) = res.unwrap();
        assert_eq!(result, expected_deps());
    }

    #[test]
    fn test_read_soldeer_config_deps_bad_version() {
        for dep in [
            r#""lib1" = """#,
            r#""lib1" = { version = "" }"#,
            r#""lib1" = { version = "", url = "https://example.com" }"#,
            r#""lib1" = { version = "", git = "https://example.com/repo.git" }"#,
            r#""lib1" = { version = "", git = "https://example.com/repo.git", rev = "123456" }"#,
        ] {
            let config_contents = format!("[dependencies]\n{dep}");
            let config_path = write_to_config(&config_contents, "soldeer.toml");
            let res = read_config_deps(config_path);
            assert!(matches!(res, Err(ConfigError::EmptyVersion(_))), "{res:?}");
        }
        for dep in [
            r#""lib1" = { version = "asdf=", url = "https://example.com" }"#,
            r#""lib1" = { version = "asdf=", git = "https://example.com/repo.git" }"#,
            r#""lib1" = { version = "asdf=", git = "https://example.com/repo.git", rev = "123456" }"#,
        ] {
            let config_contents = format!("[dependencies]\n{dep}");
            let config_path = write_to_config(&config_contents, "soldeer.toml");
            let res = read_config_deps(config_path);
            assert!(matches!(res, Err(ConfigError::InvalidVersionReq(_))), "{res:?}");
        }
        // it's ok to have the "=" character in the version requirement for HTTP dependencies
        // without a custom URL
        let config_contents = r#"[dependencies]
"lib1" = "asdf="
"lib2" = { version = "asdf=" }
"#;
        let config_path = write_to_config(config_contents, "soldeer.toml");
        let res = read_config_deps(config_path);
        assert!(res.is_ok(), "{res:?}");
    }

    #[test]
    fn test_read_soldeer_config_deps_bad_git() {
        // rev/branch/tag are mutually exclusive
        for dep in [
            r#""lib1" = { version = "1.0.0", git = "https://example.com/repo.git", rev = "123456", branch = "dev" }"#,
            r#""lib1" = { version = "1.0.0", git = "https://example.com/repo.git", rev = "123456", tag = "v1.0.0" }"#,
            r#""lib1" = { version = "1.0.0", git = "https://example.com/repo.git", branch = "dev", tag = "v1.0.0" }"#,
            r#""lib1" = { version = "1.0.0", git = "https://example.com/repo.git", rev = "123456", branch = "dev", tag = "v1.0.0" }"#,
        ] {
            let config_contents = format!("[dependencies]\n{dep}");
            let config_path = write_to_config(&config_contents, "soldeer.toml");
            let res = read_config_deps(config_path);
            assert!(matches!(res, Err(ConfigError::GitIdentifierConflict(_))), "{res:?}");
        }
    }

    #[test]
    fn test_add_to_config() {
        let config_path = write_to_config("[dependencies]\n", "soldeer.toml");
        let deps: &[Dependency] = &[
            HttpDependency::builder().name("lib1").version_req("1.0.0").build().into(),
            HttpDependency::builder()
                .name("lib2")
                .version_req("1.0.0")
                .url("https://test.com/test.zip")
                .build()
                .into(),
            HttpDependency::builder()
                .name("lib21")
                .version_req("1.0.0")
                .url("https://test.com/test.zip")
                .project_root("foo/bar")
                .build()
                .into(),
            GitDependency::builder()
                .name("lib3")
                .version_req("1.0.0")
                .git("https://example.com/repo.git")
                .build()
                .into(),
            GitDependency::builder()
                .name("lib4")
                .version_req("1.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_rev("123456"))
                .build()
                .into(),
            GitDependency::builder()
                .name("lib5")
                .version_req("1.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_branch("dev"))
                .build()
                .into(),
            GitDependency::builder()
                .name("lib6")
                .version_req("1.0.0")
                .git("https://example.com/repo.git")
                .identifier(GitIdentifier::from_tag("v1.0.0"))
                .build()
                .into(),
            GitDependency::builder()
                .name("lib7")
                .version_req("1.0.0")
                .git("https://example.com/repo.git")
                .project_root("foo/bar")
                .build()
                .into(),
        ];
        for dep in deps {
            let res = add_to_config(dep, &config_path);
            assert!(res.is_ok(), "{dep}: {res:?}");
        }
        // everything written should round-trip through the parser unchanged
        let (parsed, _) = read_config_deps(&config_path).unwrap();
        for (dep, parsed) in deps.iter().zip(parsed.iter()) {
            assert_eq!(dep, parsed);
        }
    }

    #[test]
    fn test_add_to_config_no_section() {
        let config_path = write_to_config("", "soldeer.toml");
        let dep = Dependency::from_name_version("lib1~1.0.0", None, None).unwrap();
        let res = add_to_config(&dep, &config_path);
        assert!(res.is_ok(), "{res:?}");
        let (parsed, _) = read_config_deps(&config_path).unwrap();
        assert_eq!(parsed[0], dep);
    }

    #[test]
    fn test_delete_from_config() {
        let config_contents = r#"[dependencies]
"lib1" = "1.0.0"
"lib2" = { version = "2.0.0" }
"lib3" = { version = "3.0.0", url = "https://example.com" }
"lib4" = { version = "4.0.0", git = "https://example.com/repo.git" }
"lib5" = { version = "5.0.0", git = "https://example.com/repo.git", rev = "123456" }
"lib6" = { version = "6.0.0", git = "https://example.com/repo.git", branch = "dev" }
"lib7" = { version = "7.0.0", git = "https://example.com/repo.git", tag = "v7.0.0" }
"lib8" = { version = "8.0.0", url = "https://example.com", project_root = "foo/bar" }
"lib9" = { version = "9.0.0", git = "https://example.com/repo.git", project_root = "foo/bar" }
"#;
        let config_path = write_to_config(config_contents, "soldeer.toml");
        // remove one dependency at a time and check the remaining count shrinks to zero
        for (i, name) in ["lib1", "lib2", "lib3", "lib4", "lib5", "lib6", "lib7", "lib8", "lib9"]
            .iter()
            .enumerate()
        {
            let res = delete_from_config(name, &config_path);
            assert!(res.is_ok(), "{res:?}");
            assert_eq!(res.unwrap().name(), *name);
            assert_eq!(read_config_deps(&config_path).unwrap().0.len(), 8 - i);
        }
    }

    #[test]
    fn test_delete_from_config_missing() {
        let config_contents = r#"[dependencies]
"lib1" = "1.0.0"
"#;
        let config_path = write_to_config(config_contents, "soldeer.toml");
        let res = delete_from_config("libfoo", &config_path);
        assert!(matches!(res, Err(ConfigError::MissingDependency(_))), "{res:?}");
    }

    #[test]
    fn test_update_config_libs() {
        let config_contents = r#"[profile.default]
libs = ["lib"]

[dependencies]
"#;
        let config_path = write_to_config(config_contents, "foundry.toml");
        let res = update_config_libs(&config_path);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&config_path).unwrap();
        assert_eq!(
            contents,
            r#"[profile.default]
libs = ["lib", "dependencies"]

[dependencies]
"#
        );
    }

    #[test]
    fn test_update_config_profile_empty() {
        let config_contents = r#"[dependencies]
"#;
        let config_path = write_to_config(config_contents, "foundry.toml");
        let res = update_config_libs(&config_path);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&config_path).unwrap();
        assert_eq!(
            contents,
            r#"[dependencies]

[profile.default]
libs = ["dependencies"]
"#
        );
    }

    #[test]
    fn test_update_config_libs_empty() {
        let config_contents = r#"[profile.default]
src = "src"

[dependencies]
"#;
        let config_path = write_to_config(config_contents, "foundry.toml");
        let res = update_config_libs(&config_path);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&config_path).unwrap();
        assert_eq!(
            contents,
            r#"[profile.default]
src = "src"
libs = ["dependencies"]

[dependencies]
"#
        );
    }

    #[test]
    fn test_parse_dependency() {
        let config_contents = r#"[dependencies]
"lib1" = "1.0.0"
"lib2" = { version = "2.0.0" }
"lib3" = { version = "3.0.0", url = "https://example.com" }
"lib4" = { version = "4.0.0", git = "https://example.com/repo.git" }
"lib5" = { version = "5.0.0", git = "https://example.com/repo.git", rev = "123456" }
"lib6" = { version = "6.0.0", git = "https://example.com/repo.git", branch = "dev" }
"lib7" = { version = "7.0.0", git = "https://example.com/repo.git", tag = "v7.0.0" }
"lib8" = { version = "8.0.0", url = "https://example.com", project_root = "foo/bar" }
"lib9" = { version = "9.0.0", git = "https://example.com/repo.git", project_root = "foo/bar" }
"#;
        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();
        let data = doc.get("dependencies").map(|v| v.as_table()).unwrap().unwrap();
        for (name, v) in data {
            let res = parse_dependency(name, v);
            assert!(res.is_ok(), "{res:?}");
        }
    }

    #[test]
    fn test_parse_dependency_extra_field() {
        let config_contents = r#"[dependencies]
"lib1" = { version = "3.0.0", url = "https://example.com", foo = "bar" }
"#;
        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();
        let data = doc.get("dependencies").map(|v| v.as_table()).unwrap().unwrap();
        for (name, v) in data {
            let res = parse_dependency(name, v).unwrap();
            assert_eq!(res.warnings[0].message, "`foo` is not a valid dependency option");
        }
    }

    #[test]
    fn test_parse_dependency_git_extra_url() {
        let config_contents = r#"[dependencies]
"lib1" = { version = "3.0.0", git = "https://example.com/repo.git", url = "https://example.com" }
"#;
        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();
        let data = doc.get("dependencies").map(|v| v.as_table()).unwrap().unwrap();
        for (name, v) in data {
            let res = parse_dependency(name, v);
            assert!(
                matches!(
                    res,
                    Err(ConfigError::FieldConflict { field: _, conflicts_with: _, dep: _ })
                ),
                "{res:?}"
            );
        }
    }

    #[test]
    fn test_parse_dependency_git_field_conflict() {
        let config_contents = r#"[dependencies]
"lib2" = { version = "3.0.0", git = "https://example.com/repo.git", rev = "123456", branch = "dev" }
"lib3" = { version = "3.0.0", git = "https://example.com/repo.git", rev = "123456", tag = "v7.0.0" }
"lib4" = { version = "3.0.0", git = "https://example.com/repo.git", branch = "dev", tag = "v7.0.0" }
"#;
        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();
        let data = doc.get("dependencies").map(|v| v.as_table()).unwrap().unwrap();
        for (name, v) in data {
            let res = parse_dependency(name, v);
            assert!(matches!(res, Err(ConfigError::GitIdentifierConflict(_))), "{res:?}");
        }
    }

    #[test]
    fn test_parse_dependency_missing_url() {
        let config_contents = r#"[dependencies]
"lib1" = { version = "3.0.0", rev = "123456" }
"lib2" = { version = "3.0.0", branch = "dev" }
"lib3" = { version = "3.0.0", tag = "v7.0.0" }
"#;
        let doc: DocumentMut = config_contents.parse::<DocumentMut>().unwrap();
        let data = doc.get("dependencies").map(|v| v.as_table()).unwrap().unwrap();
        for (name, v) in data {
            let res = parse_dependency(name, v).unwrap();
            assert!(res.warnings[0].message.ends_with("is ignored if no `git` URL is provided"));
        }
    }

    #[test]
    fn test_find_git_root() {
        let test_dir = testdir!();
        let git_dir = test_dir.join(".git");
        fs::create_dir(&git_dir).unwrap();
        let result = find_git_root(&test_dir);
        assert!(result.is_ok(), "{result:?}");
        assert_eq!(result.unwrap(), Some(test_dir.clone()));
        // test with a subdirectory
        let sub_dir = test_dir.join("subdir");
        fs::create_dir(&sub_dir).unwrap();
        let result = find_git_root(&sub_dir);
        assert!(result.is_ok(), "{result:?}");
        assert_eq!(result.unwrap(), Some(test_dir));
        // test outside of a git folder
        let temp_dir = std::env::temp_dir().join("soldeer_test_no_git");
        if !temp_dir.exists() {
            fs::create_dir(&temp_dir).unwrap();
        }
        let result = find_git_root(&temp_dir);
        assert_eq!(result.unwrap(), None);
        // clean up
        fs::remove_dir(&temp_dir).unwrap();
    }

    #[test]
    fn test_find_git_root_nested() {
        // test nested git repositories
        let outer_dir = testdir!();
        fs::create_dir(outer_dir.join(".git")).unwrap();
        let inner_dir = outer_dir.join("inner");
        fs::create_dir(&inner_dir).unwrap();
        fs::create_dir(inner_dir.join(".git")).unwrap();
        // should find the inner git root when starting from inner directory
        let result = find_git_root(&inner_dir);
        assert!(result.is_ok(),
"{result:?}"); assert_eq!(result.unwrap(), Some(inner_dir)); // should find the outer git root when starting from outer directory let result = find_git_root(&outer_dir); assert!(result.is_ok(), "{result:?}"); assert_eq!(result.unwrap(), Some(outer_dir)); } #[test] fn test_find_project_root_with_foundry_toml() { let test_dir = testdir!(); let foundry_toml = test_dir.join("foundry.toml"); fs::write(&foundry_toml, "[dependencies]\n").unwrap(); let result = find_project_root(Some(&test_dir)); assert!(result.is_ok(), "{result:?}"); assert_eq!(result.unwrap(), test_dir); } #[test] fn test_find_project_root_with_soldeer_toml() { let test_dir = testdir!(); let soldeer_toml = test_dir.join("soldeer.toml"); fs::write(&soldeer_toml, "[dependencies]\n").unwrap(); let result = find_project_root(Some(&test_dir)); assert!(result.is_ok(), "{result:?}"); assert_eq!(result.unwrap(), test_dir); } #[test] fn test_find_project_root_in_subdirectory() { let test_dir = testdir!(); let foundry_toml = test_dir.join("foundry.toml"); fs::write(&foundry_toml, "[dependencies]\n").unwrap(); let sub_dir = test_dir.join("src"); fs::create_dir(&sub_dir).unwrap(); let result = find_project_root(Some(&sub_dir)); assert!(result.is_ok(), "{result:?}"); assert_eq!(result.unwrap(), test_dir); } #[test] fn test_find_project_root_git_boundary() { let test_dir = testdir!(); let git_folder = test_dir.join(".git"); fs::create_dir(&git_folder).unwrap(); let sub_dir = test_dir.join("src"); fs::create_dir(&sub_dir).unwrap(); let result = find_project_root(Some(&sub_dir)); assert!(result.is_ok(), "{result:?}"); assert_eq!(result.unwrap(), test_dir); } } ================================================ FILE: crates/core/src/download.rs ================================================ //! 
Download and/or extract dependencies use crate::{ config::{Dependency, GitIdentifier}, errors::DownloadError, utils::{path_matches, run_git_command, sanitize_filename}, }; use log::{debug, trace, warn}; use reqwest::{IntoUrl, Url}; use std::{ fs, io::Cursor, path::{Path, PathBuf}, str, }; use tokio::io::AsyncWriteExt as _; pub type Result = std::result::Result; /// Download a zip file into the provided folder. /// /// Depending on the platform, the folder path must exist prior to calling this function. /// The filename for the zip file will be the provided base name with the ".zip" extension pub async fn download_file( url: impl IntoUrl, folder_path: impl AsRef, base_name: &str, ) -> Result { let url: Url = url.into_url()?; debug!(name = base_name, url:% = url; "downloading file"); let resp = reqwest::get(url).await?; let mut resp = resp.error_for_status()?; let zip_path = folder_path.as_ref().join(sanitize_filename(&format!("{base_name}.zip"))); let mut file = tokio::fs::File::create(&zip_path) .await .map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?; while let Some(mut chunk) = resp.chunk().await? { file.write_all_buf(&mut chunk) .await .map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?; } file.flush().await.map_err(|e| DownloadError::IOError { path: zip_path.clone(), source: e })?; debug!(path:? = zip_path; "saved downloaded file"); Ok(zip_path) } /// Unzip a file into a directory and then delete it. pub async fn unzip_file(path: impl AsRef, into: impl AsRef) -> Result<()> { let path = path.as_ref().to_path_buf(); let zip_contents = tokio::fs::read(&path) .await .map_err(|e| DownloadError::IOError { path: path.clone(), source: e })?; tokio::task::spawn_blocking({ let out_dir = into.as_ref().to_path_buf(); #[allow(deprecated)] // until we can get rid of zip_extract move || zip_extract::extract(Cursor::new(zip_contents), &out_dir, true) }) .await??; debug!(file:? = path, dest:? 
= into.as_ref(); "unzipped file"); tokio::fs::remove_file(&path) .await .map_err(|e| DownloadError::IOError { path: path.clone(), source: e })?; debug!(path:?; "removed zip file"); Ok(()) } /// Clone a git repo into the given path, optionally checking out a reference. /// /// The repository is cloned without trees, which can speed up cloning when the full history is not /// needed. Contrary to a shallow clone, it's possible to checkout any ref and the missing trees /// will be retrieved as they are needed. /// /// This function returns the commit hash corresponding to the checked out reference (branch, tag, /// commit). pub async fn clone_repo( url: &str, identifier: Option<&GitIdentifier>, path: impl AsRef, ) -> Result { let path = path.as_ref().to_path_buf(); run_git_command( &["clone", "--tags", "--filter=tree:0", url, path.to_string_lossy().as_ref()], None, ) .await?; debug!(repo:? = path; "git repo cloned"); if let Some(identifier) = identifier { run_git_command(&["checkout", &identifier.to_string()], Some(&path)).await?; debug!(ref:? = identifier, repo:? = path; "checked out ref"); } let commit = run_git_command(&["rev-parse", "--verify", "HEAD"], Some(&path)).await?.trim().to_string(); debug!(repo:? = path; "checked out commit is {commit}"); Ok(commit) } /// Remove the files for a dependency (synchronous). /// /// This function should only be called in sync contexts. For a version that is safe to run in /// multithreaded async contexts, see [`delete_dependency_files`]. 
pub fn delete_dependency_files_sync(dependency: &Dependency, deps: impl AsRef) -> Result<()> { let Some(path) = find_install_path_sync(dependency, deps) else { return Err(DownloadError::DependencyNotFound(dependency.to_string())); }; fs::remove_dir_all(&path).map_err(|e| DownloadError::IOError { path, source: e })?; debug!(dep:% = dependency; "removed all files for dependency (sync)"); Ok(()) } /// Find the install path of a dependency by reading the dependencies directory and matching on the /// folder name. /// /// If a dependency version requirement string is a semver requirement, any folder which version /// matches the requirements is returned. pub fn find_install_path_sync(dependency: &Dependency, deps: impl AsRef) -> Option { let res = fs::read_dir(deps.as_ref()) .map(|read_dir| { read_dir.into_iter().find_map(|e| { e.ok().filter(|e| install_path_matches(dependency, e.path())).map(|e| e.path()) }) }) .ok() .flatten() .inspect(|res| debug!(path:? = res, dep:% = dependency; "folder name matches dependency")); if res.is_none() { debug!(dep:% = dependency; "could not find install path of dependency"); } res } /// Find the install path of a dependency by reading the dependencies directory and matching on the /// folder name (async version). /// /// If a dependency version requirement string is a semver requirement, any folder which version /// matches the requirements is returned. pub async fn find_install_path(dependency: &Dependency, deps: impl AsRef) -> Option { let Ok(mut read_dir) = tokio::fs::read_dir(deps.as_ref()).await else { warn!(path:? 
= deps.as_ref(); "could not list files in deps folder"); return None; }; while let Ok(Some(entry)) = read_dir.next_entry().await { let path = entry.path(); if !path.is_dir() { continue; } trace!(path:?; "found folder in deps"); if install_path_matches(dependency, &path) { debug!(path:?, dep:% = dependency; "folder name matches dependency"); return Some(path); } } debug!(dep:% = dependency; "could not find install path of dependency"); None } /// Remove the files for a dependency from the dependencies folder. /// /// A folder must exist for the dependency. pub async fn delete_dependency_files( dependency: &Dependency, deps: impl AsRef, ) -> Result<()> { let Some(path) = find_install_path(dependency, deps).await else { return Err(DownloadError::DependencyNotFound(dependency.to_string())); }; tokio::fs::remove_dir_all(&path) .await .map_err(|e| DownloadError::IOError { path, source: e })?; debug!(dep:% = dependency; "removed all files for dependency (async)"); Ok(()) } /// Check if a path corresponds to the provided dependency. /// /// The path must exist and be a folder, and the folder name must start with the dependency name /// (sanitized). For dependencies with a semver-compliant version requirement, any folder with a /// version that matches will give a result of `true`. Otherwise, the folder name must contain the /// version requirement string after the dependency name. 
fn install_path_matches(dependency: &Dependency, path: impl AsRef) -> bool { let path = path.as_ref(); if !path.is_dir() { trace!(path:?; "path is not a directory"); return false; } path_matches(dependency, path) } #[cfg(test)] mod tests { use super::*; use crate::{config::HttpDependency, push::zip_file}; use std::fs; use testdir::testdir; #[tokio::test] async fn test_download_file() { let path = testdir!().join("my-dependency"); fs::create_dir(&path).unwrap(); let res = download_file( "https://raw.githubusercontent.com/mario-eth/soldeer/main/README.md", &path, "my-dependency", ) .await; assert!(res.is_ok(), "{res:?}"); let zip_path = path.join("my-dependency.zip"); assert!(zip_path.exists()); } #[tokio::test] async fn test_unzip_file() { let dir = testdir!(); // create dummy zip let file_path = dir.join("file.txt"); fs::write(&file_path, "foobar").unwrap(); let zip_path = dir.join("my-dependency.zip"); zip_file(&dir, &[file_path], &zip_path).unwrap(); let out_dir = dir.join("out"); let res = unzip_file(&zip_path, &out_dir).await; assert!(res.is_ok(), "{res:?}"); let file_path = out_dir.join("file.txt"); assert!(file_path.exists()); assert!(!zip_path.exists()); } #[tokio::test] async fn test_clone_repo() { let dir = testdir!(); let res = clone_repo("https://github.com/beeb/test-repo.git", None, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(&res.unwrap(), "d5d72fa135d28b2e8307650b3ea79115183f2406"); } #[tokio::test] async fn test_clone_repo_rev() { let dir = testdir!(); let res = clone_repo( "https://github.com/beeb/test-repo.git", Some(&GitIdentifier::from_rev("d230f5c588c0ed00821a4eb3ef38e300e4a519dc")), &dir, ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(&res.unwrap(), "d230f5c588c0ed00821a4eb3ef38e300e4a519dc"); } #[tokio::test] async fn test_clone_repo_branch() { let dir = testdir!(); let res = clone_repo( "https://github.com/beeb/test-repo.git", Some(&GitIdentifier::from_branch("dev")), &dir, ) .await; assert!(res.is_ok(), "{res:?}"); 
assert_eq!(&res.unwrap(), "8d903e557e8f1b6e62bde768aa456d4ddfca72c4"); } #[tokio::test] async fn test_clone_repo_tag() { let dir = testdir!(); let res = clone_repo( "https://github.com/beeb/test-repo.git", Some(&GitIdentifier::from_tag("v0.1.0")), &dir, ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(&res.unwrap(), "78c2f6a1a54db26bab6c3f501854a1564eb3707f"); } #[test] fn test_install_path_matches() { let dependency: Dependency = HttpDependency::builder().name("lib1").version_req("^1.0.0").build().into(); let dir = testdir!(); let path = dir.join("lib1-1.1.1"); fs::create_dir(&path).unwrap(); assert!(install_path_matches(&dependency, &path)); let path = dir.join("lib1-2.0.0"); fs::create_dir(&path).unwrap(); assert!(!install_path_matches(&dependency, &path)); let path = dir.join("lib2-1.0.0"); fs::create_dir(&path).unwrap(); assert!(!install_path_matches(&dependency, &path)); } #[test] fn test_install_path_matches_nosemver() { let dependency: Dependency = HttpDependency::builder().name("lib1").version_req("foobar").build().into(); let dir = testdir!(); let path = dir.join("lib1-foobar"); fs::create_dir(&path).unwrap(); assert!(install_path_matches(&dependency, &path)); let path = dir.join("lib1-somethingelse"); fs::create_dir(&path).unwrap(); assert!(!install_path_matches(&dependency, &path)); } #[test] fn test_find_install_path_sync() { let dependency: Dependency = HttpDependency::builder().name("lib1").version_req("^1.0.0").build().into(); let dir = testdir!(); let path = dir.join("lib1-1.1.1"); fs::create_dir(&path).unwrap(); let res = find_install_path_sync(&dependency, &dir); assert!(res.is_some()); assert_eq!(res.unwrap(), path); } #[tokio::test] async fn test_find_install_path() { let dependency: Dependency = HttpDependency::builder().name("lib1").version_req("^1.0.0").build().into(); let dir = testdir!(); let path = dir.join("lib1-1.2.5"); fs::create_dir(&path).unwrap(); let res = find_install_path(&dependency, &dir).await; assert!(res.is_some()); 
assert_eq!(res.unwrap(), path); } } ================================================ FILE: crates/core/src/errors.rs ================================================ use std::{ io, path::{PathBuf, StripPrefixError}, }; use thiserror::Error; #[derive(Error, Debug)] #[non_exhaustive] pub enum SoldeerError { #[error("error during login: {0}")] AuthError(#[from] AuthError), #[error("error during config operation: {0}")] ConfigError(#[from] ConfigError), #[error("error during downloading ({dep}): {source}")] DownloadError { dep: String, source: DownloadError }, #[error("error during install operation: {0}")] InstallError(#[from] InstallError), #[error("error during lockfile operation: {0}")] LockError(#[from] LockError), #[error("error during publishing: {0}")] PublishError(#[from] PublishError), #[error("error during remappings operation: {0}")] RemappingsError(#[from] RemappingsError), #[error("error during registry operation: {0}")] RegistryError(#[from] RegistryError), #[error("error during update operation: {0}")] UpdateError(#[from] UpdateError), #[error("error during IO operation: {0}")] IOError(#[from] io::Error), } #[derive(Error, Debug)] #[non_exhaustive] pub enum AuthError { #[error("login error: invalid email or password")] InvalidCredentials, #[error("login error: invalid token")] InvalidToken, #[error("missing token, run `soldeer login`")] MissingToken, #[error("error during IO operation for the security file: {0}")] IOError(#[from] io::Error), #[error("http error during login: {0}")] HttpError(#[from] reqwest::Error), #[error("TUI disabled and no credentials passed via CLI")] TuiDisabled, } #[derive(Error, Debug)] #[non_exhaustive] pub enum ConfigError { #[error("config file is not valid: {0}")] Parsing(#[from] toml_edit::TomlError), #[error("error writing to config file: {0}")] FileWriteError(#[from] io::Error), #[error("empty `version` field in {0}")] EmptyVersion(String), #[error("missing `{field}` field in {dep}")] MissingField { field: String, dep: 
String }, #[error("invalid `{field}` field in {dep}")] InvalidField { field: String, dep: String }, #[error("field `{field}` conflicts with `{conflicts_with}` in {dep}")] FieldConflict { field: String, conflicts_with: String, dep: String }, #[error("only one of `rev`, `branch` or `tag` can be specified for git dependency {0}")] GitIdentifierConflict(String), #[error("dependency {0} is not valid")] InvalidDependency(String), #[error("dependency {0} was not found")] MissingDependency(String), #[error("error parsing config file: {0}")] DeserializeError(#[from] toml_edit::de::Error), #[error("error generating config file: {0}")] SerializeError(#[from] toml_edit::ser::Error), #[error("error during config operation: {0}")] DownloadError(#[from] DownloadError), #[error( "the version requirement string for {0} cannot contain the equal symbol for git dependencies and http dependencies with a custom URL" )] InvalidVersionReq(String), #[error("dependency specifier {0} cannot be parsed as name~version")] InvalidNameAndVersion(String), #[error("invalid project root path in {dep_path}: {project_root}")] InvalidProjectRoot { project_root: PathBuf, dep_path: PathBuf }, } #[derive(Error, Debug)] #[non_exhaustive] pub enum DownloadError { #[error("error downloading dependency: {0}")] HttpError(#[from] reqwest::Error), #[error("error extracting dependency: {0}")] UnzipError(#[from] zip_extract::ZipExtractError), #[error("error during git command {args:?}: {message}")] GitError { message: String, args: Vec }, #[error("error during IO operation for {path:?}: {source}")] IOError { path: PathBuf, source: io::Error }, #[error("error during async operation: {0}")] AsyncError(#[from] tokio::task::JoinError), #[error("could download the dependencies of this dependency {0}")] SubdependencyError(String), #[error("the provided URL is invalid: {0}")] InvalidUrl(String), #[error("error during registry operation: {0}")] RegistryError(#[from] RegistryError), #[error("dependency not found: {0}")] 
DependencyNotFound(String), } #[derive(Error, Debug)] #[non_exhaustive] pub enum InstallError { #[error("zip checksum for {path} does not match lock file: expected {expected}, got {actual}")] ZipIntegrityError { path: PathBuf, expected: String, actual: String }, #[error("error during IO operation for {path:?}: {source}")] IOError { path: PathBuf, source: io::Error }, #[error("error during git command: {0}")] GitError(String), #[error("error during dependency installation: {0}")] DownloadError(#[from] DownloadError), #[error("error during dependency installation: {0}")] ConfigError(#[from] ConfigError), #[error("error during async operation: {0}")] AsyncError(#[from] tokio::task::JoinError), #[error("error during forge command: {0}")] ForgeError(String), #[error("error during registry operation: {0}")] RegistryError(#[from] RegistryError), #[error("error with lockfile: {0}")] LockError(#[from] LockError), } #[derive(Error, Debug)] #[non_exhaustive] pub enum LockError { #[error("soldeer.lock is missing")] Missing, #[error("dependency {0} is already installed")] DependencyInstalled(String), #[error("IO error for soldeer.lock: {0}")] IOError(#[from] io::Error), #[error("error generating soldeer.lock contents: {0}")] SerializeError(#[from] toml_edit::ser::Error), #[error("lock entry does not match a valid format")] InvalidLockEntry, #[error("missing `{field}` field in lock entry for {dep}")] MissingField { field: String, dep: String }, #[error("foundry.lock is missing")] FoundryLockMissing, #[error("error parsing lockfile contents: {0}")] DeserializeError(#[from] serde_json::Error), } #[derive(Error, Debug)] #[non_exhaustive] pub enum PublishError { #[error("no files to publish")] NoFiles, #[error("error during zipping: {0}")] ZipError(#[from] zip::result::ZipError), #[error("error during IO operation for {path:?}: {source}")] IOError { path: PathBuf, source: io::Error }, #[error("error while computing the relative path: {0}")] RelativePathError(#[from] 
StripPrefixError), #[error("auth error: {0}")] AuthError(#[from] AuthError), #[error("registry error during publishing: {0}")] DownloadError(#[from] RegistryError), #[error( "Project not found. Make sure you send the right dependency name. The dependency name is the project name you created on https://soldeer.xyz" )] ProjectNotFound, #[error("dependency already exists")] AlreadyExists, #[error("the package is too big (over 50 MB)")] PayloadTooLarge, #[error("http error during publishing: {0}")] HttpError(#[from] reqwest::Error), #[error( "invalid package name, only alphanumeric characters, `-` and `@` are allowed. Length must be between 3 and 100 characters" )] InvalidName, #[error("package version cannot be empty")] EmptyVersion, #[error("user cancelled operation")] UserAborted, #[error("unknown http error")] UnknownError, } #[derive(Error, Debug)] #[non_exhaustive] pub enum RegistryError { #[error("error with registry request: {0}")] HttpError(#[from] reqwest::Error), #[error("could not get the dependency URL for {0}")] URLNotFound(String), #[error( "project {0} not found. Private projects require to log in before install. 
Please check the dependency name (project name) or create a new project on https://soldeer.xyz" )] ProjectNotFound(String), #[error("auth error: {0}")] AuthError(#[from] AuthError), #[error("package {0} has no version")] NoVersion(String), #[error("no matching version found for {dependency} with version requirement {version_req}")] NoMatchingVersion { dependency: String, version_req: String }, } #[derive(Error, Debug)] #[non_exhaustive] pub enum RemappingsError { #[error("error writing to remappings file: {0}")] FileWriteError(#[from] io::Error), #[error("error while interacting with the config file: {0}")] ConfigError(#[from] ConfigError), #[error("dependency not found: {0}")] DependencyNotFound(String), } #[derive(Error, Debug)] #[non_exhaustive] pub enum UpdateError { #[error("registry error: {0}")] RegistryError(#[from] RegistryError), #[error("download error: {0}")] DownloadError(#[from] DownloadError), #[error("error during install operation: {0}")] InstallError(#[from] InstallError), #[error("error during async operation: {0}")] AsyncError(#[from] tokio::task::JoinError), } ================================================ FILE: crates/core/src/install.rs ================================================ //! Install dependencies. //! //! This module contains functions to install dependencies from the config object or from the //! lockfile. Dependencies can be installed in parallel. 
use crate::{
    config::{
        Dependency, GitIdentifier, HttpDependency, Paths, detect_config_location, read_config_deps,
        read_soldeer_config,
    },
    download::{clone_repo, delete_dependency_files, download_file, unzip_file},
    errors::{ConfigError, InstallError, LockError},
    lock::{
        GitLockEntry, HttpLockEntry, Integrity, LockEntry, PrivateLockEntry, forge,
        format_install_path, read_lockfile,
    },
    registry::{DownloadUrl, get_dependency_url_remote, get_latest_supported_version},
    utils::{IntegrityChecksum, canonicalize, hash_file, hash_folder, run_git_command},
};
use derive_more::derive::Display;
use log::{debug, info, warn};
use path_slash::PathBufExt as _;
use std::{
    collections::HashMap,
    fmt,
    future::Future,
    ops::Deref,
    path::{Path, PathBuf},
    pin::Pin,
};
use tokio::{fs, sync::mpsc, task::JoinSet};

/// Result type for install operations, using [`InstallError`] as the error type.
pub type Result<T> = std::result::Result<T, InstallError>;

/// Newtype around a dependency's display name, sent over progress-reporting channels.
#[derive(Debug, Clone, Display)]
pub struct DependencyName(String);

impl Deref for DependencyName {
    type Target = String;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Any displayable reference (e.g. `&Dependency`) converts into a `DependencyName` via its
// `Display` representation.
impl<T: fmt::Display> From<&T> for DependencyName {
    fn from(value: &T) -> Self {
        Self(value.to_string())
    }
}

/// Collection of channels to monitor the progress of the install process.
///
/// This is the receiving half created by [`InstallProgress::new`].
#[derive(Debug)]
pub struct InstallMonitoring {
    /// Channel to receive install progress logs.
    pub logs: mpsc::UnboundedReceiver<String>,
    /// Progress for calls to the API to retrieve the packages versions.
    pub versions: mpsc::UnboundedReceiver<DependencyName>,
    /// Progress for downloading the dependencies.
    pub downloads: mpsc::UnboundedReceiver<DependencyName>,
    /// Progress for unzipping the downloaded files.
    pub unzip: mpsc::UnboundedReceiver<DependencyName>,
    /// Progress for installing subdependencies.
    pub subdependencies: mpsc::UnboundedReceiver<DependencyName>,
    /// Progress for checking the integrity of the installed dependencies.
    pub integrity: mpsc::UnboundedReceiver<DependencyName>,
}

/// Collection of channels to notify the caller of the install progress.
///
/// This is the sending half created by [`InstallProgress::new`]; it is `Clone` so each install
/// task can own a copy.
#[derive(Debug, Clone)]
pub struct InstallProgress {
    /// Channel to send messages to be logged to the user.
    pub logs: mpsc::UnboundedSender<String>,
    /// Progress for calls to the API to retrieve the packages versions.
    pub versions: mpsc::UnboundedSender<DependencyName>,
    /// Progress for downloading the dependencies.
    pub downloads: mpsc::UnboundedSender<DependencyName>,
    /// Progress for unzipping the downloaded files.
    pub unzip: mpsc::UnboundedSender<DependencyName>,
    /// Progress for installing subdependencies.
    pub subdependencies: mpsc::UnboundedSender<DependencyName>,
    /// Progress for checking the integrity of the installed dependencies.
    pub integrity: mpsc::UnboundedSender<DependencyName>,
}

impl InstallProgress {
    /// Create a new install progress tracker, with a receiving half ([InstallMonitoring]) and a
    /// sending half ([InstallProgress]).
    pub fn new() -> (Self, InstallMonitoring) {
        let (logs_tx, logs_rx) = mpsc::unbounded_channel();
        let (versions_tx, versions_rx) = mpsc::unbounded_channel();
        let (downloads_tx, downloads_rx) = mpsc::unbounded_channel();
        let (unzip_tx, unzip_rx) = mpsc::unbounded_channel();
        let (subdependencies_tx, subdependencies_rx) = mpsc::unbounded_channel();
        let (integrity_tx, integrity_rx) = mpsc::unbounded_channel();
        (
            Self {
                logs: logs_tx,
                versions: versions_tx,
                downloads: downloads_tx,
                unzip: unzip_tx,
                subdependencies: subdependencies_tx,
                integrity: integrity_tx,
            },
            InstallMonitoring {
                logs: logs_rx,
                versions: versions_rx,
                downloads: downloads_rx,
                unzip: unzip_rx,
                subdependencies: subdependencies_rx,
                integrity: integrity_rx,
            },
        )
    }

    /// Log a message related to progress to the caller.
    ///
    /// Send failures (receiver dropped) are logged as warnings and otherwise ignored.
    pub fn log(&self, msg: impl fmt::Display) {
        if let Err(e) = self.logs.send(msg.to_string()) {
            warn!(err:err = e; "error sending log message to the install progress channel");
        }
    }

    /// Advance all progress trackers at once, passing the dependency name.
    ///
    /// Send failures (receiver dropped) are logged as warnings and otherwise ignored.
    pub fn update_all(&self, dependency_name: DependencyName) {
        if let Err(e) = self.versions.send(dependency_name.clone()) {
            warn!(err:err = e; "error sending version message to the install progress channel");
        }
        if let Err(e) = self.downloads.send(dependency_name.clone()) {
            warn!(err:err = e; "error sending download message to the install progress channel");
        }
        if let Err(e) = self.unzip.send(dependency_name.clone()) {
            warn!(err:err = e; "error sending unzip message to the install progress channel");
        }
        // NOTE(review): "sudependencies" typo below is runtime log text; kept byte-identical here,
        // fix separately if desired
        if let Err(e) = self.subdependencies.send(dependency_name.clone()) {
            warn!(err:err = e; "error sending sudependencies message to the install progress channel");
        }
        if let Err(e) = self.integrity.send(dependency_name) {
            warn!(err:err = e; "error sending integrity message to the install progress channel");
        }
    }
}

/// Status of a dependency, which can either be missing, installed and untouched, or installed but
/// failing the integrity check.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum DependencyStatus {
    /// The dependency is missing.
    Missing,
    /// The dependency is installed but the integrity check failed.
    FailedIntegrity,
    /// The dependency is installed and the integrity check passed.
    Installed,
}

/// HTTP dependency installation information.
#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]
#[builder(on(String, into))]
struct HttpInstallInfo {
    /// The name of the dependency.
    name: String,
    /// The version of the dependency. This is not a version requirement string but a specific
    /// version.
    version: String,
    /// The URL from which the zip file will be downloaded.
    url: String,
    /// The checksum of the downloaded zip file, if available (e.g. from the lockfile)
    checksum: Option<String>,
    /// An optional relative path to the project's root within the zip file.
    ///
    /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided,
    /// then the zip's root must contain a Soldeer config.
    project_root: Option<PathBuf>,
}

impl fmt::Display for HttpInstallInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // since the version is an exact version number, we use a dash and not a tilde
        write!(f, "{}-{}", self.name, self.version)
    }
}

/// Git dependency installation information.
#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]
#[builder(on(String, into))]
struct GitInstallInfo {
    /// The name of the dependency.
    name: String,
    /// The version of the dependency.
    version: String,
    /// The URL of the git repository.
    git: String,
    /// The identifier of the git dependency (e.g. a commit hash, branch name, or tag name). If
    /// `None` is provided, the default branch is used.
    identifier: Option<GitIdentifier>,
    /// An optional relative path to the project's root within the repository.
    ///
    /// The project root is where the soldeer.toml or foundry.toml resides. If no path is provided,
    /// then the repo's root must contain a Soldeer config.
    project_root: Option<PathBuf>,
}

impl fmt::Display for GitInstallInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}-{}", self.name, self.version)
    }
}

/// Installation information for a dependency.
///
/// A builder can be used to create the underlying [`HttpInstallInfo`] or [`GitInstallInfo`] and
/// then converted into this type with `.into()`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Display)]
enum InstallInfo {
    /// Installation information for an HTTP dependency.
    Http(HttpInstallInfo),
    /// Installation information for a git dependency.
    Git(GitInstallInfo),
    /// Installation information for a private dependency.
    Private(HttpInstallInfo),
}

impl From<HttpInstallInfo> for InstallInfo {
    fn from(value: HttpInstallInfo) -> Self {
        Self::Http(value)
    }
}

impl From<GitInstallInfo> for InstallInfo {
    fn from(value: GitInstallInfo) -> Self {
        Self::Git(value)
    }
}

impl InstallInfo {
    /// Build an [`InstallInfo`] from a lockfile entry.
    ///
    /// For private dependencies, a signed download URL is retrieved from the registry, which is
    /// why this constructor is async and fallible.
    async fn from_lock(lock: LockEntry, project_root: Option<PathBuf>) -> Result<Self> {
        match lock {
            LockEntry::Http(lock) => Ok(HttpInstallInfo {
                name: lock.name,
                version: lock.version,
                url: lock.url,
                checksum: Some(lock.checksum),
                project_root,
            }
            .into()),
            LockEntry::Git(lock) => Ok(GitInstallInfo {
                name: lock.name,
                version: lock.version,
                git: lock.git,
                identifier: Some(GitIdentifier::from_rev(lock.rev)),
                project_root,
            }
            .into()),
            LockEntry::Private(lock) => {
                // need to retrieve a signed download URL from the registry
                let download = get_dependency_url_remote(
                    &HttpDependency::builder()
                        .name(&lock.name)
                        .version_req(&lock.version)
                        .build()
                        .into(),
                    &lock.version,
                )
                .await?;
                Ok(Self::Private(HttpInstallInfo {
                    name: lock.name,
                    version: lock.version,
                    url: download.url,
                    checksum: Some(lock.checksum),
                    project_root,
                }))
            }
        }
    }
}

/// Git submodule information
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
struct Submodule {
    url: String,
    path: String,
    branch: Option<String>,
}

/// Install a list of dependencies in parallel.
///
/// This function spawns a task for each dependency and waits for all of them to finish. Each task
/// checks the integrity of the dependency if found on disk, downloads the dependency (zip file or
/// cloning repo) if not already present, unzips the zip file if necessary, installs
/// sub-dependencies and generates the lockfile entry.
pub async fn install_dependencies( dependencies: &[Dependency], locks: &[LockEntry], deps: impl AsRef, recursive_deps: bool, progress: InstallProgress, ) -> Result> { let mut set = JoinSet::new(); for dep in dependencies { debug!(dep:% = dep; "spawning task to install dependency"); set.spawn({ let d = dep.clone(); let p = progress.clone(); let lock = locks.iter().find(|l| l.name() == dep.name()).cloned(); let deps = deps.as_ref().to_path_buf(); async move { install_dependency( &d, lock.as_ref(), deps, None, recursive_deps, p, ) .await } }); } let mut results = Vec::new(); while let Some(res) = set.join_next().await { let res = res??; debug!(dep:% = res.name(); "install task finished"); results.push(res); } debug!("all install tasks have finished"); Ok(results) } /// Install a list of dependencies sequentially. /// /// This function can be used inside another tokio task to avoid spawning more tasks, useful for /// recursive install. For each dep, checks the integrity of the dependency if found on disk, /// downloads the dependency (zip file or cloning repo) if not already present, unzips the zip file /// if necessary, installs sub-dependencies and generates the lockfile entry. pub async fn install_dependencies_sequential( dependencies: &[Dependency], locks: &[LockEntry], deps: impl AsRef + Clone, recursive_deps: bool, progress: InstallProgress, ) -> Result> { let mut results = Vec::new(); for dep in dependencies { debug!(dep:% = dep; "installing dependency sequentially"); let lock = locks.iter().find(|l| l.name() == dep.name()); results.push( install_dependency(dep, lock, deps.clone(), None, recursive_deps, progress.clone()) .await?, ); debug!(dep:% = dep; "sequential install finished"); } debug!("all sequential installs have finished"); Ok(results) } /// Install a single dependency. 
/// /// This function checks the integrity of the dependency if found on disk, downloads the dependency /// (zip file or cloning repo) if not already present, unzips the zip file if necessary, installs /// sub-dependencies and generates the lockfile entry. /// /// If no lockfile entry is provided, the dependency is installed from the config object and /// integrity checks are skipped. pub async fn install_dependency( dependency: &Dependency, lock: Option<&LockEntry>, deps: impl AsRef, force_version: Option, recursive_deps: bool, progress: InstallProgress, ) -> Result { if let Some(lock) = lock { debug!(dep:% = dependency; "installing based on lock entry"); match check_dependency_integrity(lock, &deps).await? { DependencyStatus::Installed => { info!(dep:% = dependency; "skipped install, dependency already up-to-date with lockfile"); progress.update_all(dependency.into()); return Ok(lock.clone()); } DependencyStatus::FailedIntegrity => match dependency { Dependency::Http(_) => { info!(dep:% = dependency; "dependency failed integrity check, reinstalling"); progress.log(format!( "Dependency {dependency} failed integrity check, reinstalling" )); // we know the folder exists because otherwise we would have gotten // `Missing` delete_dependency_files(dependency, &deps).await?; debug!(dep:% = dependency; "removed dependency folder"); // we won't need to retrieve the version number so we mark it as done progress.versions.send(dependency.into()).ok(); } Dependency::Git(_) => { let commit = &lock.as_git().expect("lock entry should be of type git").rev; info!(dep:% = dependency, commit; "dependency failed integrity check, resetting to commit"); progress.log(format!( "Dependency {dependency} failed integrity check, resetting to commit {commit}" )); reset_git_dependency( lock.as_git().expect("lock entry should be of type git"), &deps, ) .await?; debug!(dep:% = dependency; "reset git dependency"); // dependency should now be at the correct commit, we can exit 
progress.update_all(dependency.into()); return Ok(lock.clone()); } }, DependencyStatus::Missing => { // make sure there is no existing directory for the dependency if let Some(path) = dependency.install_path(&deps).await { fs::remove_dir_all(&path) .await .map_err(|e| InstallError::IOError { path, source: e })?; } info!(dep:% = dependency; "dependency is missing, installing"); // we won't need to retrieve the version number so we mark it as done progress.versions.send(dependency.into()).ok(); } } install_dependency_inner( &InstallInfo::from_lock(lock.clone(), dependency.project_root()).await?, lock.install_path(&deps), recursive_deps, progress, ) .await } else { // no lockfile entry, install from config object debug!(dep:% = dependency; "no lockfile entry, installing based on config"); // make sure there is no existing directory for the dependency if let Some(path) = dependency.install_path(&deps).await { fs::remove_dir_all(&path) .await .map_err(|e| InstallError::IOError { path, source: e })?; } let (download, version) = match dependency.url() { // for git dependencies and http dependencies which have a custom url, we use the // version requirement string as version, because in that case a version requirement has // little sense (we can't automatically bump the version) Some(url) => ( DownloadUrl { url: url.clone(), private: false }, dependency.version_req().to_string(), ), None => { let version = match force_version { Some(v) => v, None => get_latest_supported_version(dependency).await?, }; (get_dependency_url_remote(dependency, &version).await?, version) } }; debug!(dep:% = dependency, version; "resolved version"); debug!(dep:% = dependency, url:? 
= download; "resolved download URL"); // indicate that we have retrieved the version number progress.versions.send(dependency.into()).ok(); let info = match &dependency { Dependency::Http(dep) => { if download.private { InstallInfo::Private( HttpInstallInfo::builder() .name(&dep.name) .version(&version) .url(download.url) .build(), ) } else { HttpInstallInfo::builder() .name(&dep.name) .version(&version) .url(download.url) .build() .into() } } Dependency::Git(dep) => GitInstallInfo::builder() .name(&dep.name) .version(&version) .git(download.url) .maybe_identifier(dep.identifier.clone()) .build() .into(), }; let install_path = format_install_path(dependency.name(), &version, &deps); debug!(dep:% = dependency; "installing to path {install_path:?}"); install_dependency_inner(&info, install_path, recursive_deps, progress).await } } /// Check the integrity of a dependency that was installed. /// /// If any file has changed in the dependency directory (except ignored files and any `.git` /// directory), the integrity check will fail. pub async fn check_dependency_integrity( lock: &LockEntry, deps: impl AsRef, ) -> Result { match lock { LockEntry::Http(lock) => check_http_dependency(lock, deps).await, LockEntry::Private(lock) => check_http_dependency(lock, deps).await, LockEntry::Git(lock) => check_git_dependency(lock, deps).await, } } /// Ensure that the dependencies directory exists. /// /// If the directory does not exist, it will be created. pub fn ensure_dependencies_dir(path: impl AsRef) -> Result<()> { let path = path.as_ref(); if !path.exists() { debug!(path:?; "dependencies dir doesn't exist, creating it"); std::fs::create_dir(path) .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?; } Ok(()) } /// Install a single dependency. 
async fn install_dependency_inner( dep: &InstallInfo, path: impl AsRef, subdependencies: bool, progress: InstallProgress, ) -> Result { match dep { InstallInfo::Http(dep) => { let (zip_integrity, integrity) = install_http_dependency(dep, path, subdependencies, progress).await?; Ok(HttpLockEntry::builder() .name(&dep.name) .version(&dep.version) .url(&dep.url) .checksum(zip_integrity.to_string()) .integrity(integrity.to_string()) .build() .into()) } InstallInfo::Private(dep) => { let (zip_integrity, integrity) = install_http_dependency(dep, path, subdependencies, progress).await?; Ok(PrivateLockEntry::builder() .name(&dep.name) .version(&dep.version) .checksum(zip_integrity.to_string()) .integrity(integrity.to_string()) .build() .into()) } InstallInfo::Git(dep) => { // if the dependency was specified without a commit hash and we didn't have a lockfile, // clone the default branch let commit = clone_repo(&dep.git, dep.identifier.as_ref(), &path).await?; progress.downloads.send(dep.into()).ok(); if subdependencies { debug!(dep:% = dep; "installing subdependencies"); install_subdependencies(&path, dep.project_root.as_ref()).await?; debug!(dep:% = dep; "finished installing subdependencies"); } progress.unzip.send(dep.into()).ok(); progress.subdependencies.send(dep.into()).ok(); progress.integrity.send(dep.into()).ok(); Ok(GitLockEntry::builder() .name(&dep.name) .version(&dep.version) .git(&dep.git) .rev(commit) .build() .into()) } } } /// Install subdependencies of a dependency. /// /// This function checks for a `.gitmodules` file in the dependency directory and clones the /// submodules if it exists. If a valid Soldeer config is found at the project root (optionally a /// sub-dir of the dependency folder), the soldeer dependencies are installed. 
fn install_subdependencies( path: impl AsRef, project_root: Option<&PathBuf>, ) -> Pin> + Send + '_>> { let path = path.as_ref().to_path_buf(); Box::pin(async move { let gitmodules_path = path.join(".gitmodules"); if fs::metadata(&gitmodules_path).await.is_ok() { debug!(path:?; "found .gitmodules, installing subdependencies with git"); if fs::metadata(path.join(".git")).await.is_ok() { debug!(path:?; "subdependency contains .git directory, cloning submodules"); run_git_command(&["submodule", "update", "--init"], Some(&path)).await?; // we need to recurse into each of the submodules to ensure any soldeer sub-deps // of those are also installed let submodules = get_submodules(&path).await?; let mut set = JoinSet::new(); for (_, submodule) in submodules { let sub_path = path.join(submodule.path); debug!(sub_path:?; "recursing into the git submodule"); set.spawn(async move { install_subdependencies(sub_path, None).await }); } while let Some(res) = set.join_next().await { res??; } } else { debug!(path:?; "subdependency has git submodules configuration but is not a git repository"); let submodule_paths = reinit_submodules(&path).await?; // we need to recurse into each of the submodules to ensure any soldeer sub-deps // of those are also installed let mut set = JoinSet::new(); for sub_path in submodule_paths { debug!(sub_path:?; "recursing into the git submodule"); set.spawn(async move { install_subdependencies(sub_path, None).await }); } while let Some(res) = set.join_next().await { res??; } } } // if there's a suitable soldeer config, install the soldeer deps let path = get_subdependency_root(path, project_root).await?; if detect_config_location(&path).is_some() { // install subdependencies debug!(path:?; "found soldeer config, installing subdependencies"); install_subdependencies_inner(Paths::from_root(path)?).await?; } Ok(()) }) } /// Inner logic for installing subdependencies at a given path. 
/// /// This is a similar implementation to the one found in `soldeer_commands` but /// simplified. async fn install_subdependencies_inner(paths: Paths) -> Result<()> { let config = read_soldeer_config(&paths.config)?; ensure_dependencies_dir(&paths.dependencies)?; let (dependencies, _) = read_config_deps(&paths.config)?; let lockfile = read_lockfile(&paths.lock)?; let (progress, _) = InstallProgress::new(); // not used at the moment let _ = install_dependencies( &dependencies, &lockfile.entries, &paths.dependencies, config.recursive_deps, progress, ) .await?; Ok(()) } /// Download and unzip an HTTP dependency async fn install_http_dependency( dep: &HttpInstallInfo, path: impl AsRef, subdependencies: bool, progress: InstallProgress, ) -> Result<(IntegrityChecksum, IntegrityChecksum)> { let path = path.as_ref(); let zip_path = download_file( &dep.url, path.parent().expect("dependency install path should have a parent"), &format!("{}-{}", dep.name, dep.version), ) .await?; progress.downloads.send(dep.into()).ok(); let zip_integrity = tokio::task::spawn_blocking({ let zip_path = zip_path.clone(); move || hash_file(zip_path) }) .await? 
.map_err(|e| InstallError::IOError { path: zip_path.clone(), source: e })?; if let Some(checksum) = &dep.checksum { if checksum != &zip_integrity.to_string() { return Err(InstallError::ZipIntegrityError { path: zip_path.clone(), expected: checksum.to_string(), actual: zip_integrity.to_string(), }); } debug!(zip_path:?; "archive integrity check successful"); } else { debug!(zip_path:?; "no checksum available for archive integrity check"); } unzip_file(&zip_path, path).await?; progress.unzip.send(dep.into()).ok(); if subdependencies { debug!(dep:% = dep; "installing subdependencies"); install_subdependencies(path, dep.project_root.as_ref()).await?; debug!(dep:% = dep; "finished installing subdependencies"); } progress.subdependencies.send(dep.into()).ok(); let integrity = tokio::task::spawn_blocking({ let path = path.to_path_buf(); move || hash_folder(&path) }) .await? .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?; debug!(dep:% = dep, checksum = integrity.0; "integrity checksum computed"); progress.integrity.send(dep.into()).ok(); Ok((zip_integrity, integrity)) } /// Retrieve a map of git submodules for a path by looking at the `.gitmodules` file. 
async fn get_submodules(path: &PathBuf) -> Result> { let submodules_config = run_git_command(&["config", "-f", ".gitmodules", "-l"], Some(path)).await?; let mut submodules = HashMap::::new(); for config_line in submodules_config.trim().lines() { let (item, value) = config_line.split_once('=').expect("config format should be valid"); let Some(item) = item.strip_prefix("submodule.") else { continue; }; let (submodule_name, item_name) = item.rsplit_once('.').expect("config format should be valid"); let entry = submodules.entry(submodule_name.to_string()).or_default(); match item_name { "path" => entry.path = value.to_string(), "url" => entry.url = value.to_string(), "branch" => entry.branch = Some(value.to_string()), _ => {} } } Ok(submodules) } /// Re-add submodules found in a `.gitmodules` when the folder has to be re-initialized as a git /// repo. /// /// The file is parsed, and each module is added again with `git submodule add`. async fn reinit_submodules(path: &PathBuf) -> Result> { debug!(path:?; "running git init"); run_git_command(&["init"], Some(path)).await?; let submodules = get_submodules(path).await?; debug!(submodules:?, path:?; "got submodules config"); let mut foundry_lock = forge::Lockfile::new(path); if foundry_lock.read().is_ok() { debug!(path:?; "foundry lockfile exists"); } let mut out = Vec::new(); for (submodule_name, submodule) in submodules { // make sure to remove the path if it already exists let dest_path = path.join(&submodule.path); fs::remove_dir_all(&dest_path).await.ok(); // ignore error if folder doesn't exist let mut args = vec!["submodule", "add", "-f", "--name", &submodule_name]; if let Some(branch) = &submodule.branch { args.push("-b"); args.push(branch); } args.push(&submodule.url); args.push(&submodule.path); run_git_command(args, Some(path)).await?; if let Some( forge::DepIdentifier::Branch { rev, .. } | forge::DepIdentifier::Tag { rev, .. 
} | forge::DepIdentifier::Rev { rev }, ) = foundry_lock.get(Path::new(&submodule.path)) { debug!(submodule_name, path:?; "found corresponding item in foundry lockfile"); run_git_command(["checkout", rev], Some(&dest_path)).await?; debug!(submodule_name, path:?; "submodule checked out at {rev}"); } debug!(submodule_name, path:?; "added submodule"); out.push(path.join(submodule.path)); } Ok(out) } /// Check the integrity of an HTTP dependency. /// /// This function hashes the contents of the dependency directory and compares it with the lockfile /// entry. async fn check_http_dependency( lock: &impl Integrity, deps: impl AsRef, ) -> Result { let path = lock.install_path(deps); if fs::metadata(&path).await.is_err() { return Ok(DependencyStatus::Missing); } let current_hash = tokio::task::spawn_blocking({ let path = path.clone(); move || hash_folder(&path) }) .await? .map_err(|e| InstallError::IOError { path: path.to_path_buf(), source: e })?; let Some(integrity) = lock.integrity() else { return Err(LockError::MissingField { field: "integrity".to_string(), dep: path.to_string_lossy().to_string(), } .into()) }; if ¤t_hash.to_string() != integrity { debug!(path:?, expected = integrity, computed = current_hash.0; "integrity checksum mismatch"); return Ok(DependencyStatus::FailedIntegrity); } Ok(DependencyStatus::Installed) } /// Check the integrity of a git dependency. /// /// This function checks that the dependency is a git repository and that the current commit is the /// one specified in the lockfile entry. 
async fn check_git_dependency( lock: &GitLockEntry, deps: impl AsRef, ) -> Result { let path = lock.install_path(deps); if fs::metadata(&path).await.is_err() { return Ok(DependencyStatus::Missing); } // check that the location is a git repository let top_level = match run_git_command( &["rev-parse", "--show-toplevel", path.to_string_lossy().as_ref()], Some(&path), ) .await { Ok(top_level) => { // stdout contains the path twice, we only keep the first item PathBuf::from(top_level.split_whitespace().next().unwrap_or_default()) } Err(_) => { // error getting the top level directory, assume the directory is not a git repository debug!(path:?; "`git rev-parse --show-toplevel` failed"); return Ok(DependencyStatus::Missing); } }; let top_level = top_level.to_slash_lossy(); // compare the top level directory to the install path let absolute_path = canonicalize(&path) .await .map_err(|e| InstallError::IOError { path: path.clone(), source: e })?; if top_level.trim() != absolute_path.to_slash_lossy() { // the top level directory is not the install path, assume the directory is not a git // repository debug!(path:?; "dependency's toplevel dir is outside of dependency folder: not a git repo"); return Ok(DependencyStatus::Missing); } // for git dependencies, the `rev` field holds the commit hash match run_git_command(&["diff", "--exit-code", &lock.rev], Some(&path)).await { Ok(_) => Ok(DependencyStatus::Installed), Err(_) => { debug!(path:?, rev = lock.rev; "git repo has non-empty diff compared to lockfile rev"); Ok(DependencyStatus::FailedIntegrity) } } } /// Reset a git dependency to the commit specified in the lockfile entry. /// /// This function runs `git reset --hard ` and `git clean -fd` in the git dependency's /// directory. 
async fn reset_git_dependency(lock: &GitLockEntry, deps: impl AsRef) -> Result<()> { let path = lock.install_path(deps); run_git_command(&["reset", "--hard", &lock.rev], Some(&path)).await?; run_git_command(&["clean", "-fd"], Some(&path)).await?; Ok(()) } /// Normalize and check the path to a subdependency's project root. /// /// The combination of the subdependency path with the relative path to the root must be at or below /// the level of the subdependency, to avoid directory traversal. async fn get_subdependency_root( subdependency_path: PathBuf, relative_root: Option<&PathBuf>, ) -> Result { let path = match relative_root { Some(relative_root) => { let tentative_path = canonicalize(subdependency_path.join(relative_root)).await.map_err(|_| { InstallError::ConfigError(ConfigError::InvalidProjectRoot { project_root: relative_root.to_owned(), dep_path: subdependency_path.clone(), }) })?; // final path must be below the dependency's folder let path_with_slashes = subdependency_path.to_slash_lossy().into_owned(); if !tentative_path.to_slash_lossy().starts_with(&path_with_slashes) { return Err(InstallError::ConfigError(ConfigError::InvalidProjectRoot { project_root: relative_root.to_owned(), dep_path: subdependency_path.clone(), })); } tentative_path } None => subdependency_path, }; Ok(path) } #[cfg(test)] mod tests { use super::*; use crate::config::{GitDependency, HttpDependency}; use mockito::{Matcher, Server, ServerGuard}; use temp_env::async_with_vars; use testdir::testdir; async fn mock_api_server() -> ServerGuard { let mut server = Server::new_async().await; let data = 
r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3389,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"},{"created_at":"2024-07-03T14:44:59.729623Z","deleted":false,"downloads":5290,"id":"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4","internal_name":"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","version":"1.9.1"},{"created_at":"2024-07-03T14:44:58.148723Z","deleted":false,"downloads":21,"id":"b463683a-c4b4-40bf-b707-1c4eb343c4d2","internal_name":"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","version":"1.9.0"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let data2 = r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3391,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision-cli") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data2) .create_async() .await; server } async fn mock_api_private() -> ServerGuard { let mut server = Server::new_async().await; 
let data = r#"{"data":[{"created_at":"2025-09-28T12:36:09.526660Z","deleted":false,"downloads":0,"file_size":65083,"id":"0440c261-8cdf-4738-9139-c4dc7b0c7f3e","internal_name":"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip","private":true,"project_id":"14f419e7-2d64-49e4-86b9-b44b36627786","uploader":"bf8e75f4-0c36-4bcb-a23b-2682df92f176","url":"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip","version":"0.1.0"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let data2 = r#"{"data":[{"created_at":"2025-09-28T12:36:09.526660Z","deleted":false,"id":"0440c261-8cdf-4738-9139-c4dc7b0c7f3e","internal_name":"test-private/0_1_0_28-09-2025_12:36:08_test-private.zip","private":true,"project_id":"14f419e7-2d64-49e4-86b9-b44b36627786","url":"https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip","version":"0.1.0"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision-cli") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data2) .create_async() .await; server } #[tokio::test] async fn test_check_http_dependency() { let lock = HttpLockEntry::builder() .name("lib1") .version("1.0.0") .url("https://example.com/zip.zip") .checksum("") .integrity("beef") .build(); let dir = testdir!(); let path = dir.join("lib1-1.0.0"); fs::create_dir(&path).await.unwrap(); fs::write(path.join("test.txt"), "foobar").await.unwrap(); let res = check_http_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity); let lock = HttpLockEntry::builder() .name("lib2") .version("1.0.0") .url("https://example.com/zip.zip") .checksum("") .integrity("") .build(); let res = check_http_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), 
DependencyStatus::Missing); let hash = hash_folder(&path).unwrap(); let lock = HttpLockEntry::builder() .name("lib1") .version("1.0.0") .url("https://example.com/zip.zip") .checksum("") .integrity(hash.to_string()) .build(); let res = check_http_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::Installed); } #[tokio::test] async fn test_check_git_dependency() { // happy path let dir = testdir!(); let path = &dir.join("test-repo-1.0.0"); let rev = clone_repo("https://github.com/beeb/test-repo.git", None, &path).await.unwrap(); let lock = GitLockEntry::builder().name("test-repo").version("1.0.0").git("").rev(rev).build(); let res = check_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::Installed); // replace contents of existing file, diff is not empty fs::write(path.join("foo.txt"), "foo").await.unwrap(); let res = check_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity); // wrong commit is checked out let lock = GitLockEntry::builder() .name("test-repo") .version("1.0.0") .git("") .rev("78c2f6a1a54db26bab6c3f501854a1564eb3707f") .build(); let res = check_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::FailedIntegrity); // missing folder let lock = GitLockEntry::builder().name("lib1").version("1.0.0").git("").rev("").build(); let res = check_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::Missing); // remove .git folder -> not a git repo let lock = GitLockEntry::builder().name("test-repo").version("1.0.0").git("").rev("").build(); fs::remove_dir_all(path.join(".git")).await.unwrap(); let res = check_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), DependencyStatus::Missing); } #[tokio::test] async fn 
test_reset_git_dependency() { let dir = testdir!(); let path = &dir.join("test-repo-1.0.0"); clone_repo("https://github.com/beeb/test-repo.git", None, &path).await.unwrap(); let lock = GitLockEntry::builder() .name("test-repo") .version("1.0.0") .git("") .rev("78c2f6a1a54db26bab6c3f501854a1564eb3707f") .build(); let test = path.join("test.txt"); fs::write(&test, "foobar").await.unwrap(); let res = reset_git_dependency(&lock, &dir).await; assert!(res.is_ok(), "{res:?}"); // non checked-in file assert!(fs::metadata(test).await.is_err()); // file that is in `main` but not in `78c2f6a` assert!(fs::metadata(path.join("foo.txt")).await.is_err()); let commit = run_git_command(&["rev-parse", "--verify", "HEAD"], Some(path)) .await .unwrap() .trim() .to_string(); assert_eq!(commit, "78c2f6a1a54db26bab6c3f501854a1564eb3707f"); } #[tokio::test] async fn test_install_dependency_inner_http() { let dir = testdir!(); let install: InstallInfo = HttpInstallInfo::builder().name("test").version("1.0.0").url("https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip").checksum("94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468").build().into(); let (progress, _) = InstallProgress::new(); let res = install_dependency_inner(&install, &dir, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), "test"); assert_eq!(lock.version(), "1.0.0"); let lock = lock.as_http().unwrap(); assert_eq!( lock.url, "https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip" ); assert_eq!( lock.checksum, "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" ); let hash = hash_folder(&dir).unwrap(); assert_eq!(lock.integrity, hash.to_string()); } #[tokio::test] async fn test_install_dependency_inner_git() { let dir = testdir!(); let install: InstallInfo = GitInstallInfo::builder() .name("test") .version("1.0.0") .git("https://github.com/beeb/test-repo.git") .build() 
.into(); let (progress, _) = InstallProgress::new(); let res = install_dependency_inner(&install, &dir, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), "test"); assert_eq!(lock.version(), "1.0.0"); let lock = lock.as_git().unwrap(); assert_eq!(lock.git, "https://github.com/beeb/test-repo.git"); assert_eq!(lock.rev, "d5d72fa135d28b2e8307650b3ea79115183f2406"); assert!(dir.join(".git").exists()); } #[tokio::test] async fn test_install_dependency_inner_git_rev() { let dir = testdir!(); let install: InstallInfo = GitInstallInfo::builder() .name("test") .version("1.0.0") .git("https://github.com/beeb/test-repo.git") .identifier(GitIdentifier::from_rev("78c2f6a1a54db26bab6c3f501854a1564eb3707f")) .build() .into(); let (progress, _) = InstallProgress::new(); let res = install_dependency_inner(&install, &dir, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), "test"); assert_eq!(lock.version(), "1.0.0"); let lock = lock.as_git().unwrap(); assert_eq!(lock.git, "https://github.com/beeb/test-repo.git"); assert_eq!(lock.rev, "78c2f6a1a54db26bab6c3f501854a1564eb3707f"); assert!(dir.join(".git").exists()); } #[tokio::test] async fn test_install_dependency_inner_git_branch() { let dir = testdir!(); let install: InstallInfo = GitInstallInfo::builder() .name("test") .version("1.0.0") .git("https://github.com/beeb/test-repo.git") .identifier(GitIdentifier::from_branch("dev")) .build() .into(); let (progress, _) = InstallProgress::new(); let res = install_dependency_inner(&install, &dir, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), "test"); assert_eq!(lock.version(), "1.0.0"); let lock = lock.as_git().unwrap(); assert_eq!(lock.git, "https://github.com/beeb/test-repo.git"); assert_eq!(lock.rev, "8d903e557e8f1b6e62bde768aa456d4ddfca72c4"); assert!(dir.join(".git").exists()); } #[tokio::test] async fn 
test_install_dependency_inner_git_tag() { let dir = testdir!(); let install: InstallInfo = GitInstallInfo::builder() .name("test") .version("1.0.0") .git("https://github.com/beeb/test-repo.git") .identifier(GitIdentifier::from_tag("v0.1.0")) .build() .into(); let (progress, _) = InstallProgress::new(); let res = install_dependency_inner(&install, &dir, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), "test"); assert_eq!(lock.version(), "1.0.0"); let lock = lock.as_git().unwrap(); assert_eq!(lock.git, "https://github.com/beeb/test-repo.git"); assert_eq!(lock.rev, "78c2f6a1a54db26bab6c3f501854a1564eb3707f"); assert!(dir.join(".git").exists()); } #[tokio::test] async fn test_install_dependency_registry() { let server = mock_api_server().await; let dir = testdir!(); let dep = HttpDependency::builder().name("forge-std").version_req("1.9.2").build().into(); let (progress, _) = InstallProgress::new(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], install_dependency(&dep, None, &dir, None, false, progress), ) .await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), dep.name()); assert_eq!(lock.version(), dep.version_req()); let lock = lock.as_http().unwrap(); assert_eq!( &lock.url, "https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip" ); assert_eq!( lock.checksum, "20fd008c7c69b6c737cc0284469d1c76497107bc3e004d8381f6d8781cb27980" ); let hash = hash_folder(lock.install_path(&dir)).unwrap(); assert_eq!(lock.integrity, hash.to_string()); } #[tokio::test] async fn test_install_dependency_registry_compatible() { let server = mock_api_server().await; let dir = testdir!(); let dep = HttpDependency::builder().name("forge-std").version_req("^1.9.0").build().into(); let (progress, _) = InstallProgress::new(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], install_dependency(&dep, None, &dir, None, 
false, progress), ) .await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), dep.name()); assert_eq!(lock.version(), "1.9.2"); let lock = lock.as_http().unwrap(); assert_eq!( &lock.url, "https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip" ); let hash = hash_folder(lock.install_path(&dir)).unwrap(); assert_eq!(lock.integrity, hash.to_string()); } #[tokio::test] async fn test_install_dependency_http() { let dir = testdir!(); let dep = HttpDependency::builder().name("test").version_req("1.0.0").url("https://github.com/mario-eth/soldeer/archive/8585a7ec85a29889cec8d08f4770e15ec4795943.zip").build().into(); let (progress, _) = InstallProgress::new(); let res = install_dependency(&dep, None, &dir, None, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), dep.name()); assert_eq!(lock.version(), dep.version_req()); let lock = lock.as_http().unwrap(); assert_eq!(&lock.url, dep.url().unwrap()); assert_eq!( lock.checksum, "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" ); let hash = hash_folder(lock.install_path(&dir)).unwrap(); assert_eq!(lock.integrity, hash.to_string()); } #[tokio::test] async fn test_install_dependency_git() { let dir = testdir!(); let dep = GitDependency::builder() .name("test") .version_req("1.0.0") .git("https://github.com/beeb/test-repo.git") .build() .into(); let (progress, _) = InstallProgress::new(); let res = install_dependency(&dep, None, &dir, None, false, progress).await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), dep.name()); assert_eq!(lock.version(), dep.version_req()); let lock = lock.as_git().unwrap(); assert_eq!(&lock.git, dep.url().unwrap()); assert_eq!(lock.rev, "d5d72fa135d28b2e8307650b3ea79115183f2406"); } #[tokio::test] async fn test_install_dependency_private() { let server = mock_api_private().await; let dir = testdir!(); let dep = 
HttpDependency::builder().name("test-private").version_req("0.1.0").build().into(); let (progress, _) = InstallProgress::new(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], install_dependency(&dep, None, &dir, None, false, progress), ) .await; assert!(res.is_ok(), "{res:?}"); let lock = res.unwrap(); assert_eq!(lock.name(), dep.name()); assert_eq!(lock.version(), dep.version_req()); let lock = lock.as_private().unwrap(); assert_eq!( lock.checksum, "94a73dbe106f48179ea39b00d42e5d4dd96fdc6252caa3a89ce7efdaec0b9468" ); let hash = hash_folder(lock.install_path(&dir)).unwrap(); assert_eq!(lock.integrity, hash.to_string()); } } ================================================ FILE: crates/core/src/lib.rs ================================================ //! Low-level library for interacting with Soldeer registries and files #![cfg_attr(docsrs, feature(doc_cfg))] pub use errors::SoldeerError; pub type Result = std::result::Result; pub mod auth; pub mod config; pub mod download; pub mod errors; pub mod install; pub mod lock; pub mod push; pub mod registry; pub mod remappings; pub mod update; pub mod utils; ================================================ FILE: crates/core/src/lock/forge.rs ================================================ //! Vendored version of the `lockfile` module of `forge`. //! //! Slightly adapted to reduce dependencies. use log::debug; use serde::{Deserialize, Serialize}; use std::{ collections::HashMap, fs, path::{Path, PathBuf}, }; use crate::errors::LockError; use super::Result; pub const FOUNDRY_LOCK: &str = "foundry.lock"; /// A type alias for a HashMap of dependencies keyed by relative path to the submodule dir. pub type DepMap = HashMap; /// A lockfile handler that keeps track of the dependencies and their current state. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Lockfile { /// A map of the dependencies keyed by relative path to the submodule dir. 
#[serde(flatten)] deps: DepMap, /// Absolute path to the lockfile. #[serde(skip)] lockfile_path: PathBuf, } impl Lockfile { /// Create a new [`Lockfile`] instance. /// /// `project_root` is the absolute path to the project root. /// /// You will need to call [`Lockfile::read`] to load the lockfile. pub fn new(project_root: &Path) -> Self { Self { deps: HashMap::default(), lockfile_path: project_root.join(FOUNDRY_LOCK) } } /// Loads the lockfile from the project root. /// /// Throws an error if the lockfile does not exist. pub fn read(&mut self) -> Result<()> { if !self.lockfile_path.exists() { return Err(LockError::FoundryLockMissing); } let lockfile_str = fs::read_to_string(&self.lockfile_path)?; self.deps = serde_json::from_str(&lockfile_str)?; debug!(lockfile:? = self.deps; "loaded lockfile"); Ok(()) } /// Get the [`DepIdentifier`] for a submodule at a given path. pub fn get(&self, path: &Path) -> Option<&DepIdentifier> { self.deps.get(path) } /// Returns the num of dependencies in the lockfile. pub fn len(&self) -> usize { self.deps.len() } /// Returns whether the lockfile is empty. pub fn is_empty(&self) -> bool { self.deps.is_empty() } /// Returns an iterator over the lockfile. pub fn iter(&self) -> impl Iterator { self.deps.iter() } pub fn exists(&self) -> bool { self.lockfile_path.exists() } } // Implement .iter() for &LockFile /// Identifies whether a dependency (submodule) is referenced by a branch, /// tag or rev (commit hash). /// /// Each enum variant consists of an `r#override` flag which is used in `forge update` to decide /// whether to update a dep or not. This flag is skipped during serialization. #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub enum DepIdentifier { /// `name` of the branch and the `rev` it is currently pointing to. #[serde(rename = "branch")] Branch { name: String, rev: String }, /// Release tag `name` and the `rev` it is currently pointing to. 
#[serde(rename = "tag")] Tag { name: String, rev: String }, /// Commit hash `rev` the submodule is currently pointing to. #[serde(rename = "rev", untagged)] Rev { rev: String }, } impl DepIdentifier { /// Get the commit hash of the dependency. pub fn rev(&self) -> &str { match self { Self::Branch { rev, .. } => rev, Self::Tag { rev, .. } => rev, Self::Rev { rev, .. } => rev, } } /// Get the name of the dependency. /// /// In case of a Rev, this will return the commit hash. pub fn name(&self) -> &str { match self { Self::Branch { name, .. } => name, Self::Tag { name, .. } => name, Self::Rev { rev, .. } => rev, } } /// Get the name/rev to checkout at. pub fn checkout_id(&self) -> &str { match self { Self::Branch { name, .. } => name, Self::Tag { name, .. } => name, Self::Rev { rev, .. } => rev, } } /// Returns whether the dependency is a branch. pub fn is_branch(&self) -> bool { matches!(self, Self::Branch { .. }) } } impl std::fmt::Display for DepIdentifier { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Branch { name, rev, .. } => write!(f, "branch={name}@{rev}"), Self::Tag { name, rev, .. } => write!(f, "tag={name}@{rev}"), Self::Rev { rev, .. } => write!(f, "rev={rev}"), } } } ================================================ FILE: crates/core/src/lock.rs ================================================ //! Lockfile handling. //! //! The lockfile contains the resolved dependencies of a project. It is a TOML file with an array of //! dependencies, each containing the name, version, and other information about the dependency. //! //! The lockfile is used to ensure that the same versions of dependencies are installed across //! different machines. It is also used to skip the installation of dependencies that are already //! installed. 
use crate::{config::Dependency, errors::LockError, utils::sanitize_filename};
use log::{debug, warn};
use serde::{Deserialize, Serialize};
use std::{
    fs,
    path::{Path, PathBuf},
};

pub mod forge;

// File name of the Soldeer lockfile at the project root.
pub const SOLDEER_LOCK: &str = "soldeer.lock";

pub type Result<T> = std::result::Result<T, LockError>;

/// A trait implemented by lockfile entries to provide the install path
pub trait Integrity {
    /// Returns the install path of the dependency.
    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf;

    /// Returns the integrity checksum if relevant.
    fn integrity(&self) -> Option<&String>;
}

/// A lock entry for a git dependency.
#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]
#[builder(on(String, into))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[non_exhaustive]
pub struct GitLockEntry {
    /// The name of the dependency.
    pub name: String,
    /// The version (this corresponds to the version requirement of the dependency).
    pub version: String,
    /// The git url of the dependency.
    pub git: String,
    /// The resolved git commit hash.
    pub rev: String,
}

impl Integrity for GitLockEntry {
    /// Returns the install path of the dependency.
    ///
    /// The directory does not need to exist. Since the lock entry contains the version,
    /// the install path can be calculated without needing to check the actual directory.
    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {
        format_install_path(&self.name, &self.version, deps)
    }

    /// There is no integrity checksum for git lock entries
    fn integrity(&self) -> Option<&String> {
        None
    }
}

/// A lock entry for an HTTP dependency.
#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]
#[builder(on(String, into))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[non_exhaustive]
pub struct HttpLockEntry {
    /// The name of the dependency.
    pub name: String,
    /// The resolved version of the dependency (not necessarily matches the version requirement of
    /// the dependency).
    ///
    /// If the version req is a semver range, then this will be the exact version that was
    /// resolved.
    pub version: String,
    /// The URL from where the dependency was downloaded.
    pub url: String,
    /// The checksum of the downloaded zip file.
    pub checksum: String,
    /// The integrity hash of the downloaded zip file after extraction.
    pub integrity: String,
}

impl Integrity for HttpLockEntry {
    /// Returns the install path of the dependency.
    ///
    /// The directory does not need to exist. Since the lock entry contains the version,
    /// the install path can be calculated without needing to check the actual directory.
    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {
        format_install_path(&self.name, &self.version, deps)
    }

    /// Returns the integrity checksum
    fn integrity(&self) -> Option<&String> {
        Some(&self.integrity)
    }
}

/// A lock entry for a private dependency.
///
/// The link is not stored in the lockfile as it must be fetched from the registry with a valid
/// token before each download.
#[derive(Debug, Clone, PartialEq, Eq, Hash, bon::Builder)]
#[builder(on(String, into))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[non_exhaustive]
pub struct PrivateLockEntry {
    /// The name of the dependency.
    pub name: String,
    /// The resolved version of the dependency (not necessarily matches the version requirement of
    /// the dependency).
    ///
    /// If the version req is a semver range, then this will be the exact version that was
    /// resolved.
    pub version: String,
    /// The checksum of the downloaded zip file.
    pub checksum: String,
    /// The integrity hash of the downloaded zip file after extraction.
    pub integrity: String,
}

impl Integrity for PrivateLockEntry {
    /// Returns the install path of the dependency.
    ///
    /// The directory does not need to exist. Since the lock entry contains the version,
    /// the install path can be calculated without needing to check the actual directory.
    fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {
        format_install_path(&self.name, &self.version, deps)
    }

    /// Returns the integrity checksum
    fn integrity(&self) -> Option<&String> {
        Some(&self.integrity)
    }
}

/// A lock entry for a dependency.
///
/// A builder should be used to create the underlying [`HttpLockEntry`] or [`GitLockEntry`] and then
/// converted into this type with `.into()`.
///
/// # Examples
///
/// ```
/// # use soldeer_core::lock::{LockEntry, HttpLockEntry};
/// let dep: LockEntry = HttpLockEntry::builder()
///     .name("my-dep")
///     .version("1.2.3")
///     .url("https://...")
///     .checksum("dead")
///     .integrity("beef")
///     .build()
///     .into();
/// ```
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
#[non_exhaustive]
pub enum LockEntry {
    /// A lock entry for an HTTP dependency.
    Http(HttpLockEntry),
    /// A lock entry for a git dependency.
    Git(GitLockEntry),
    /// A lock entry for a private dependency.
    Private(PrivateLockEntry),
}

/// A TOML representation of a lock entry, which merges all fields from the two variants of
/// [`LockEntry`].
///
/// This is used to serialize and deserialize lock entries to and from TOML. All fields which are
/// not present in both variants are optional.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct TomlLockEntry {
    pub name: String,
    pub version: String,
    pub git: Option<String>,
    pub url: Option<String>,
    pub rev: Option<String>,
    pub checksum: Option<String>,
    pub integrity: Option<String>,
}

impl From<LockEntry> for TomlLockEntry {
    /// Convert a [`LockEntry`] into a [`TomlLockEntry`].
    fn from(value: LockEntry) -> Self {
        match value {
            LockEntry::Http(lock) => Self {
                name: lock.name,
                version: lock.version,
                git: None,
                url: Some(lock.url),
                rev: None,
                checksum: Some(lock.checksum),
                integrity: Some(lock.integrity),
            },
            LockEntry::Git(lock) => Self {
                name: lock.name,
                version: lock.version,
                git: Some(lock.git),
                url: None,
                rev: Some(lock.rev),
                checksum: None,
                integrity: None,
            },
            // private entries carry neither a url nor a git remote in the lockfile
            LockEntry::Private(lock) => Self {
                name: lock.name,
                version: lock.version,
                git: None,
                url: None,
                rev: None,
                checksum: Some(lock.checksum),
                integrity: Some(lock.integrity),
            },
        }
    }
}

impl TryFrom<TomlLockEntry> for LockEntry {
    type Error = LockError;

    /// Convert a [`TomlLockEntry`] into a [`LockEntry`] if possible.
    fn try_from(value: TomlLockEntry) -> std::result::Result<Self, Self::Error> {
        // The variant is inferred from which of `url`/`git` is present:
        // neither -> private, git only -> git, url only -> http, both -> invalid.
        match (value.url, value.git) {
            (None, None) => Ok(PrivateLockEntry::builder()
                .name(&value.name)
                .version(value.version)
                .checksum(value.checksum.ok_or(LockError::MissingField {
                    field: "checksum".to_string(),
                    dep: value.name.clone(),
                })?)
                .integrity(value.integrity.ok_or(LockError::MissingField {
                    field: "integrity".to_string(),
                    dep: value.name,
                })?)
                .build()
                .into()),
            (None, Some(git)) => {
                Ok(GitLockEntry::builder()
                    .name(&value.name)
                    .version(value.version)
                    .git(git)
                    .rev(value.rev.ok_or(LockError::MissingField {
                        field: "rev".to_string(),
                        dep: value.name,
                    })?)
                    .build()
                    .into())
            }
            (Some(url), None) => Ok(HttpLockEntry::builder()
                .name(&value.name)
                .version(value.version)
                .url(url)
                .checksum(value.checksum.ok_or(LockError::MissingField {
                    field: "checksum".to_string(),
                    dep: value.name.clone(),
                })?)
                .integrity(value.integrity.ok_or(LockError::MissingField {
                    field: "integrity".to_string(),
                    dep: value.name,
                })?)
                .build()
                .into()),
            (Some(_), Some(_)) => Err(LockError::InvalidLockEntry),
        }
    }
}

impl LockEntry {
    /// The name of the dependency.
    pub fn name(&self) -> &str {
        match self {
            Self::Git(lock) => &lock.name,
            Self::Http(lock) => &lock.name,
            Self::Private(lock) => &lock.name,
        }
    }

    /// The version of the dependency.
    pub fn version(&self) -> &str {
        match self {
            Self::Git(lock) => &lock.version,
            Self::Http(lock) => &lock.version,
            Self::Private(lock) => &lock.version,
        }
    }

    /// The install path of the dependency.
    pub fn install_path(&self, deps: impl AsRef<Path>) -> PathBuf {
        match self {
            Self::Git(lock) => lock.install_path(deps),
            Self::Http(lock) => lock.install_path(deps),
            Self::Private(lock) => lock.install_path(deps),
        }
    }

    /// Get the underlying [`HttpLockEntry`] if this is an HTTP lock entry.
    pub fn as_http(&self) -> Option<&HttpLockEntry> {
        if let Self::Http(l) = self { Some(l) } else { None }
    }

    /// Get the underlying [`GitLockEntry`] if this is a git lock entry.
    pub fn as_git(&self) -> Option<&GitLockEntry> {
        if let Self::Git(l) = self { Some(l) } else { None }
    }

    /// Get the underlying [`PrivateLockEntry`] if this is a private package lock entry.
    pub fn as_private(&self) -> Option<&PrivateLockEntry> {
        if let Self::Private(l) = self { Some(l) } else { None }
    }
}

impl From<HttpLockEntry> for LockEntry {
    /// Wrap an [`HttpLockEntry`] in a [`LockEntry`].
    fn from(value: HttpLockEntry) -> Self {
        Self::Http(value)
    }
}

impl From<GitLockEntry> for LockEntry {
    /// Wrap a [`GitLockEntry`] in a [`LockEntry`].
    fn from(value: GitLockEntry) -> Self {
        Self::Git(value)
    }
}

impl From<PrivateLockEntry> for LockEntry {
    /// Wrap a [`PrivateLockEntry`] in a [`LockEntry`].
    fn from(value: PrivateLockEntry) -> Self {
        Self::Private(value)
    }
}

/// A parsed TOML lock file.
///
/// The lockfile is a table with one entry `dependencies` containing an array of [`TomlLockEntry`]s.
#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash)]
struct LockFileParsed {
    dependencies: Vec<TomlLockEntry>,
}

/// The result of reading and parsing a lock file.
///
/// The [`TomlLockEntry`]s are converted into [`LockEntry`]s. A copy of the text contents of
/// the lockfile is provided for diffing purposes.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct LockFile { /// The parsed lock entries. pub entries: Vec, /// The raw contents of the lockfile. pub raw: String, } /// Read a lockfile from disk. pub fn read_lockfile(path: impl AsRef) -> Result { if !path.as_ref().exists() { debug!(path:? = path.as_ref(); "lockfile does not exist"); return Ok(LockFile::default()); } let contents = fs::read_to_string(&path)?; let data: LockFileParsed = toml_edit::de::from_str(&contents) .inspect_err(|err| { warn!(err:?; "error while parsing lockfile contents, it will be ignored"); }) .unwrap_or_default(); Ok(LockFile { entries: data.dependencies.into_iter().filter_map(|d| d.try_into().ok()).collect(), raw: contents, }) } /// Generate the contents of a lockfile from a list of lock entries. /// /// The entries do not need to be sorted, they will be sorted by name. pub fn generate_lockfile_contents(mut entries: Vec) -> String { entries.sort_unstable_by(|a, b| a.name().cmp(b.name())); let data = LockFileParsed { dependencies: entries.into_iter().map(Into::into).collect() }; toml_edit::ser::to_string_pretty(&data).expect("Lock entries should be serializable") } /// Add a lock entry to a lockfile. /// /// If an entry with the same name already exists, it will be replaced. /// The entries are sorted by name before being written back to the file. pub fn add_to_lockfile(entry: LockEntry, path: impl AsRef) -> Result<()> { let mut lockfile = read_lockfile(&path)?; if let Some(index) = lockfile.entries.iter().position(|e| e.name() == entry.name()) { debug!(name = entry.name(); "replacing existing lockfile entry"); let _ = std::mem::replace(&mut lockfile.entries[index], entry); } else { debug!(name = entry.name(); "adding new lockfile entry"); lockfile.entries.push(entry); } let new_contents = generate_lockfile_contents(lockfile.entries); fs::write(&path, new_contents)?; debug!(path:? 
= path.as_ref(); "lockfile modified"); Ok(()) } /// Remove a lock entry from a lockfile, matching on the name. /// /// If the entry is the last entry in the lockfile, the lockfile will be removed. pub fn remove_lock(dependency: &Dependency, path: impl AsRef) -> Result<()> { let lockfile = read_lockfile(&path)?; let entries: Vec<_> = lockfile .entries .into_iter() .filter_map(|e| if e.name() != dependency.name() { Some(e.into()) } else { None }) .collect(); if entries.is_empty() { // remove lock file if there are no deps left debug!(path:? = path.as_ref(); "no remaining lockfile entry, deleting file"); let _ = fs::remove_file(&path); return Ok(()); } let file_contents = toml_edit::ser::to_string_pretty(&LockFileParsed { dependencies: entries })?; // replace contents of lockfile with new contents fs::write(&path, file_contents)?; debug!(path:? = path.as_ref(); "lockfile modified"); Ok(()) } /// Format the install path of a dependency. /// /// The folder name is sanitized to remove disallowed characters. 
pub fn format_install_path(name: &str, version: &str, deps: impl AsRef) -> PathBuf { deps.as_ref().join(sanitize_filename(&format!("{name}-{version}"))) } #[cfg(test)] mod tests { use super::*; use testdir::testdir; #[test] fn test_toml_to_lock_entry_conversion_http() { let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: None, url: Some("https://example.com/zip.zip".to_string()), rev: None, checksum: Some("123456".to_string()), integrity: Some("beef".to_string()), }; let entry: Result = toml_entry.try_into(); assert!(entry.is_ok(), "{entry:?}"); let entry = entry.unwrap(); assert_eq!(entry.name(), "test"); assert_eq!(entry.version(), "1.0.0"); let http = entry.as_http().unwrap(); assert_eq!(http.url, "https://example.com/zip.zip"); assert_eq!(http.checksum, "123456"); assert_eq!(http.integrity, "beef"); } #[test] fn test_toml_to_lock_entry_conversion_git() { let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: Some("git@github.com:test/test.git".to_string()), url: None, rev: Some("123456".to_string()), checksum: None, integrity: None, }; let entry: Result = toml_entry.try_into(); assert!(entry.is_ok(), "{entry:?}"); let entry = entry.unwrap(); assert_eq!(entry.name(), "test"); assert_eq!(entry.version(), "1.0.0"); let git = entry.as_git().unwrap(); assert_eq!(git.git, "git@github.com:test/test.git"); assert_eq!(git.rev, "123456"); } #[test] fn test_toml_lock_entry_bad_http() { let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: None, url: Some("https://example.com/zip.zip".to_string()), rev: None, checksum: None, integrity: None, }; let entry: Result = toml_entry.try_into(); assert!( matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == "checksum"), "{entry:?}" ); let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: None, url: Some("https://example.com/zip.zip".to_string()), 
rev: None, checksum: Some("123456".to_string()), integrity: None, }; let entry: Result = toml_entry.try_into(); assert!( matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == "integrity"), "{entry:?}" ); } #[test] fn test_toml_lock_entry_bad_private() { let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: None, url: None, rev: None, checksum: None, integrity: None, }; let entry: Result = toml_entry.try_into(); assert!( matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == "checksum"), "{entry:?}" ); } #[test] fn test_toml_lock_entry_bad_git() { let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: Some("git@github.com:test/test.git".to_string()), url: Some("https://example.com/zip.zip".to_string()), rev: None, checksum: None, integrity: None, }; let entry: Result = toml_entry.try_into(); assert!(matches!(entry, Err(LockError::InvalidLockEntry)), "{entry:?}"); let toml_entry = TomlLockEntry { name: "test".to_string(), version: "1.0.0".to_string(), git: Some("git@github.com:test/test.git".to_string()), url: None, rev: None, checksum: None, integrity: None, }; let entry: Result = toml_entry.try_into(); assert!( matches!(entry, Err(LockError::MissingField { ref field, dep: _ }) if field == "rev"), "{entry:?}" ); } #[test] fn test_read_lockfile() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); // last entry is invalid and should be skipped let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" [[dependencies]] name = "test2" version = "1.0.0" url = "https://example.com/zip.zip" checksum = "123456" integrity = "beef" [[dependencies]] name = "test3" version = "1.0.0" "#; fs::write(&file_path, content).unwrap(); let res = read_lockfile(&file_path); assert!(res.is_ok(), "{res:?}"); let lockfile = res.unwrap(); assert_eq!(lockfile.entries.len(), 2); 
assert_eq!(lockfile.entries[0].name(), "test"); assert_eq!(lockfile.entries[0].version(), "1.0.0"); let git = lockfile.entries[0].as_git().unwrap(); assert_eq!(git.git, "git@github.com:test/test.git"); assert_eq!(git.rev, "123456"); assert_eq!(lockfile.entries[1].name(), "test2"); assert_eq!(lockfile.entries[1].version(), "1.0.0"); let http = lockfile.entries[1].as_http().unwrap(); assert_eq!(http.url, "https://example.com/zip.zip"); assert_eq!(http.checksum, "123456"); assert_eq!(http.integrity, "beef"); assert_eq!(lockfile.raw, content); } #[test] fn test_generate_lockfile_content() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" [[dependencies]] name = "test2" version = "1.0.0" url = "https://example.com/zip.zip" checksum = "123456" integrity = "beef" "#; fs::write(&file_path, content).unwrap(); let lockfile = read_lockfile(&file_path).unwrap(); let new_content = generate_lockfile_contents(lockfile.entries); assert_eq!(new_content, content); } #[test] fn test_add_to_lockfile() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" "#; fs::write(&file_path, content).unwrap(); let entry: LockEntry = HttpLockEntry::builder() .name("test2") .version("1.0.0") .url("https://example.com/zip.zip") .checksum("123456") .integrity("beef") .build() .into(); let res = add_to_lockfile(entry.clone(), &file_path); assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(&file_path).unwrap(); assert_eq!(lockfile.entries.len(), 2); assert_eq!(lockfile.entries[1], entry); } #[test] fn test_replace_in_lockfile() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" "#; fs::write(&file_path, 
content).unwrap(); let entry: LockEntry = HttpLockEntry::builder() .name("test") .version("2.0.0") .url("https://example.com/zip.zip") .checksum("123456") .integrity("beef") .build() .into(); let res = add_to_lockfile(entry.clone(), &file_path); assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(&file_path).unwrap(); assert_eq!(lockfile.entries.len(), 1); assert_eq!(lockfile.entries[0], entry); } #[test] fn test_remove_lock() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" [[dependencies]] name = "test2" version = "1.0.0" url = "https://example.com/zip.zip" checksum = "123456" integrity = "beef" "#; fs::write(&file_path, content).unwrap(); let dep = Dependency::from_name_version("test2~2.0.0", None, None).unwrap(); let res = remove_lock(&dep, &file_path); assert!(res.is_ok(), "{res:?}"); let lockfile = read_lockfile(&file_path).unwrap(); assert_eq!(lockfile.entries.len(), 1); assert_eq!(lockfile.entries[0].name(), "test"); } #[test] fn test_remove_lock_empty() { let dir = testdir!(); let file_path = dir.join(SOLDEER_LOCK); let content = r#"[[dependencies]] name = "test" version = "1.0.0" git = "git@github.com:test/test.git" rev = "123456" "#; fs::write(&file_path, content).unwrap(); let dep = Dependency::from_name_version("test~1.0.0", None, None).unwrap(); let res = remove_lock(&dep, &file_path); assert!(res.is_ok(), "{res:?}"); assert!(!file_path.exists()); } } ================================================ FILE: crates/core/src/push.rs ================================================ //! Handle publishing of a dependency to the registry. 
use crate::{ auth::get_token, errors::{AuthError, PublishError}, registry::{api_url, get_project_id}, }; use ignore::{WalkBuilder, WalkState}; use log::debug; use path_slash::{PathBufExt as _, PathExt as _}; use regex::Regex; use reqwest::{ Client, StatusCode, header::{AUTHORIZATION, CONTENT_TYPE, HeaderMap, HeaderValue}, multipart::{Form, Part}, }; use std::{ fs, io::{Read as _, Write as _}, path::{Path, PathBuf}, sync::mpsc, }; use zip::{CompressionMethod, ZipWriter, write::SimpleFileOptions}; pub type Result = std::result::Result; /// Push a new version of a dependency to the registry. /// /// The provided root folder will be zipped and uploaded to the registry, then deleted, unless the /// `dry_run` argument is set to `true`. In that case, the function will only create the zip file /// and return its path. /// /// An authentication token is required to push a zip file to the registry. The token is retrieved /// from the login file (see [`login_file_path`][crate::utils::login_file_path] and /// [`execute_login`][crate::auth::execute_login]). pub async fn push_version( dependency_name: &str, dependency_version: &str, root_directory_path: impl AsRef, files_to_copy: &[PathBuf], dry_run: bool, ) -> Result> { let file_name = root_directory_path.as_ref().file_name().expect("path should have a last component"); let zip_archive = match zip_file(&root_directory_path, files_to_copy, file_name) { Ok(zip) => zip, Err(err) => { return Err(err); } }; debug!(root:? 
= root_directory_path.as_ref(), zip_archive:?; "created zip file from folder"); if dry_run { debug!(zip_archive:?; "push dry run, zip file created but not uploading"); return Ok(Some(PathBuf::from_slash_lossy(&zip_archive))); } if let Err(error) = push_to_repo(&zip_archive, dependency_name, dependency_version).await { let _ = fs::remove_file(&zip_archive); debug!(zip_archive:?; "zip file deleted"); return Err(error); } let _ = fs::remove_file(&zip_archive); debug!(zip_archive:?; "zip file deleted"); Ok(None) } /// Validate the name of a dependency. /// /// The name must be between 3 and 100 characters long, and can only contain lowercase letters, /// numbers, hyphens and the `@` symbol. It cannot start or end with a hyphen. pub fn validate_name(name: &str) -> Result<()> { let regex = Regex::new(r"^[@|a-z0-9][a-z0-9-]*[a-z0-9]$").expect("regex should compile"); if !regex.is_match(name) { debug!(name; "package name contains unauthorized characters"); return Err(PublishError::InvalidName); } if !(3..=100).contains(&name.len()) { debug!(name; "package name is too short or too long"); return Err(PublishError::InvalidName); } Ok(()) } pub fn validate_version(version: &str) -> Result<()> { if version.is_empty() { return Err(PublishError::EmptyVersion); } Ok(()) } /// Create a zip file from a list of files. /// /// The zip file will be created in the root directory, with the provided name and the `.zip` /// extension. The function returns the path to the created zip file. pub fn zip_file( root_directory_path: impl AsRef, files_to_copy: &[PathBuf], file_name: impl Into, ) -> Result { let mut file_name: PathBuf = file_name.into(); file_name.set_extension("zip"); let zip_file_path = root_directory_path.as_ref().join(file_name); let file = fs::File::create(&zip_file_path) .map_err(|e| PublishError::IOError { path: zip_file_path.clone(), source: e })?; debug!(path:? 
= zip_file_path; "zip file handle created"); let mut zip = ZipWriter::new(file); let options = SimpleFileOptions::default().compression_method(CompressionMethod::Deflated); if files_to_copy.is_empty() { return Err(PublishError::NoFiles); } let mut added_dirs = Vec::new(); for file_path in files_to_copy { let path = file_path.as_path(); if !path.is_file() { debug!(path:?; "skipping non-file entry"); continue; } // This is the relative path, we basically get the relative path to the target folder // that we want to push and zip that as a name so we won't screw up the // file/dir hierarchy in the zip file. let relative_file_path = file_path.strip_prefix(root_directory_path.as_ref())?; debug!(relative_path:? = relative_file_path; "resolved relative file path for zip archive"); // we add folders explicitly to the zip file, some tools might not handle this properly // otherwise if let Some(parent) = relative_file_path.parent() && !parent.as_os_str().is_empty() && !added_dirs.contains(&parent) { zip.add_directory(parent.to_slash_lossy(), options)?; debug!(folder:? = parent; "added parent directory in zip archive"); added_dirs.push(parent); } let mut f = fs::File::open(file_path.clone()) .map_err(|e| PublishError::IOError { path: file_path.clone(), source: e })?; let mut buffer = Vec::new(); zip.start_file(relative_file_path.to_slash_lossy(), options)?; f.read_to_end(&mut buffer) .map_err(|e| PublishError::IOError { path: file_path.clone(), source: e })?; zip.write_all(&buffer) .map_err(|e| PublishError::IOError { path: zip_file_path.clone(), source: e })?; debug!(file:? = path; "file added to zip archive"); } zip.finish()?; debug!(path:? = zip_file_path; "zip archive written"); Ok(zip_file_path) } /// Filter the files in a directory according to ignore rules. /// /// The following ignore files are supported: /// - `.ignore` /// - `.gitignore` (including any global one) /// - `.git/info/exclude` /// - `.soldeerignore` /// /// The `.git` folders are always skipped. 
pub fn filter_ignored_files(root_directory_path: impl AsRef) -> Vec { let (tx, rx) = mpsc::channel::(); let walker = WalkBuilder::new(root_directory_path) .add_custom_ignore_filename(".soldeerignore") .hidden(false) .filter_entry(|entry| { !(entry.path().is_dir() && entry.path().file_name().unwrap_or_default() == ".git") }) .build_parallel(); walker.run(|| { let tx = tx.clone(); // function executed for each DirEntry Box::new(move |result| { let Ok(entry) = result else { return WalkState::Continue; }; let path = entry.path(); if path.is_dir() { debug!(path:?; "ignoring dir entry"); return WalkState::Continue; } debug!(path:?; "adding file to list"); tx.send(path.to_path_buf()) .expect("Channel receiver should never be dropped before end of function scope"); WalkState::Continue }) }); drop(tx); // this cannot happen before tx is dropped safely let mut files = Vec::new(); while let Ok(path) = rx.recv() { files.push(path); } files } /// Push a zip file to the registry. /// /// An authentication token is required to push a zip file to the registry. The token is retrieved /// from the login file (see [`login_file_path`][crate::utils::login_file_path] and /// [`execute_login`][crate::auth::execute_login]). 
async fn push_to_repo(
    zip_file: &Path,
    dependency_name: &str,
    dependency_version: &str,
) -> Result<()> {
    debug!(zip_file:?; "uploading zip archive to registry");
    // uploading is never anonymous: the bearer token comes from the login file
    let token = get_token()?;
    let client = Client::new();
    let url = api_url("v1", "revision/upload", &[]);
    let mut headers: HeaderMap = HeaderMap::new();
    let header_string = format!("Bearer {token}");
    let header_value = HeaderValue::from_str(&header_string);
    headers.insert(AUTHORIZATION, header_value.expect("Could not set auth header"));
    // the whole archive is read into memory before building the multipart body
    let file_fs = fs::read(zip_file)
        .map_err(|e| PublishError::IOError { path: zip_file.to_path_buf(), source: e })?;
    // the multipart part is named after the zip's final path component
    let mut part = Part::bytes(file_fs).file_name(
        zip_file
            .file_name()
            .expect("path should have a last component")
            .to_string_lossy()
            .into_owned(),
    );
    // set the mime as app zip
    part = part.mime_str("application/zip").expect("Could not set mime type");
    let project_id = get_project_id(dependency_name).await?;
    debug!(project_id; "project ID fetched from registry");
    let form = Form::new()
        .text("project_id", project_id)
        .text("revision", dependency_version.to_string())
        .part("zip_name", part);
    // the Content-Type header must carry the same boundary the form generated
    headers.insert(
        CONTENT_TYPE,
        HeaderValue::from_str(&("multipart/form-data; boundary=".to_owned() + form.boundary()))
            .expect("Could not set content type"),
    );
    let response = client.post(url).headers(headers.clone()).multipart(form).send().await?;
    // map the registry's status codes onto domain-specific publish errors
    match response.status() {
        StatusCode::OK => Ok(()),
        // NO_CONTENT: the registry signals an unknown project this way
        StatusCode::NO_CONTENT => Err(PublishError::ProjectNotFound),
        // ALREADY_REPORTED: this name+revision was pushed before
        StatusCode::ALREADY_REPORTED => Err(PublishError::AlreadyExists),
        StatusCode::UNAUTHORIZED => Err(PublishError::AuthError(AuthError::InvalidCredentials)),
        StatusCode::PAYLOAD_TOO_LARGE => Err(PublishError::PayloadTooLarge),
        // any other 4xx/5xx is surfaced as a plain HTTP error
        s if s.is_server_error() || s.is_client_error() => Err(PublishError::HttpError(
            response.error_for_status().expect_err("result should be an error"),
        )),
        _ => Err(PublishError::UnknownError),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::download::unzip_file;
    use std::fs;
    use testdir::testdir;
#[test] fn test_validate_name() { assert!(validate_name("foo").is_ok()); assert!(validate_name("test").is_ok()); assert!(validate_name("test-123").is_ok()); assert!(validate_name("@test-123").is_ok()); assert!(validate_name("t").is_err()); assert!(validate_name("te").is_err()); assert!(validate_name("@t").is_err()); assert!(validate_name("test@123").is_err()); assert!(validate_name("test-123-").is_err()); assert!(validate_name("foo.bar").is_err()); assert!(validate_name("mypäckage").is_err()); assert!(validate_name(&"a".repeat(101)).is_err()); } #[test] fn test_empty_version() { assert!(validate_version("").is_err()); } #[test] fn test_filter_files_to_copy() { let dir = testdir!(); // ignore file // *.toml // !/broadcast // /broadcast/31337/ // /broadcast/*/dry_run/ fs::write( dir.join(".soldeerignore"), "*.toml\n!/broadcast\n/broadcast/31337/\n/broadcast/*/dry_run/\n", ) .unwrap(); let mut ignored = Vec::new(); let mut included = vec![dir.join(".soldeerignore")]; // test structure // - testdir/ // --- .soldeerignore <= not ignored // --- random_dir/ // --- --- random.toml <= ignored // --- --- random.zip <= not ignored // --- broadcast/ // --- --- random.toml <= ignored // --- --- random.zip <= not ignored // --- --- 31337/ // --- --- --- random.toml <= ignored // --- --- --- random.zip <= ignored // --- --- random_dir_in_broadcast/ // --- --- --- random.zip <= not ignored // --- --- --- random.toml <= ignored // --- --- --- dry_run/ // --- --- --- --- zip <= ignored // --- --- --- --- toml <= ignored fs::create_dir(dir.join("random_dir")).unwrap(); fs::create_dir(dir.join("broadcast")).unwrap(); fs::create_dir(dir.join("broadcast/31337")).unwrap(); fs::create_dir(dir.join("broadcast/random_dir_in_broadcast")).unwrap(); fs::create_dir(dir.join("broadcast/random_dir_in_broadcast/dry_run")).unwrap(); ignored.push(dir.join("random_dir/random.toml")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); included.push(dir.join("random_dir/random.zip")); 
fs::write(included.last().unwrap(), "included").unwrap(); ignored.push(dir.join("broadcast/random.toml")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); included.push(dir.join("broadcast/random.zip")); fs::write(included.last().unwrap(), "included").unwrap(); ignored.push(dir.join("broadcast/31337/random.toml")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); ignored.push(dir.join("broadcast/31337/random.zip")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); included.push(dir.join("broadcast/random_dir_in_broadcast/random.zip")); fs::write(included.last().unwrap(), "included").unwrap(); ignored.push(dir.join("broadcast/random_dir_in_broadcast/random.toml")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); ignored.push(dir.join("broadcast/random_dir_in_broadcast/dry_run/zip")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); ignored.push(dir.join("broadcast/random_dir_in_broadcast/dry_run/toml")); fs::write(ignored.last().unwrap(), "ignored").unwrap(); let res = filter_ignored_files(&dir); assert_eq!(res.len(), included.len()); for r in res { assert!(included.contains(&r)); } } #[tokio::test] async fn test_zip_file() { let dir = testdir!().join("test_zip"); fs::create_dir(&dir).unwrap(); let mut files = Vec::new(); files.push(dir.join("a.txt")); fs::write(files.last().unwrap(), "test").unwrap(); files.push(dir.join("b.txt")); fs::write(files.last().unwrap(), "test").unwrap(); fs::create_dir(dir.join("sub")).unwrap(); files.push(dir.join("sub/c.txt")); fs::write(files.last().unwrap(), "test").unwrap(); fs::create_dir(dir.join("sub/sub")).unwrap(); files.push(dir.join("sub/sub/d.txt")); fs::write(files.last().unwrap(), "test").unwrap(); fs::create_dir(dir.join("empty")).unwrap(); let res = zip_file(&dir, &files, "test"); assert!(res.is_ok(), "{res:?}"); fs::copy(dir.join("test.zip"), testdir!().join("test.zip")).unwrap(); fs::remove_dir_all(&dir).unwrap(); fs::create_dir(&dir).unwrap(); 
unzip_file(testdir!().join("test.zip"), &dir).await.unwrap(); for f in files { assert!(f.exists()); } } } ================================================ FILE: crates/core/src/registry.rs ================================================ //! Soldeer registry client. //! //! The registry client is responsible for fetching information about packages from the Soldeer //! registry at . use crate::{ auth::get_auth_headers, config::{Dependency, HttpDependency}, errors::RegistryError, }; use chrono::{DateTime, Utc}; use log::{debug, warn}; use reqwest::{Client, Url}; use semver::{Version, VersionReq}; use serde::Deserialize; use std::env; pub type Result = std::result::Result; /// A revision (version) for a project (package). #[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] pub struct Revision { /// The unique ID for the revision. pub id: uuid::Uuid, /// The version of the revision. pub version: String, /// The internal name (path of zip file) for the revision. pub internal_name: String, /// The zip file download URL. pub url: String, /// The project unique ID. pub project_id: uuid::Uuid, /// Whether this revision has been deleted. pub deleted: bool, /// Creation date for the revision. pub created_at: Option>, /// Whether the revision is private. pub private: Option, } /// A project (package) in the registry. #[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] pub struct Project { /// The unique ID for the project. pub id: uuid::Uuid, /// The name of the project. pub name: String, /// The description of the project. pub description: String, /// The URL of the repository on GitHub. pub github_url: String, /// The unique ID for the owner of the project. pub created_by: uuid::Uuid, /// Whether this project has been deleted. pub deleted: Option, /// Whether the project is private. 
pub private: Option, /// Other metadata below pub downloads: Option, pub image: Option, pub long_description: Option, pub created_at: Option>, pub updated_at: Option>, pub organization_id: Option, pub latest_version: Option, pub deprecated: Option, pub organization_name: Option, pub organization_verified: Option, } /// The response from the revision endpoint. #[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] pub struct RevisionResponse { /// The revisions. data: Vec, /// The status of the response. status: String, } /// The response from the project endpoint. #[derive(Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] pub struct ProjectResponse { /// The projects. data: Vec, /// The status of the response. status: String, } /// A download URL for a revision. #[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct DownloadUrl { /// The download URL. pub url: String, /// Whether this revision is private. pub private: bool, } /// Construct a URL for the Soldeer API. /// /// The URL is constructed from the `SOLDEER_API_URL` environment variable, or defaults to /// . The API version prefix and path are appended to the base URL, /// and any query parameters are URL-encoded and appended to the URL. 
///
/// # Examples
///
/// ```
/// # use soldeer_core::registry::api_url;
/// let url = api_url(
///     "v1",
///     "revision",
///     &[("project_name", "forge-std"), ("offset", "0"), ("limit", "1")],
/// );
/// assert_eq!(
///     url.as_str(),
///     "https://api.soldeer.xyz/api/v1/revision?project_name=forge-std&offset=0&limit=1"
/// );
/// ```
pub fn api_url(version: &str, path: &str, params: &[(&str, &str)]) -> Url {
    // base URL can be overridden (e.g. for tests or self-hosting) via SOLDEER_API_URL
    let url = env::var("SOLDEER_API_URL").unwrap_or("https://api.soldeer.xyz".to_string());
    let mut url = Url::parse(&url).expect("SOLDEER_API_URL is invalid");
    url.set_path(&format!("api/{version}/{path}"));
    if params.is_empty() {
        return url;
    }
    // query pairs are percent-encoded by `query_pairs_mut`
    url.query_pairs_mut().extend_pairs(params.iter());
    url
}

/// Get the download URL for a dependency at a specific version.
// NOTE(review): the return type's generic parameter appears stripped by the
// extraction (presumably `Result<DownloadUrl>`) — confirm against the repo.
pub async fn get_dependency_url_remote(
    dependency: &Dependency,
    version: &str,
) -> Result {
    debug!(dep:% = dependency; "retrieving URL for dependency");
    let url = api_url(
        "v1",
        "revision-cli",
        &[("project_name", dependency.name()), ("revision", version)],
    );
    // auth headers are attached so private revisions can be resolved too
    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;
    let res = res.error_for_status()?;
    let revision: RevisionResponse = res.json().await?;
    // the API returns a list; only the first entry is used
    let Some(r) = revision.data.first() else {
        return Err(RegistryError::URLNotFound(dependency.to_string()));
    };
    debug!(dep:% = dependency, url = r.url; "URL for dependency was found");
    // a missing `private` field defaults to public
    Ok(DownloadUrl { url: r.url.clone(), private: r.private.unwrap_or_default() })
}

/// Get the unique ID for a project by name.
pub async fn get_project_id(dependency_name: &str) -> Result {
    debug!(name = dependency_name; "retrieving project ID");
    // note: this uses the v2 project endpoint, unlike the v1 revision endpoints
    let url = api_url("v2", "project", &[("project_name", dependency_name)]);
    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;
    let res = res.error_for_status()?;
    let project: ProjectResponse = res.json().await?;
    // only the first project in the response is considered
    let Some(p) = project.data.first() else {
        return Err(RegistryError::ProjectNotFound(dependency_name.to_string()));
    };
    debug!(name = dependency_name, id:% = p.id; "project ID was found");
    Ok(p.id.to_string())
}

/// Get the latest version of a dependency.
pub async fn get_latest_version(dependency_name: &str) -> Result {
    debug!(dep = dependency_name; "retrieving latest version for dependency");
    // limit=1 together with the API's descending ordering yields the newest revision
    let url = api_url(
        "v1",
        "revision",
        &[("project_name", dependency_name), ("offset", "0"), ("limit", "1")],
    );
    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;
    let res = res.error_for_status()?;
    let revision: RevisionResponse = res.json().await?;
    let Some(data) = revision.data.first() else {
        return Err(RegistryError::URLNotFound(dependency_name.to_string()));
    };
    debug!(dep = dependency_name, version = data.version; "latest version found");
    // wrap the result as an HTTP dependency whose requirement pins the found version
    Ok(HttpDependency {
        name: dependency_name.to_string(),
        version_req: data.clone().version,
        url: None,
        project_root: None,
    }
    .into())
}

/// The versions of a dependency.
///
/// If all versions can be parsed as semver, then the versions are sorted in descending order
/// according to semver. If not all versions can be parsed as semver, then the versions are returned
/// in the order they were received from the API (descending creation date).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Versions {
    /// All versions are semver compliant.
    // NOTE(review): `Vec` element types were stripped by extraction
    // (presumably `Vec<Version>` / `Vec<String>`) — confirm against the repo.
    Semver(Vec),
    /// Not all versions are semver compliant.
    NonSemver(Vec),
}

/// Get all versions of a dependency sorted in descending order
///
/// If all versions can be parsed as semver, then the versions are sorted in descending order
/// according to semver. If not all versions can be parsed as semver, then the versions are returned
/// in the order they were received from the API (descending creation date).
pub async fn get_all_versions_descending(dependency_name: &str) -> Result {
    // TODO: provide a more efficient endpoint which already sorts by descending semver if possible
    // and only returns the version strings
    debug!(dep = dependency_name; "retrieving all dependency versions");
    // large limit acts as "fetch all" for practical purposes
    let url = api_url(
        "v1",
        "revision",
        &[("project_name", dependency_name), ("offset", "0"), ("limit", "10000")],
    );
    let res = Client::new().get(url).headers(get_auth_headers()?).send().await?;
    let res = res.error_for_status()?;
    let revision: RevisionResponse = res.json().await?;
    if revision.data.is_empty() {
        return Err(RegistryError::NoVersion(dependency_name.to_string()));
    }
    // all-or-nothing semver parse: one bad version string demotes the whole
    // list to the NonSemver variant (API ordering preserved)
    match revision
        .data
        .iter()
        .map(|r| Version::parse(&r.version))
        .collect::, _>>()
    {
        Ok(mut versions) => {
            debug!(dep = dependency_name; "all versions are semver compliant, sorting by descending version");
            versions.sort_unstable_by(|a, b| b.cmp(a)); // sort in descending order
            Ok(Versions::Semver(versions))
        }
        Err(_) => {
            debug!(dep = dependency_name; "not all versions are semver compliant, using API ordering");
            Ok(Versions::NonSemver(revision.data.iter().map(|r| r.version.to_string()).collect()))
        }
    }
}

/// Get the latest version of a dependency that satisfies the version requirement.
///
/// If the API response contains non-semver-compliant versions, then we attempt to find an exact
/// match for the requirement, or error out.
pub async fn get_latest_supported_version(dependency: &Dependency) -> Result {
    debug!(dep:% = dependency, version_req = dependency.version_req(); "retrieving latest version according to version requirement");
    match get_all_versions_descending(dependency.name()).await? {
        Versions::Semver(all_versions) => {
            match parse_version_req(dependency.version_req()) {
                Some(req) => {
                    // versions are sorted descending, so the first match is the newest
                    let new_version = all_versions
                        .iter()
                        .find(|version| req.matches(version))
                        .ok_or(RegistryError::NoMatchingVersion {
                            dependency: dependency.name().to_string(),
                            version_req: dependency.version_req().to_string(),
                        })?;
                    debug!(dep:% = dependency, version:% = new_version; "acceptable version found");
                    Ok(new_version.to_string())
                }
                None => {
                    warn!(dep:% = dependency, version_req = dependency.version_req(); "could not parse version req according to semver, using latest version");
                    // we can't check which version is newer, so we just take the latest one
                    Ok(all_versions
                        .into_iter()
                        .next()
                        .map(|v| v.to_string())
                        .expect("there should be at least 1 version"))
                }
            }
        }
        Versions::NonSemver(all_versions) => {
            // try to find the exact version specifier in the list of all versions, otherwise error
            // out
            debug!(dep:% = dependency; "versions are not all semver compliant, trying to find exact match");
            all_versions.into_iter().find(|v| v == dependency.version_req()).ok_or_else(|| {
                RegistryError::NoMatchingVersion {
                    dependency: dependency.name().to_string(),
                    version_req: dependency.version_req().to_string(),
                }
            })
        }
    }
}

/// Parse a version requirement string into a `VersionReq`.
///
/// Adds the "equal" operator to the req if it doesn't have an operator.
/// This is necessary because the [`semver`] crate considers no operator to be equivalent to the
/// "compatible" operator, but we want to treat it as the "equal" operator.
pub fn parse_version_req(version_req: &str) -> Option { let Ok(mut req) = version_req.parse::() else { debug!(version_req; "version requirement cannot be parsed by semver"); return None; }; if req.comparators.is_empty() { debug!(version_req; "comparators list is empty (wildcard req), no further action needed"); return Some(req); // wildcard/any version } let orig_items: Vec<_> = version_req.split(',').collect(); // we only perform the operator conversion if we can reference the original string, i.e. if the // parsed result has the same number of comparators as the original string if orig_items.len() == req.comparators.len() { for (comparator, orig) in req.comparators.iter_mut().zip(orig_items) { if comparator.op == semver::Op::Caret && !orig.trim_start_matches(' ').starts_with('^') { debug!(comparator:% = comparator; "adding exact operator for comparator"); comparator.op = semver::Op::Exact; } } } Some(req) } #[cfg(test)] mod tests { use super::*; use mockito::{Matcher, Server}; use temp_env::async_with_vars; #[tokio::test] async fn test_get_dependency_url() { let mut server = Server::new_async().await; let data = r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3391,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision-cli") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let dependency = HttpDependency::builder().name("forge-std").version_req("^1.9.0").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_dependency_url_remote(&dependency, "1.9.2"), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap().url, 
"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip" ); } #[tokio::test] async fn test_get_dependency_url_nomatch() { let mut server = Server::new_async().await; let data = r#"{"data":[],"status":"success"}"#; server .mock("GET", "/api/v1/revision-cli") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let dependency = HttpDependency::builder().name("forge-std").version_req("^1.9.0").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_dependency_url_remote(&dependency, "1.9.2"), ) .await; assert!(matches!(res, Err(RegistryError::URLNotFound(_)))); } #[tokio::test] async fn test_get_project_id() { let mut server = Server::new_async().await; let data = r#"{"data":[{"created_at":"2024-02-27T19:19:23.938837Z","created_by":"96228bb5-f777-4c19-ba72-363d14b8beed","deleted":false,"deprecated":false,"description":"Forge Standard Library is a collection of helpful contracts and libraries for use with Forge and Foundry.","downloads":648041,"github_url":"https://github.com/foundry-rs/forge-std","id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","image":"https://soldeer-resources.s3.amazonaws.com/default_icon.png","latest_version":"1.10.0","long_description":"Description","name":"forge-std","organization_id":"ff9c0d8e-9275-4f6f-a1b7-2e822450a7ba","organization_name":"Soldeer","organization_verified":true,"updated_at":"2024-02-27T19:19:23.938837Z"}],"status":"success"}"#; server .mock("GET", "/api/v2/project") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let res = async_with_vars([("SOLDEER_API_URL", Some(server.url()))], get_project_id("forge-std")) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "37adefe5-9bc6-4777-aaf2-e56277d1f30b"); } #[tokio::test] async fn test_get_project_id_nomatch() { let mut server = Server::new_async().await; let data 
= r#"{"data":[],"status":"success"}"#; server .mock("GET", "/api/v2/project") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let res = async_with_vars([("SOLDEER_API_URL", Some(server.url()))], get_project_id("forge-std")) .await; assert!(matches!(res, Err(RegistryError::ProjectNotFound(_)))); } #[tokio::test] async fn test_get_latest_forge_std() { let mut server = Server::new_async().await; let data = r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3391,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let dependency = HttpDependency::builder().name("forge-std").version_req("1.9.2").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_latest_version("forge-std"), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), dependency); } #[tokio::test] async fn test_get_all_versions_descending() { let mut server = Server::new_async().await; // data is not sorted in reverse semver order let data = 
r#"{"data":[{"created_at":"2024-07-03T14:44:58.148723Z","deleted":false,"downloads":21,"id":"b463683a-c4b4-40bf-b707-1c4eb343c4d2","internal_name":"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","version":"1.9.0"},{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3389,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"},{"created_at":"2024-07-03T14:44:59.729623Z","deleted":false,"downloads":5290,"id":"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4","internal_name":"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","version":"1.9.1"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_all_versions_descending("forge-std"), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap(), Versions::Semver(vec![ "1.9.2".parse().unwrap(), "1.9.1".parse().unwrap(), "1.9.0".parse().unwrap() ]) ); } #[tokio::test] async fn test_get_latest_supported_version_semver() { let mut server = Server::new_async().await; let data = 
r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3389,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"1.9.2"},{"created_at":"2024-07-03T14:44:59.729623Z","deleted":false,"downloads":5290,"id":"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4","internal_name":"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","version":"1.9.1"},{"created_at":"2024-07-03T14:44:58.148723Z","deleted":false,"downloads":21,"id":"b463683a-c4b4-40bf-b707-1c4eb343c4d2","internal_name":"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","version":"1.9.0"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let dependency: Dependency = HttpDependency::builder().name("forge-std").version_req("^1.9.0").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_latest_supported_version(&dependency), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "1.9.2"); } #[tokio::test] async fn test_get_latest_supported_version_no_semver() { let mut server = Server::new_async().await; let data = 
r#"{"data":[{"created_at":"2024-08-06T17:31:25.751079Z","deleted":false,"downloads":3389,"id":"660132e6-4902-4804-8c4b-7cae0a648054","internal_name":"forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_2_06-08-2024_17:31:25_forge-std-1.9.2.zip","version":"2024-08"},{"created_at":"2024-07-03T14:44:59.729623Z","deleted":false,"downloads":5290,"id":"fa5160fc-ba7b-40fd-8e99-8becd6dadbe4","internal_name":"forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_1_03-07-2024_14:44:59_forge-std-v1.9.1.zip","version":"2024-07"},{"created_at":"2024-07-03T14:44:58.148723Z","deleted":false,"downloads":21,"id":"b463683a-c4b4-40bf-b707-1c4eb343c4d2","internal_name":"forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","project_id":"37adefe5-9bc6-4777-aaf2-e56277d1f30b","url":"https://soldeer-revisions.s3.amazonaws.com/forge-std/v1_9_0_03-07-2024_14:44:57_forge-std-v1.9.0.zip","version":"2024-06"}],"status":"success"}"#; server .mock("GET", "/api/v1/revision") .match_query(Matcher::Any) .with_header("content-type", "application/json") .with_body(data) .create_async() .await; let dependency: Dependency = HttpDependency::builder().name("forge-std").version_req("2024-06").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_latest_supported_version(&dependency), ) .await; assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "2024-06"); // should resolve to the exact match let dependency: Dependency = HttpDependency::builder().name("forge-std").version_req("non-existant").build().into(); let res = async_with_vars( [("SOLDEER_API_URL", Some(server.url()))], get_latest_supported_version(&dependency), ) .await; assert!(matches!(res, Err(RegistryError::NoMatchingVersion { .. 
}))); } #[test] fn test_parse_version_req() { assert_eq!(parse_version_req("1.9.0"), Some(VersionReq::parse("=1.9.0").unwrap())); assert_eq!(parse_version_req("=1.9.0"), Some(VersionReq::parse("=1.9.0").unwrap())); assert_eq!(parse_version_req("^1.9.0"), Some(VersionReq::parse("^1.9.0").unwrap())); assert_eq!( parse_version_req("^1.9.0,^1.10.0"), Some(VersionReq::parse("^1.9.0, ^1.10.0").unwrap()) ); assert_eq!( parse_version_req("1.9.0,1.10.0"), Some(VersionReq::parse("=1.9.0,=1.10.0").unwrap()) ); assert_eq!(parse_version_req(">=1.9.0"), Some(VersionReq::parse(">=1.9.0").unwrap())); assert_eq!(parse_version_req(""), None); assert_eq!(parse_version_req("foobar"), None); assert_eq!(parse_version_req("*"), Some(VersionReq::STAR)); } } ================================================ FILE: crates/core/src/remappings.rs ================================================ //! Remappings management. use crate::{ config::{Dependency, Paths, SoldeerConfig, read_config_deps}, errors::RemappingsError, utils::path_matches, }; use derive_more::derive::From; use log::debug; use path_slash::PathExt as _; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use std::{ fs::{self, File}, io::Write as _, path::PathBuf, }; use toml_edit::{Array, DocumentMut, value}; pub type Result = std::result::Result; /// Action to perform on the remappings. #[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum RemappingsAction { /// Add a dependency to the remappings. Add(Dependency), /// Remove a dependency from the remappings. Remove(Dependency), /// Update the remappings according to the config file. Update, } /// Location where to store the remappings, either in `remappings.txt` or the config file /// (foundry/soldeer). #[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash)] #[serde(rename_all = "lowercase")] pub enum RemappingsLocation { /// Store the remappings in a dedicated `remappings.txt` file. 
#[default] Txt, /// Store the remappings in the `foundry.toml` config file. /// /// Note that remappings are never stored in the `soldeer.toml` file because foundry wouldn't /// be able to read them from there. Config, } /// Generate the remappings for storing into the `remappings.txt` file. /// /// If the `remappings_regenerate` option is set to `true`, then any existing remappings are /// discarded and the remappings are generated from the dependencies in the config file. /// /// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added /// or removed. For the [`RemappingsAction::Update`] action, the existing remappings are merged with /// the dependencies in the config file. pub fn remappings_txt( action: &RemappingsAction, paths: &Paths, soldeer_config: &SoldeerConfig, ) -> Result<()> { if soldeer_config.remappings_regenerate && paths.remappings.exists() { fs::remove_file(&paths.remappings)?; debug!(path:? = paths.remappings; "removed existing remappings file"); } let contents = if paths.remappings.exists() { debug!(path:? = paths.remappings; "reading existing remappings from remappings.txt file"); fs::read_to_string(&paths.remappings)? } else { String::new() }; let existing_remappings: Vec<_> = contents.lines().filter_map(|r| r.split_once('=')).collect(); let new_remappings = generate_remappings(action, paths, soldeer_config, &existing_remappings)?; let mut file = File::create(&paths.remappings)?; for remapping in new_remappings { writeln!(file, "{remapping}")?; } debug!(path:? = paths.remappings; "updated remappings.txt file"); Ok(()) } /// Generate the remappings for storing into the `foundry.toml` config file. /// /// If the `remappings_regenerate` option is set to `true`, then any existing remappings are /// discarded and the remappings are generated from the dependencies in the config file. /// /// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added /// or removed. 
/// For the [`RemappingsAction::Update`] action, the existing remappings are merged with
/// the dependencies in the config file.
///
/// The remappings are added to the default profile in all cases, and to any other profile that
/// already has a `remappings key`. If the profile doesn't have a remappings key, it is left
/// untouched.
pub fn remappings_foundry(
    action: &RemappingsAction,
    paths: &Paths,
    soldeer_config: &SoldeerConfig,
) -> Result<()> {
    let contents = fs::read_to_string(&paths.config)?;
    // NOTE(review): the turbofish parameter was stripped by extraction
    // (presumably `parse::<DocumentMut>()`) — confirm against the repo.
    let mut doc: DocumentMut = contents.parse::().expect("config file should be valid toml");
    let Some(profiles) = doc["profile"].as_table_mut() else {
        // we don't add remappings if there are no profiles
        debug!("no config profile found, skipping remappings generation");
        return Ok(());
    };
    for (name, profile) in profiles.iter_mut() {
        // we normally only edit remappings of profiles which already have a remappings key
        match profile.get_mut("remappings").map(|v| v.as_array_mut()) {
            Some(Some(remappings)) => {
                debug!(name:% = name; "updating remappings for profile");
                // keep only well-formed "alias=path" string entries as existing remappings
                let existing_remappings: Vec<_> = remappings
                    .iter()
                    .filter_map(|r| r.as_str())
                    .filter_map(|r| r.split_once('='))
                    .collect();
                let new_remappings =
                    generate_remappings(action, paths, soldeer_config, &existing_remappings)?;
                // replace the array contents in place to preserve the key's position
                remappings.clear();
                for remapping in new_remappings {
                    remappings.push(remapping);
                }
                format_array(remappings);
            }
            _ => {
                if name == "default" {
                    debug!("updating remappings for default profile");
                    // except the default profile, where we always add the remappings
                    let new_remappings = generate_remappings(action, paths, soldeer_config, &[])?;
                    // NOTE(review): turbofish stripped (presumably `collect::<Array>()`)
                    let mut array = new_remappings.into_iter().collect::();
                    format_array(&mut array);
                    profile["remappings"] = value(array);
                }
            }
        }
    }
    fs::write(&paths.config, doc.to_string())?;
    debug!(path:? = paths.config; "remappings updated in config file");
    Ok(())
}

/// Edit the remappings according to the action and the configuration.
///
/// Depending on the configuration, the remappings are either stored in a `remappings.txt` file or
/// in the `foundry.toml` config file.
///
/// Note that if the config is stored in a dedicated `soldeer.toml` file, then the
/// `remappings_location` setting is ignored and the remappings are always stored in a
/// `remappings.txt` file.
pub fn edit_remappings(
    action: &RemappingsAction,
    config: &SoldeerConfig,
    paths: &Paths,
) -> Result<()> {
    // nothing to do when remappings generation is disabled
    if !config.remappings_generate {
        debug!("skipping remappings update according to config option");
        return Ok(());
    }
    let is_foundry_config = paths.config.to_string_lossy().contains("foundry.toml");
    if is_foundry_config {
        match config.remappings_location {
            RemappingsLocation::Txt => {
                debug!("updating remappings.txt according to config option");
                remappings_txt(action, paths, config)?;
            }
            RemappingsLocation::Config => {
                debug!("updating foundry.toml remappings according to config option");
                remappings_foundry(action, paths, config)?;
            }
        }
    } else {
        // a dedicated soldeer.toml cannot hold remappings that foundry can read
        debug!("updating remappings.txt because config file is soldeer.toml");
        remappings_txt(action, paths, config)?;
    }
    Ok(())
}

/// Format the default left part (alias) for a remappings entry.
///
/// The optional `remappings_prefix` setting is prepended to the dependency name, and the
/// version requirement string is appended (after a hyphen) if the `remappings_version` setting is
/// set to `true`. Finally, a trailing slash is added to the alias.
pub fn format_remap_name(soldeer_config: &SoldeerConfig, dependency: &Dependency) -> String {
    // build the alias incrementally: prefix, name, optional version suffix, trailing slash
    let mut alias = format!("{}{}", soldeer_config.remappings_prefix, dependency.name());
    if soldeer_config.remappings_version {
        alias.push('-');
        // exact-version requirements like `=1.0.0` drop the `=` sign in the alias
        alias.push_str(&dependency.version_req().replace('=', ""));
    }
    alias.push('/');
    alias
}

/// Generate the remappings for a given action.
///
/// If the `remappings_regenerate` option is set to `true`, then any existing remappings are
/// discarded and the remappings are generated from the dependencies in the config file.
/// /// Otherwise, existing remappings are kept, and depending on the action, a remapping entry is added /// or removed. For the [`RemappingsAction::Update`] action, the existing remappings are merged with /// the dependencies in the config file. /// /// Dependencies are sorted alphabetically for consistency. fn generate_remappings( action: &RemappingsAction, paths: &Paths, soldeer_config: &SoldeerConfig, existing_remappings: &[(&str, &str)], ) -> Result> { let mut new_remappings = Vec::new(); if soldeer_config.remappings_regenerate { debug!("ignoring existing remappings and recreating from config"); let (dependencies, _) = read_config_deps(&paths.config)?; new_remappings = remappings_from_deps(&dependencies, paths, soldeer_config)? .into_iter() .map(|i| i.remapping_string) .collect(); } else { match &action { RemappingsAction::Remove(remove_dep) => { debug!(dep:% = remove_dep; "trying to remove dependency from remappings"); // only keep items not matching the dependency to remove if let Ok(remove_og) = get_install_dir_relative(remove_dep, paths) { for (existing_remapped, existing_og) in existing_remappings { // TODO: make the detection smarter, and match on any path where the version // is semver-compatible too. 
if !existing_og.trim_end_matches('/').starts_with(&remove_og) { new_remappings.push(format!("{existing_remapped}={existing_og}")); } else { debug!(dep:% = remove_dep; "found existing remapping corresponding to dependency to remove"); } } } else { debug!(dep:% = remove_dep; "could not find a directory matching the dependency to remove"); for (remapped, og) in existing_remappings { new_remappings.push(format!("{remapped}={og}")); } } } RemappingsAction::Add(add_dep) => { debug!(dep:% = add_dep; "adding remapping for dependency if necessary"); // we only add the remapping if it's not already existing, otherwise we keep the old // remapping let add_dep_remapped = format_remap_name(soldeer_config, add_dep); let add_dep_og = get_install_dir_relative(add_dep, paths)?; let mut found = false; // whether a remapping existed for that dep already for (existing_remapped, existing_og) in existing_remappings { new_remappings.push(format!("{existing_remapped}={existing_og}")); if existing_og.trim_end_matches('/').starts_with(&add_dep_og) { debug!(dep:% = add_dep; "remapping exists already, skipping"); found = true; } } if !found { debug!(dep:% = add_dep; "remapping not found, adding it"); new_remappings.push(format!("{add_dep_remapped}={add_dep_og}/")); } } RemappingsAction::Update => { // This is where we end up in the `update` command if we don't want to re-generate // all remappings. We need to merge existing remappings with the full list of deps. // We generate all remappings from the dependencies, then replace existing items. 
debug!( "updating remappings, merging existing ones with the ones generated from config" ); let (dependencies, _) = read_config_deps(&paths.config)?; let new_remappings_info = remappings_from_deps(&dependencies, paths, soldeer_config)?; if existing_remappings.is_empty() { debug!("no existing remappings, using the ones from config"); new_remappings = new_remappings_info.into_iter().map(|i| i.remapping_string).collect(); } else { let mut existing_remappings = Vec::from(existing_remappings); for RemappingInfo { remapping_string: item, dependency: dep } in new_remappings_info { debug!(dep:% = dep; "trying to find a matching existing remapping for config item"); let (_, item_og) = item.split_once('=').expect("remappings should have two parts"); // try to find all existing items pointing to a matching dependency folder let mut found = false; existing_remappings.retain(|(existing_remapped, existing_og)| { // only keep the first two components of the path (`dependencies` // folder and the dependency folder) let path: PathBuf = PathBuf::from(existing_og).components().take(2).collect(); // if path matches, we should update the item's path with the new // one and add it to the final list if path_matches(&dep, &path) { debug!(path = existing_og; "existing remapping matches the config item"); let path: PathBuf = PathBuf::from(existing_og).components().take(2).collect(); let existing_og_updated = existing_og.replace( path.to_slash_lossy().as_ref(), item_og.trim_end_matches('/'), ); debug!(new_path = existing_og_updated; "updated remapping path"); new_remappings .push(format!("{existing_remapped}={existing_og_updated}")); found = true; // we remove this item from the existing remappings list as it's // been processed return false; } // keep this item to add it to the remappings again later true }); if !found { debug!(dep:% = dep;"no existing remapping found for config item, adding it"); new_remappings.push(item); } } // add extra existing remappings back for (existing_remapped, 
existing_og) in existing_remappings { debug!(path = existing_og; "adding extra remapping which was existing but didn't match a config item"); new_remappings.push(format!("{existing_remapped}={existing_og}")); } } } } } // sort the remappings new_remappings.sort_unstable(); Ok(new_remappings) } #[derive(Debug, Clone, From)] struct RemappingInfo { remapping_string: String, dependency: Dependency, } /// Generate remappings from the dependencies list. /// /// The remappings are generated in the form `alias/=path/`, where `alias` is the dependency name /// with an optional prefix and version requirement suffix, and `path` is the relative path to the /// dependency folder. fn remappings_from_deps( dependencies: &[Dependency], paths: &Paths, soldeer_config: &SoldeerConfig, ) -> Result> { dependencies .par_iter() .map(|dependency| { let dependency_name_formatted = format_remap_name(soldeer_config, dependency); // contains trailing slash let relative_path = get_install_dir_relative(dependency, paths)?; Ok((format!("{dependency_name_formatted}={relative_path}/"), dependency.clone()).into()) }) .collect::>>() } /// Find the install path (relative to project root) for a dependency that was already installed /// /// # Errors /// If the there is no folder in the dependencies folder corresponding to the dependency fn get_install_dir_relative(dependency: &Dependency, paths: &Paths) -> Result { let path = dunce::canonicalize( dependency .install_path_sync(&paths.dependencies) .ok_or(RemappingsError::DependencyNotFound(dependency.to_string()))?, )?; Ok(path .strip_prefix(&paths.root) // already canonicalized .map_err(|_| RemappingsError::DependencyNotFound(dependency.to_string()))? .to_slash_lossy() .to_string()) } /// Format a TOML array as a multi-line array with indentation in case there is more than one /// element. 
/// /// # Examples /// /// ```toml /// [profile.default] /// remappings = [] /// ``` /// /// ```toml /// [profile.default] /// remappings = ["lib1-1.0.0/=dependencies/lib1-1.0.0/"] /// ``` /// /// ```toml /// [profile.default] /// remappings = [ /// "lib1-1.0.0/=dependencies/lib1-1.0.0/", /// "lib2-2.0.0/=dependencies/lib2-2.0.0/", /// ] /// ``` fn format_array(array: &mut Array) { array.fmt(); if (0..=1).contains(&array.len()) { array.set_trailing(""); array.set_trailing_comma(false); } else { for item in array.iter_mut() { item.decor_mut().set_prefix("\n "); } array.set_trailing("\n"); array.set_trailing_comma(true); } } #[cfg(test)] mod tests { use super::*; use crate::config::{GitDependency, HttpDependency}; use testdir::testdir; #[test] fn test_get_install_dir_relative() { let dir = testdir!(); fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap(); let dependencies_dir = dir.join("dependencies"); fs::create_dir_all(&dependencies_dir).unwrap(); let paths = Paths::from_root(&dir).unwrap(); fs::create_dir_all(dependencies_dir.join("dep1-1.1.1")).unwrap(); let dependency = HttpDependency::builder().name("dep1").version_req("^1.0.0").build().into(); let res = get_install_dir_relative(&dependency, &paths); assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "dependencies/dep1-1.1.1"); fs::create_dir_all(dependencies_dir.join("dep2-2.0.0")).unwrap(); let dependency = GitDependency::builder() .name("dep2") .version_req("2.0.0") .git("git@github.com:test/test.git") .build() .into(); let res = get_install_dir_relative(&dependency, &paths); assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), "dependencies/dep2-2.0.0"); let dependency = HttpDependency::builder().name("dep3").version_req("3.0.0").build().into(); let res = get_install_dir_relative(&dependency, &paths); assert!(res.is_err(), "{res:?}"); } #[test] fn test_format_remap_name() { let dependency = HttpDependency::builder().name("dep1").version_req("^1.0.0").build().into(); let res = 
format_remap_name(
            &SoldeerConfig {
                remappings_version: false,
                remappings_prefix: String::new(),
                ..Default::default()
            },
            &dependency,
        );
        assert_eq!(res, "dep1/");
        let res = format_remap_name(
            &SoldeerConfig {
                remappings_version: true,
                remappings_prefix: String::new(),
                ..Default::default()
            },
            &dependency,
        );
        assert_eq!(res, "dep1-^1.0.0/");
        let res = format_remap_name(
            &SoldeerConfig {
                remappings_version: false,
                remappings_prefix: "@".to_string(),
                ..Default::default()
            },
            &dependency,
        );
        assert_eq!(res, "@dep1/");
        let res = format_remap_name(
            &SoldeerConfig {
                remappings_version: true,
                remappings_prefix: "@".to_string(),
                ..Default::default()
            },
            &dependency,
        );
        assert_eq!(res, "@dep1-^1.0.0/");
        // exact version requirement: the `=` sign is stripped from the alias
        let dependency =
            HttpDependency::builder().name("dep1").version_req("=1.0.0").build().into();
        let res = format_remap_name(
            &SoldeerConfig {
                remappings_version: true,
                remappings_prefix: String::new(),
                ..Default::default()
            },
            &dependency,
        );
        assert_eq!(res, "dep1-1.0.0/");
    }

    #[test]
    fn test_remappings_from_deps() {
        let dir = testdir!();
        // NOTE(review): newlines inside this raw-string fixture were collapsed by extraction;
        // restored so the fixture is valid TOML
        let config = r#"[dependencies]
dep1 = "^1.0.0"
dep2 = "2.0.0"
dep3 = { version = "foobar", git = "git@github.com:test/test.git", branch = "foobar" }
"#;
        fs::write(dir.join("soldeer.toml"), config).unwrap();
        let dependencies_dir = dir.join("dependencies");
        fs::create_dir_all(&dependencies_dir).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(dependencies_dir.join("dep1-1.1.1")).unwrap();
        fs::create_dir_all(dependencies_dir.join("dep2-2.0.0")).unwrap();
        fs::create_dir_all(dependencies_dir.join("dep3-foobar")).unwrap();
        let (dependencies, _) = read_config_deps(&paths.config).unwrap();
        let res = remappings_from_deps(&dependencies, &paths, &SoldeerConfig::default());
        assert!(res.is_ok(), "{res:?}");
        let res = res.unwrap();
        assert_eq!(res.len(), 3);
        assert_eq!(res[0].remapping_string, "dep1-^1.0.0/=dependencies/dep1-1.1.1/");
        assert_eq!(res[1].remapping_string, "dep2-2.0.0/=dependencies/dep2-2.0.0/");
        assert_eq!(res[2].remapping_string, "dep3-foobar/=dependencies/dep3-foobar/");
    }

    #[test]
    fn test_generate_remappings_add() {
        let dir = testdir!();
        fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        let config = SoldeerConfig::default();
        // empty existing remappings
        let existing_deps = vec![];
        let dep = HttpDependency::builder().name("lib1").version_req("1.0.0").build().into();
        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/"]);
        // existing remappings not matching new one
        let existing_deps = vec![("lib1-1.0.0/", "dependencies/lib1-1.0.0/")];
        fs::create_dir_all(paths.dependencies.join("lib2-1.1.1")).unwrap();
        let dep = HttpDependency::builder().name("lib2").version_req("^1.0.0").build().into();
        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/", "lib2-^1.0.0/=dependencies/lib2-1.1.1/"]
        );
        // existing remappings matching the new one
        let existing_deps = vec![("@lib1-1.0.0/foo", "dependencies/lib1-1.0.0/src")];
        let dep = HttpDependency::builder().name("lib1").version_req("1.0.0").build().into();
        let res = generate_remappings(&RemappingsAction::Add(dep), &paths, &config, &existing_deps);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(res.unwrap(), vec!["@lib1-1.0.0/foo=dependencies/lib1-1.0.0/src"]);
    }

    #[test]
    fn test_generate_remappings_remove() {
        let dir = testdir!();
        fs::write(dir.join("soldeer.toml"), "[dependencies]\n").unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib2-2.0.0")).unwrap();
        let config = SoldeerConfig::default();
        let
existing_deps = vec![ ("lib1-1.0.0/", "dependencies/lib1-1.0.0/"), ("lib2-2.0.0/", "dependencies/lib2-2.0.0/"), ]; let dep = HttpDependency::builder().name("lib1").version_req("1.0.0").build().into(); let res = generate_remappings(&RemappingsAction::Remove(dep), &paths, &config, &existing_deps); assert!(res.is_ok(), "{res:?}"); assert_eq!(res.unwrap(), vec!["lib2-2.0.0/=dependencies/lib2-2.0.0/"]); // dep does not exist, no error let dep = HttpDependency::builder().name("lib3").version_req("1.0.0").build().into(); let res = generate_remappings(&RemappingsAction::Remove(dep), &paths, &config, &existing_deps); assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/", "lib2-2.0.0/=dependencies/lib2-2.0.0/"] ); } #[test] fn test_generate_remappings_update() { let dir = testdir!(); let contents = r#"[dependencies] lib1 = "1.0.0" lib2 = "2.0.0" "#; fs::write(dir.join("soldeer.toml"), contents).unwrap(); let paths = Paths::from_root(&dir).unwrap(); fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap(); fs::create_dir_all(paths.dependencies.join("lib2-2.0.0")).unwrap(); let config = SoldeerConfig::default(); // all entries are customized let existing_deps = vec![ ("lib1-1.0.0/", "dependencies/lib1-1.0.0/src/"), ("lib2/", "dependencies/lib2-2.0.0/"), ]; let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps); assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/src/", "lib2/=dependencies/lib2-2.0.0/"] ); // one entry is missing let existing_deps = vec![("lib1-1.0.0/", "dependencies/lib1-1.0.0/")]; let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps); assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/", "lib2-2.0.0/=dependencies/lib2-2.0.0/"] ); // extra entries are kep let existing_deps = vec![ ("lib1-1.0.0/", "dependencies/lib1-1.0.0/"), 
("lib2-2.0.0/", "dependencies/lib2-2.0.0/"), ("lib3/", "dependencies/lib3/"), ]; let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps); assert!(res.is_ok(), "{res:?}"); assert_eq!( res.unwrap(), vec![ "lib1-1.0.0/=dependencies/lib1-1.0.0/", "lib2-2.0.0/=dependencies/lib2-2.0.0/", "lib3/=dependencies/lib3/" ] ); } #[test] fn test_remappings_foundry_default_profile_empty() { let dir = testdir!(); let contents = r#"[profile.default] [dependencies] lib1 = "1.0.0" "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let paths = Paths::from_root(&dir).unwrap(); fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap(); let config = SoldeerConfig::default(); let res = remappings_foundry(&RemappingsAction::Update, &paths, &config); assert!(res.is_ok(), "{res:?}"); let contents = fs::read_to_string(&paths.config).unwrap(); let doc: DocumentMut = contents.parse::().unwrap(); assert_eq!( doc["profile"]["default"]["remappings"] .as_array() .unwrap() .into_iter() .map(|i| i.as_str().unwrap()) .collect::>(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/"] ); } #[test] fn test_remappings_foundry_second_profile_empty() { let dir = testdir!(); let contents = r#"[profile.default] [profile.local] [dependencies] lib1 = "1.0.0" "#; fs::write(dir.join("foundry.toml"), contents).unwrap(); let paths = Paths::from_root(&dir).unwrap(); fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap(); let config = SoldeerConfig::default(); // should only add remappings to the default profile let res = remappings_foundry(&RemappingsAction::Update, &paths, &config); assert!(res.is_ok(), "{res:?}"); let contents = fs::read_to_string(&paths.config).unwrap(); let doc: DocumentMut = contents.parse::().unwrap(); assert_eq!( doc["profile"]["default"]["remappings"] .as_array() .unwrap() .into_iter() .map(|i| i.as_str().unwrap()) .collect::>(), vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/"] ); 
assert!(!doc["profile"]["local"].as_table().unwrap().contains_key("remappings"));
    }

    #[test]
    fn test_remappings_foundry_two_profiles() {
        let dir = testdir!();
        // NOTE(review): newlines in this fixture restored (collapsed by extraction)
        let contents = r#"[profile.default]
remappings = []

[profile.local]
remappings = []

[dependencies]
lib1 = "1.0.0"
"#;
        fs::write(dir.join("foundry.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        let config = SoldeerConfig::default();
        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.config).unwrap();
        // restored turbofish `::<DocumentMut>` (lost in extraction)
        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();
        assert_eq!(
            doc["profile"]["default"]["remappings"]
                .as_array()
                .unwrap()
                .into_iter()
                .map(|i| i.as_str().unwrap())
                .collect::<Vec<_>>(),
            vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/"]
        );
        assert_eq!(
            doc["profile"]["local"]["remappings"]
                .as_array()
                .unwrap()
                .into_iter()
                .map(|i| i.as_str().unwrap())
                .collect::<Vec<_>>(),
            vec!["lib1-1.0.0/=dependencies/lib1-1.0.0/"]
        );
    }

    #[test]
    fn test_remappings_foundry_keep_existing() {
        let dir = testdir!();
        let contents = r#"[profile.default]
remappings = ["lib1/=dependencies/lib1-1.0.0/src/"]

[dependencies]
lib1 = "1.0.0"
"#;
        fs::write(dir.join("foundry.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        let config = SoldeerConfig::default();
        let res = remappings_foundry(&RemappingsAction::Update, &paths, &config);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.config).unwrap();
        let doc: DocumentMut = contents.parse::<DocumentMut>().unwrap();
        assert_eq!(
            doc["profile"]["default"]["remappings"]
                .as_array()
                .unwrap()
                .into_iter()
                .map(|i| i.as_str().unwrap())
                .collect::<Vec<_>>(),
            vec!["lib1/=dependencies/lib1-1.0.0/src/"]
        );
    }

    #[test]
    fn test_remappings_txt_keep() {
        let dir = testdir!();
        let contents = r#"[dependencies]
lib1 = "1.0.0"
"#;
        fs::write(dir.join("soldeer.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        let remappings = "lib1/=dependencies/lib1-1.0.0/src/\n";
        fs::write(dir.join("remappings.txt"), remappings).unwrap();
        let config = SoldeerConfig::default();
        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.remappings).unwrap();
        assert_eq!(contents, remappings);
    }

    #[test]
    fn test_remappings_txt_regenerate() {
        let dir = testdir!();
        let contents = r#"[dependencies]
lib1 = "1.0.0"
"#;
        fs::write(dir.join("soldeer.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        let remappings = "lib1/=dependencies/lib1-1.0.0/src/\n";
        fs::write(dir.join("remappings.txt"), remappings).unwrap();
        let config = SoldeerConfig { remappings_regenerate: true, ..Default::default() };
        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.remappings).unwrap();
        assert_eq!(contents, "lib1-1.0.0/=dependencies/lib1-1.0.0/\n");
    }

    #[test]
    fn test_remappings_txt_missing() {
        let dir = testdir!();
        let contents = r#"[dependencies]
lib1 = "1.0.0"
lib2 = "2.0.0"
"#;
        fs::write(dir.join("soldeer.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib2-2.0.0")).unwrap();
        let remappings = "lib1/=dependencies/lib1-1.0.0/src/\n";
        fs::write(dir.join("remappings.txt"), remappings).unwrap();
        let config = SoldeerConfig::default();
        let res = remappings_txt(&RemappingsAction::Update, &paths, &config);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.remappings).unwrap();
        assert_eq!(
            contents,
"lib1/=dependencies/lib1-1.0.0/src/\nlib2-2.0.0/=dependencies/lib2-2.0.0/\n"
        );
    }

    #[test]
    fn test_edit_remappings_soldeer_config() {
        let dir = testdir!();
        let contents = r#"[dependencies]
lib1 = "1.0.0"
"#;
        fs::write(dir.join("soldeer.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib1-1.0.0")).unwrap();
        // the config gets ignored in this case
        let config =
            SoldeerConfig { remappings_location: RemappingsLocation::Config, ..Default::default() };
        let res = edit_remappings(&RemappingsAction::Update, &config, &paths);
        assert!(res.is_ok(), "{res:?}");
        let contents = fs::read_to_string(&paths.remappings).unwrap();
        assert_eq!(contents, "lib1-1.0.0/=dependencies/lib1-1.0.0/\n");
    }

    #[test]
    fn test_generate_remappings_update_semver_custom() {
        let dir = testdir!();
        let contents = r#"[dependencies]
lib1 = "1"
lib2 = "2"
"#;
        fs::write(dir.join("soldeer.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        // libs have been updated to newer versions
        fs::create_dir_all(paths.dependencies.join("lib1-1.2.0")).unwrap();
        fs::create_dir_all(paths.dependencies.join("lib2-2.1.0")).unwrap();
        let config = SoldeerConfig::default();
        // all entries are customized, using an old version of the libs
        let existing_deps = vec![
            ("lib1-1/", "dependencies/lib1-1.1.1/src/"), // customize right part
            ("lib2/", "dependencies/lib2-2.0.1/src/"),   // customize both sides
        ];
        let res = generate_remappings(&RemappingsAction::Update, &paths, &config, &existing_deps);
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(
            res.unwrap(),
            vec!["lib1-1/=dependencies/lib1-1.2.0/src/", "lib2/=dependencies/lib2-2.1.0/src/"]
        );
    }

    #[test]
    fn test_generate_remappings_duplicates() {
        let dir = testdir!();
        // NOTE(review): newlines in this fixture restored (collapsed by extraction); the final
        // assertion requires the file to round-trip unchanged, so the array layout must match
        // the output of `format_array`
        let contents = r#"[profile.default]
remappings = [
    "@openzeppelin-contracts/=dependencies/@openzeppelin-contracts-5.0.2/",
    "@openzeppelin/contracts/=dependencies/@openzeppelin-contracts-5.0.2/",
    "foo/=bar/",
]
libs = ["dependencies"]

[dependencies]
"@openzeppelin-contracts" = "5.0.2"
"#;
        fs::write(dir.join("foundry.toml"), contents).unwrap();
        let paths = Paths::from_root(&dir).unwrap();
        fs::create_dir_all(paths.dependencies.join("@openzeppelin-contracts-5.0.2")).unwrap();
        let res = remappings_foundry(
            &RemappingsAction::Update,
            &paths,
            &SoldeerConfig { remappings_location: RemappingsLocation::Config, ..Default::default() },
        );
        assert!(res.is_ok(), "{res:?}");
        assert_eq!(fs::read_to_string(dir.join("foundry.toml")).unwrap(), contents);
    }
}

================================================
FILE: crates/core/src/update.rs
================================================
//! Update dependencies to the latest version.
use crate::{
    config::{Dependency, GitIdentifier},
    errors::UpdateError,
    install::{InstallProgress, install_dependency},
    lock::{GitLockEntry, LockEntry, format_install_path},
    registry::get_latest_supported_version,
    utils::run_git_command,
};
use log::debug;
use std::path::Path;
use tokio::task::JoinSet;

// restored generics (lost in extraction): module-wide result alias over `UpdateError`
pub type Result<T> = std::result::Result<T, UpdateError>;

/// Update the dependencies to a new version.
///
/// This function spawns a task for each dependency and waits for all of them to finish.
///
/// For Git dependencies without a ref or with a
/// [`GitIdentifier::Branch`] ref, the function will update
/// the dependency to the latest commit with `git pull`.
///
/// For Git dependencies with a [`GitIdentifier::Rev`] or [`GitIdentifier::Tag`] ref, the function
/// will reset the repo to the ref if the integrity check fails. An update is not really possible in
/// this case.
///
/// For HTTP dependencies, the function will install the latest version of the dependency according
/// to the version requirement in the config file. If the version requirement is not a semver range,
/// the function will install the latest version from the registry.
pub async fn update_dependencies( dependencies: &[Dependency], locks: &[LockEntry], deps_path: impl AsRef, recursive_deps: bool, progress: InstallProgress, ) -> Result> { let mut set = JoinSet::new(); for dep in dependencies { debug!(dep:% = dep; "spawning task to update dependency"); set.spawn({ let d = dep.clone(); let p = progress.clone(); let lock = locks.iter().find(|l| l.name() == dep.name()).cloned(); let paths = deps_path.as_ref().to_path_buf(); async move { update_dependency(&d, lock.as_ref(), &paths, recursive_deps, p).await } }); } let mut results = Vec::new(); while let Some(res) = set.join_next().await { results.push(res??); } debug!("all update tasks have finished"); Ok(results) } /// Update a single dependency to a new version. /// /// For Git dependencies without a ref or with a /// [`GitIdentifier::Branch`] ref, the function will update /// the dependency to the latest commit with `git pull`. /// /// For Git dependencies with a [`GitIdentifier::Rev`] or [`GitIdentifier::Tag`] ref, the function /// will reset the repo to the ref if the integrity check fails. An update is not really possible in /// this case. /// /// For HTTP dependencies, the function will install the latest version of the dependency according /// to the version requirement in the config file. If the version requirement is not a semver range, /// the function will install the latest version from the registry. 
pub async fn update_dependency( dependency: &Dependency, lock: Option<&LockEntry>, deps: impl AsRef, recursive_deps: bool, progress: InstallProgress, ) -> Result { match dependency { Dependency::Git(dep) if matches!(dep.identifier, None | Some(GitIdentifier::Branch(_))) => { // we handle the git case in a special way because we don't need to re-clone the repo // update to the latest commit (git pull) debug!(dep:% = dependency; "updating git dependency based on a branch"); let path = match lock { Some(lock) => lock.install_path(&deps), None => dependency.install_path(&deps).await.unwrap_or_else(|| { format_install_path(dependency.name(), dependency.version_req(), &deps) }), }; run_git_command(&["reset", "--hard", "HEAD"], Some(&path)).await?; run_git_command(&["clean", "-fd"], Some(&path)).await?; let old_commit = run_git_command(&["rev-parse", "--verify", "HEAD"], Some(&path)) .await? .trim() .to_string(); debug!(dep:% = dependency; "old commit was {old_commit}"); if let Some(GitIdentifier::Branch(ref branch)) = dep.identifier { // checkout the desired branch debug!(dep:% = dependency, branch; "checking out required branch"); run_git_command(&["checkout", branch], Some(&path)).await?; } else { // necessarily `None` because of the match above // checkout the default branch debug!(dep:% = dependency; "checking out default branch"); let branch = run_git_command( &["symbolic-ref", "refs/remotes/origin/HEAD", "--short"], Some(&path), ) .await? .trim_start_matches("origin/") .trim() .to_string(); debug!(dep:% = dependency; "default branch is {branch}"); run_git_command(&["checkout", &branch], Some(&path)).await?; } // pull the latest commits debug!(dep:% = dependency; "running git pull"); run_git_command(&["pull"], Some(&path)).await?; let commit = run_git_command(&["rev-parse", "--verify", "HEAD"], Some(&path)) .await? 
.trim() .to_string(); debug!(dep:% = dependency; "new commit is {commit}"); if commit != old_commit { debug!(dep:% = dependency, old_commit, new_commit = commit; "updated dependency"); progress.log(format!("Updating {dependency} from {old_commit:.7} to {commit:.7}")); } else { debug!(dep:% = dependency; "there was no update available"); } let new_lock = GitLockEntry::builder() .name(&dep.name) .version(&dep.version_req) .git(&dep.git) .rev(commit) .build() .into(); progress.update_all(dependency.into()); Ok(new_lock) } Dependency::Git(dep) if dep.identifier.is_some() => { // check integrity against the existing version since we can't update to a new rev debug!(dep:% = dependency; "checking git repo integrity against required rev (can't update)"); let lock = match lock { Some(lock) => lock, None => &GitLockEntry::builder() .name(&dep.name) .version(&dep.version_req) .git(&dep.git) .rev(dep.identifier.as_ref().expect("identifier should be present").to_string()) .build() .into(), }; let new_lock = install_dependency(dependency, Some(lock), &deps, None, recursive_deps, progress) .await?; Ok(new_lock) } _ => { // for http dependencies, we simply install them as if there was no lock entry debug!(dep:% = dependency; "updating http dependency"); // to show which version we update to, we already need to know the new version, so we // can pass it to `install_dependency` to spare us from another call to the // registry let force_version = match (dependency.url(), lock) { (None, Some(lock)) => { let new_version = get_latest_supported_version(dependency).await?; if lock.version() != new_version { debug!(dep:% = dependency, old_version = lock.version(), new_version; "dependency has a new version available"); progress.log(format!( "Updating {} from {} to {new_version}", dependency.name(), lock.version(), )); } Some(new_version) } _ => None, }; let new_lock = install_dependency( dependency, None, &deps, force_version, recursive_deps, progress, ) .await?; Ok(new_lock) } } } 
================================================
FILE: crates/core/src/utils.rs
================================================
//! Utility functions used throughout the codebase.
use crate::{
    config::Dependency,
    errors::{DownloadError, InstallError},
    registry::parse_version_req,
};
use derive_more::derive::{Display, From};
use ignore::{WalkBuilder, WalkState};
use log::{debug, warn};
use path_slash::PathExt as _;
use rayon::prelude::*;
use semver::Version;
use sha2::{Digest as _, Sha256};
use std::{
    borrow::Cow,
    env,
    ffi::OsStr,
    fs,
    io::Read,
    path::{Path, PathBuf},
    sync::{Arc, mpsc},
};
use tokio::process::Command;

/// Newtype for the string representation of an integrity checksum (SHA256).
///
/// The inner `String` is the hex-encoded digest; conversions from `Cow`, `String` and `&'static
/// str` are derived via `From`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, From, Display)]
#[from(Cow<'static, str>, String, &'static str)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct IntegrityChecksum(pub String);

/// Get the location where the token file is stored or read from.
///
/// The token file is stored in the home directory of the user, or in the current directory
/// if the home cannot be found, in a hidden folder called `.soldeer`. The token file is called
/// `.soldeer_login`.
///
/// The path can be overridden by setting the `SOLDEER_LOGIN_FILE` environment variable.
pub fn login_file_path() -> Result { if let Ok(file_path) = env::var("SOLDEER_LOGIN_FILE") && !file_path.is_empty() { debug!("using soldeer login file defined in environment variable"); return Ok(file_path.into()); } // if home dir cannot be found, use the current dir let dir = home::home_dir().unwrap_or(env::current_dir()?); let security_directory = dir.join(".soldeer"); if !security_directory.exists() { debug!(dir:?; ".soldeer folder does not exist, creating it"); fs::create_dir(&security_directory)?; } let login_file = security_directory.join(".soldeer_login"); debug!(login_file:?; "path to login file"); Ok(login_file) } /// Check if any filename in the list of paths starts with a period. pub fn check_dotfiles(files: &[PathBuf]) -> bool { files .par_iter() .any(|file| file.file_name().unwrap_or_default().to_string_lossy().starts_with('.')) } /// Sanitize a filename by replacing invalid characters with a dash. pub fn sanitize_filename(dependency_name: &str) -> String { let options = sanitize_filename::Options { truncate: true, windows: cfg!(windows), replacement: "-" }; let filename = sanitize_filename::sanitize_with_options(dependency_name, options); debug!(filename; "sanitized filename"); filename } /// Hash the contents of a Reader with SHA256 pub fn hash_content(content: &mut R) -> [u8; 32] { let mut hasher = Sha256::new(); let mut buf = [0; 1024]; while let Ok(size) = content.read(&mut buf) { if size == 0 { break; } hasher.update(&buf[0..size]); } hasher.finalize().into() } /// Walk a folder and compute the SHA256 hash of all non-hidden and non-ignored files inside the /// dir, combining them into a single hash. /// /// The paths of the folders and files are hashes too, so we can the integrity of their names and /// location can be checked. pub fn hash_folder(folder_path: impl AsRef) -> Result { debug!(path:? 
= folder_path.as_ref(); "hashing folder"); // a list of hashes, one for each DirEntry let root_path = Arc::new(dunce::canonicalize(folder_path.as_ref())?); let (tx, rx) = mpsc::channel::<[u8; 32]>(); // we use a parallel walker to speed things up let walker = WalkBuilder::new(&folder_path) .filter_entry(|entry| { !(entry.path().is_dir() && entry.path().file_name().unwrap_or_default() == ".git") }) .hidden(false) .require_git(false) .parents(false) .git_global(false) .git_exclude(false) .build_parallel(); walker.run(|| { let tx = tx.clone(); let root_path = Arc::clone(&root_path); // function executed for each DirEntry Box::new(move |result| { let Ok(entry) = result else { return WalkState::Continue; }; let path = entry.path(); // first hash the filename/dirname to make sure it can't be renamed or removed let mut hasher = Sha256::new(); hasher.update( path.strip_prefix(root_path.as_ref()) .expect("path should be a child of root") .to_slash_lossy() .as_bytes(), ); // for files, also hash the contents if let Some(true) = entry.file_type().map(|t| t.is_file()) { if let Ok(file) = fs::File::open(path) { let mut reader = std::io::BufReader::new(file); let hash = hash_content(&mut reader); hasher.update(hash); } else { warn!(path:?; "could not read file while hashing folder"); } } // record the hash for that file/folder in the list let hash: [u8; 32] = hasher.finalize().into(); tx.send(hash) .expect("Channel receiver should never be dropped before end of function scope"); WalkState::Continue }) }); drop(tx); let mut hasher = Sha256::new(); // this cannot happen before tx is dropped safely let mut hashes = Vec::new(); while let Ok(msg) = rx.recv() { hashes.push(msg); } // sort hashes hashes.par_sort_unstable(); // hash the hashes (yo dawg...) for hash in hashes.iter() { hasher.update(hash); } let hash: [u8; 32] = hasher.finalize().into(); let hash = const_hex::encode(hash); debug!(path:? 
= folder_path.as_ref(), hash; "folder hash was computed"); Ok(hash.into()) } /// Compute the SHA256 hash of the contents of a file pub fn hash_file(path: impl AsRef) -> Result { debug!(path:? = path.as_ref(); "hashing file"); let file = fs::File::open(&path)?; let mut reader = std::io::BufReader::new(file); let bytes = hash_content(&mut reader); let hash = const_hex::encode(bytes); debug!(path:? = path.as_ref(), hash; "file hash was computed"); Ok(hash.into()) } /// Run a `git` command with the given arguments in the given directory. /// /// The function output is parsed as a UTF-8 string and returned. pub async fn run_git_command( args: I, current_dir: Option<&PathBuf>, ) -> Result where I: IntoIterator + Clone, S: AsRef, { let mut git = Command::new("git"); git.args(args.clone()).env("GIT_TERMINAL_PROMPT", "0"); if let Some(current_dir) = current_dir { git.current_dir( canonicalize(current_dir) .await .map_err(|e| DownloadError::IOError { path: current_dir.clone(), source: e })?, ); } let git = git.output().await.map_err(|e| DownloadError::GitError { message: e.to_string(), args: args.clone().into_iter().map(|a| a.as_ref().to_string_lossy().into_owned()).collect(), })?; if !git.status.success() { return Err(DownloadError::GitError { message: String::from_utf8(git.stderr).unwrap_or_default(), args: args.into_iter().map(|a| a.as_ref().to_string_lossy().into_owned()).collect(), }); } Ok(String::from_utf8(git.stdout).expect("git command output should be valid utf-8")) } /// Run a `forge` command with the given arguments in the given directory. /// /// The function output is parsed as a UTF-8 string and returned. 
pub async fn run_forge_command<I, S>(
    args: I,
    current_dir: Option<&PathBuf>,
) -> Result<String, InstallError>
where
    I: IntoIterator<Item = S>,
    S: AsRef<OsStr>,
{
    let mut forge = Command::new("forge");
    forge.args(args);
    if let Some(current_dir) = current_dir {
        forge.current_dir(
            canonicalize(current_dir)
                .await
                .map_err(|e| InstallError::IOError { path: current_dir.clone(), source: e })?,
        );
    }
    let forge = forge.output().await.map_err(|e| InstallError::ForgeError(e.to_string()))?;
    if !forge.status.success() {
        return Err(InstallError::ForgeError(String::from_utf8(forge.stderr).unwrap_or_default()));
    }
    Ok(String::from_utf8(forge.stdout).expect("forge command output should be valid utf-8"))
}

/// Remove/uninstall the `forge-std` library installed as a git submodule in a foundry project.
///
/// This function removes the `forge-std` submodule, the `.gitmodules` file and the `lib` directory
/// from the project.
pub async fn remove_forge_lib(root: impl AsRef<Path>) -> Result<(), InstallError> {
    debug!("removing forge-std installed as a git submodule");
    let gitmodules_path = root.as_ref().join(".gitmodules");
    let lib_dir = root.as_ref().join("lib");
    let forge_std_dir = lib_dir.join("forge-std");
    if forge_std_dir.exists() {
        // `git rm` unregisters the submodule and removes the checkout
        run_git_command(
            &["rm", &forge_std_dir.to_string_lossy()],
            Some(&root.as_ref().to_path_buf()),
        )
        .await?;
        debug!("removed lib/forge-std");
    }
    if lib_dir.exists() {
        fs::remove_dir_all(&lib_dir)
            .map_err(|e| InstallError::IOError { path: lib_dir.clone(), source: e })?;
        debug!("removed lib dir");
    }
    if gitmodules_path.exists() {
        // fix: report the path that actually failed (`.gitmodules`), not the lib dir
        fs::remove_file(&gitmodules_path)
            .map_err(|e| InstallError::IOError { path: gitmodules_path, source: e })?;
        debug!("removed .gitmodules file");
    }
    Ok(())
}

/// Canonicalize a path, resolving symlinks and relative paths.
///
/// This function also normalizes paths on Windows to use the MS-DOS format (as opposed to UNC)
/// whenever possible.
pub async fn canonicalize(path: impl AsRef) -> Result { let path = path.as_ref().to_path_buf(); tokio::task::spawn_blocking(move || dunce::canonicalize(&path)).await? } /// Canonicalize a path, resolving symlinks and relative paths, synchronously. /// /// This function also normalizes paths on Windows to use the MS-DOS format (as opposed to UNC) /// whenever possible. pub fn canonicalize_sync(path: impl AsRef) -> Result { dunce::canonicalize(path) } /// Check if a path corresponds to the provided dependency. /// /// The folder does not need to exist. The folder name must start with the dependency name /// (sanitized). For dependencies with a semver-compliant version requirement, any folder with a /// version that matches will give a result of `true`. Otherwise, the folder name must contain the /// version requirement string after the dependency name. pub fn path_matches(dependency: &Dependency, path: impl AsRef) -> bool { let path = path.as_ref(); let Some(dir_name) = path.file_name() else { return false; }; let dir_name = dir_name.to_string_lossy(); let prefix = format!("{}-", sanitize_filename(dependency.name())); if !dir_name.starts_with(&prefix) { return false; } match ( parse_version_req(dependency.version_req()), Version::parse(dir_name.strip_prefix(&prefix).expect("prefix should be present")), ) { (None, _) | (Some(_), Err(_)) => { // not semver compliant dir_name == format!("{prefix}{}", sanitize_filename(dependency.version_req())) } (Some(version_req), Ok(version)) => version_req.matches(&version), } } #[cfg(test)] mod tests { use super::*; use std::fs; use testdir::testdir; fn create_test_folder(name: Option<&str>) -> PathBuf { let dir = testdir!(); let named_dir = match name { None => dir, Some(name) => { let d = dir.join(name); fs::create_dir(&d).unwrap(); d } }; fs::write(named_dir.join("a.txt"), "this is a test file").unwrap(); fs::write(named_dir.join("b.txt"), "this is a second test file").unwrap(); fs::write(named_dir.join("ignored.txt"), "this 
file should be ignored").unwrap(); fs::write(named_dir.join(".gitignore"), "ignored.txt\n").unwrap(); fs::write( named_dir.parent().unwrap().join(".gitignore"), format!("{}/a.txt", named_dir.file_name().unwrap().to_string_lossy()), ) .unwrap(); // this file should be ignored because it's in the parent dir dunce::canonicalize(named_dir).unwrap() } #[test] fn test_hash_content() { let mut content = "this is a test file".as_bytes(); let hash = hash_content(&mut content); assert_eq!( const_hex::encode(hash), "5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625".to_string() ); } #[test] fn test_hash_content_content_sensitive() { let mut content = "foobar".as_bytes(); let hash = hash_content(&mut content); let mut content2 = "baz".as_bytes(); let hash2 = hash_content(&mut content2); assert_ne!(hash, hash2); } #[test] fn test_hash_file() { let path = testdir!().join("test.txt"); fs::write(&path, "this is a test file").unwrap(); let hash = hash_file(&path).unwrap(); assert_eq!(hash, "5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625".into()); } #[test] fn test_hash_folder_abs_path_insensitive() { let folder1 = create_test_folder(Some("dir1")); let folder2 = create_test_folder(Some("dir2")); let hash1 = hash_folder(&folder1).unwrap(); let hash2 = hash_folder(&folder2).unwrap(); assert_eq!( hash1.to_string(), "c5328a2c3db7582b9074d5f5263ef111b496bbf9cda9b6c5fb0f97f1dc17b766" ); assert_eq!(hash1, hash2); // ignored.txt should be ignored in the checksum calculation, so removing it should yield // the same checksum fs::remove_file(folder1.join("ignored.txt")).unwrap(); let hash1 = hash_folder(&folder1).unwrap(); assert_eq!(hash1, hash2); } #[test] fn test_hash_folder_rel_path_sensitive() { let folder = create_test_folder(Some("dir")); let hash1 = hash_folder(&folder).unwrap(); fs::rename(folder.join("a.txt"), folder.join("c.txt")).unwrap(); let hash2 = hash_folder(&folder).unwrap(); assert_ne!(hash1, hash2); } #[test] fn 
test_hash_folder_content_sensitive() { let folder = create_test_folder(Some("dir")); let hash1 = hash_folder(&folder).unwrap(); fs::create_dir(folder.join("test")).unwrap(); let hash2 = hash_folder(&folder).unwrap(); assert_ne!(hash1, hash2); fs::write(folder.join("test/c.txt"), "this is a third test file").unwrap(); let hash3 = hash_folder(&folder).unwrap(); assert_ne!(hash2, hash3); assert_ne!(hash1, hash3); } } ================================================ FILE: flake.nix ================================================ { inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; fenix = { url = "github:nix-community/fenix"; inputs.nixpkgs.follows = "nixpkgs"; }; }; outputs = { self, nixpkgs, fenix }: let forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed; in { devShells = forAllSystems (system: let pkgs = import nixpkgs { inherit system; overlays = [ fenix.overlays.default ]; }; toolchain = fenix.packages.${system}.stable.withComponents [ "rustc" "cargo" "rust-std" "clippy-preview" "rust-analyzer-preview" "rust-src" ]; nightlyToolchain = fenix.packages.${system}.latest.withComponents [ "rustfmt-preview" ]; in { default = pkgs.mkShell { buildInputs = with pkgs; [ cargo-nextest foundry nightlyToolchain openssl pkg-config toolchain ]; RUST_SRC_PATH = "${toolchain}/lib/rustlib/src/rust/library"; }; } ); }; } ================================================ FILE: release-plz.toml ================================================ [workspace] dependencies_update = true git_release_enable = false # we only need to create a git tag for one of the crates git_tag_enable = false publish = false # cargo publish will be done by hand for now changelog_path = "./CHANGELOG.md" [[package]] name = "soldeer-core" version_group = "soldeer" [[package]] name = "soldeer-commands" version_group = "soldeer" [[package]] name = "soldeer" version_group = "soldeer" git_tag_name = "v{{ version }}" git_release_name = "v{{ version }}" git_tag_enable = true 
git_release_enable = true [changelog] body = """ ## `{{ package }}` - [{{ version | trim_start_matches(pat="v") }}]{%- if release_link -%}({{ release_link }}){% endif %} - {{ timestamp | date(format="%Y-%m-%d") }} {% for group, commits in commits | group_by(attribute="group") %} ### {{ group | upper_first }} {% for commit in commits %} {%- if commit.scope -%} - *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}{%- if commit.links %} ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%}){% endif %} {% else -%} - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }} {% endif -%} {% endfor -%} {% endfor -%} """ ================================================ FILE: rustfmt.toml ================================================ reorder_imports = true imports_granularity = "Crate" use_small_heuristics = "Max" comment_width = 100 wrap_comments = true binop_separator = "Back" trailing_comma = "Vertical" trailing_semicolon = false use_field_init_shorthand = true format_code_in_doc_comments = true doc_comment_code_block_width = 100