[
  {
    "path": ".cargo/config.toml",
    "content": "# On Windows MSVC, statically link the C runtime so that the resulting EXE does\n# not depend on the vcruntime DLL.\n#\n# See: https://github.com/sharkdp/fd/issues/1874\n\n[target.x86_64-pc-windows-msvc]\nrustflags = [\"-C\", \"target-feature=+crt-static\"]\n[target.i686-pc-windows-msvc]\nrustflags = [\"-C\", \"target-feature=+crt-static\"]"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "github: [sharkdp, tavianator]\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yaml",
    "content": "name: Bug Report\ndescription: Report a bug.\ntitle: \"[BUG] \"\nlabels: bug\nbody:\n  - type: markdown\n    attributes:\n      value: |\n        Please check out the [troubleshooting section](https://github.com/sharkdp/fd#troubleshooting) first.\n  - type: checkboxes\n    attributes:\n      label: Checks\n      options:\n        - label: I have read the troubleshooting section and still think this is a bug.\n          required: true\n  - type: textarea\n    id: bug\n    attributes:\n      label: \"Describe the bug you encountered:\"\n    validations:\n      required: true\n  - type: textarea\n    id: expected\n    attributes:\n      label: \"Describe what you expected to happen:\"\n  - type: input\n    id: version\n    attributes:\n      label: \"What version of `fd` are you using?\"\n      placeholder: \"paste the output of `fd --version` here\"\n    validations:\n      required: true\n  - type: textarea\n    id: os\n    attributes:\n      label: Which operating system / distribution are you on?\n      placeholder: |\n        Unix: paste the output of `uname -srm` and `lsb_release -a` here.\n        Windows: please tell us your Windows version\n      render: shell\n    validations:\n      required: true\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "content": "blank_issues_enabled: true\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature Request\nabout: Suggest an idea for this project.\ntitle: ''\nlabels: feature-request\nassignees: ''\n\n---\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/question.md",
    "content": "---\nname: Question\nabout: Ask a question about 'fd'.\ntitle: ''\nlabels: question\nassignees: ''\n\n---\n\n\n\n**What version of `fd` are you using?**\n[paste the output of `fd --version` here]\n"
  },
  {
    "path": ".github/dependabot.yml",
    "content": "version: 2\nupdates:\n  - package-ecosystem: \"cargo\"\n    directory: \"/\"\n    schedule:\n      interval: \"monthly\"\n    cooldown:\n      default-days: 7\n  - package-ecosystem: \"github-actions\"\n    directory: \"/\"\n    schedule:\n      interval: \"daily\"\n    cooldown:\n      default-days: 7\n"
  },
  {
    "path": ".github/workflows/CICD.yml",
    "content": "name: CICD\n\nenv:\n  CICD_INTERMEDIATES_DIR: \"_cicd-intermediates\"\n  MSRV_FEATURES: \"--all-features\"\n\non:\n  workflow_dispatch:\n  pull_request:\n  push:\n    branches:\n      - master\n    tags:\n      - '*'\n\npermissions:\n  contents: read\n\njobs:\n  crate_metadata:\n    name: Extract crate metadata\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v6\n      with:\n        persist-credentials: false\n    - name: Extract crate information\n      id: crate_metadata\n      run: |\n        echo \"name=fd\" | tee -a $GITHUB_OUTPUT\n        cargo metadata --no-deps --format-version 1 | jq -r '\"version=\" + .packages[0].version' | tee -a $GITHUB_OUTPUT\n        cargo metadata --no-deps --format-version 1 | jq -r '\"maintainer=\" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT\n        cargo metadata --no-deps --format-version 1 | jq -r '\"homepage=\" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT\n        cargo metadata --no-deps --format-version 1 | jq -r '\"msrv=\" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT\n    outputs:\n      name: ${{ steps.crate_metadata.outputs.name }}\n      version: ${{ steps.crate_metadata.outputs.version }}\n      maintainer: ${{ steps.crate_metadata.outputs.maintainer }}\n      homepage: ${{ steps.crate_metadata.outputs.homepage }}\n      msrv: ${{ steps.crate_metadata.outputs.msrv }}\n\n  ensure_cargo_fmt:\n    name: Ensure 'cargo fmt' has been run\n    runs-on: ubuntu-22.04\n    steps:\n    - uses: dtolnay/rust-toolchain@stable\n      with:\n        components: rustfmt\n    - uses: actions/checkout@v6\n      with:\n        persist-credentials: false\n    - run: cargo fmt -- --check\n\n  lint_check:\n    name: Ensure 'cargo clippy' has no warnings\n    runs-on: ubuntu-latest\n    steps:\n    - uses: dtolnay/rust-toolchain@stable\n      with:\n        components: clippy\n    - uses: actions/checkout@v6\n      with:\n        persist-credentials: false\n    - run: cargo clippy 
--all-targets --all-features -- -Dwarnings\n\n  min_version:\n    name: Minimum supported rust version\n    runs-on: ubuntu-22.04\n    needs: crate_metadata\n    steps:\n    - name: Checkout source code\n      uses: actions/checkout@v6\n      with:\n        persist-credentials: false\n\n    - name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }})\n      uses: dtolnay/rust-toolchain@master\n      with:\n        toolchain: ${{ needs.crate_metadata.outputs.msrv }}\n        components: clippy\n    - name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)\n      run: cargo clippy --locked --all-targets \"${MSRV_FEATURES}\"\n    - name: Run tests\n      run: cargo test --locked \"${MSRV_FEATURES}\"\n\n  build:\n    name: '${{ matrix.job.target }} (${{ matrix.job.os }})'\n    runs-on: ${{ matrix.job.os }}\n    needs: crate_metadata\n    permissions:\n      id-token: write\n      contents: write\n      attestations: write\n    strategy:\n      fail-fast: false\n      matrix:\n        job:\n          - { target: aarch64-unknown-linux-gnu   , os: ubuntu-24.04, use-cross: true }\n          - { target: aarch64-unknown-linux-musl  , os: ubuntu-24.04, use-cross: true }\n          - { target: arm-unknown-linux-gnueabihf , os: ubuntu-24.04, use-cross: true }\n          - { target: arm-unknown-linux-musleabihf, os: ubuntu-24.04, use-cross: true }\n          - { target: i686-pc-windows-msvc        , os: windows-2022                  }\n          - { target: i686-unknown-linux-gnu      , os: ubuntu-24.04, use-cross: true }\n          - { target: i686-unknown-linux-musl     , os: ubuntu-24.04, use-cross: true }\n          - { target: aarch64-apple-darwin        , os: macos-14                      }\n          - { target: x86_64-pc-windows-gnu       , os: windows-2022                  }\n          - { target: x86_64-pc-windows-msvc      , os: windows-2022                  }\n          - { target: aarch64-pc-windows-msvc     , os: 
windows-11-arm                }\n          - { target: x86_64-unknown-linux-gnu    , os: ubuntu-24.04, use-cross: true }\n          - { target: x86_64-unknown-linux-musl   , os: ubuntu-24.04, use-cross: true }\n    env:\n      BUILD_CMD: \"${{ matrix.job.use-cross && 'cross' || 'cargo' }}\"\n      target: ${{ matrix.job.target }}\n      name: ${{ needs.crate_metadata.outputs.name }}\n    steps:\n    - name: Checkout source code\n      uses: actions/checkout@v6\n      with:\n        persist-credentials: false\n\n    - name: Install prerequisites\n      shell: bash\n      run: |\n        case ${target} in\n          arm-unknown-linux-*) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;\n          aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;\n        esac\n\n    - name: Install Rust toolchain\n      uses: dtolnay/rust-toolchain@stable\n      with:\n        targets: ${{ matrix.job.target }}\n        toolchain: \"stable\"\n\n    - name: Install cross\n      if: matrix.job.use-cross\n      env:\n        cross_version: \"v0.2.5\"\n        package_name: \"cross-x86_64-unknown-linux-gnu.tar.gz\"\n        GH_TOKEN: \"${{ github.token }}\"\n      run: |\n        dir=\"$HOME/.local/bin/\"\n        mkdir -p \"$dir\"\n        gh release download --repo cross-rs/cross  \\\n          --pattern \"${package_name}\" -O - \"${cross_version}\" \\\n          | tar -C \"$dir\" -xz\n        echo \"$dir\" >> $GITHUB_PATH\n        echo \"Installed cross $cross_version\" >> $GITHUB_STEP_SUMMARY\n    - name: Show version information (Rust, cargo, GCC)\n      shell: bash\n      run: |\n        gcc --version || true\n        rustup -V\n        rustup toolchain list\n        rustup default\n        cargo -V\n        rustc -V\n\n    - name: Build\n      shell: bash\n      run: $BUILD_CMD build --locked --release --target=\"${target}\"\n\n    - name: Set binary name & path\n      id: bin\n      shell: bash\n      
run: |\n        # Figure out suffix of binary\n        EXE_suffix=\"\"\n        case ${target} in\n          *-pc-windows-*) EXE_suffix=\".exe\" ;;\n        esac;\n\n        # Setup paths\n        BIN_NAME=\"${name}${EXE_suffix}\"\n        BIN_PATH=\"target/${target}/release/${BIN_NAME}\"\n\n        # Let subsequent steps know where to find the binary\n        echo \"BIN_PATH=${BIN_PATH}\" >> $GITHUB_OUTPUT\n        echo \"BIN_NAME=${BIN_NAME}\" >> $GITHUB_OUTPUT\n\n    - name: Set testing options\n      id: test-options\n      shell: bash\n      run: |\n        # test only library unit tests and binary for arm-type targets\n        unset CARGO_TEST_OPTIONS\n        case ${target} in\n        arm-* | aarch64-*)\n          CARGO_TEST_OPTIONS=\"--bin=${name}\" ;;\n        esac\n        echo \"CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}\" >> $GITHUB_OUTPUT\n\n    - name: Run tests\n      shell: bash\n      env:\n        cargo_test_options: ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}\n      run: $BUILD_CMD test --locked --target=\"${target}\" \"${cargo_test_options}\"\n\n    - name: Generate completions\n      id: completions\n      shell: bash\n      run: make completions\n\n    - name: Create tarball\n      id: package\n      shell: bash\n      env:\n        BIN_PATH: ${{ steps.bin.outputs.BIN_PATH }}\n        version: ${{ needs.crate_metadata.outputs.version }}\n      run: |\n        PKG_suffix=\".tar.gz\"\n        case ${target} in\n        *-pc-windows-*) PKG_suffix=\".zip\" ;;\n        esac\n        PKG_BASENAME=${name}-v${version}-${target}\n        PKG_NAME=${PKG_BASENAME}${PKG_suffix}\n        echo \"PKG_NAME=${PKG_NAME}\" >> $GITHUB_OUTPUT\n\n        PKG_STAGING=\"${CICD_INTERMEDIATES_DIR}/package\"\n        ARCHIVE_DIR=\"${PKG_STAGING}/${PKG_BASENAME}/\"\n        mkdir -p \"${ARCHIVE_DIR}\"\n\n        # Binary\n        cp \"${BIN_PATH}\" \"$ARCHIVE_DIR\"\n\n        # README, LICENSE and CHANGELOG files\n        cp \"README.md\" \"LICENSE-MIT\" 
\"LICENSE-APACHE\" \"CHANGELOG.md\" \"$ARCHIVE_DIR\"\n\n        # Man page\n        cp \"doc/${name}.1\" \"$ARCHIVE_DIR\"\n\n        # Autocompletion files\n        cp -r autocomplete \"${ARCHIVE_DIR}\"\n\n        # base compressed package\n        pushd \"${PKG_STAGING}/\" >/dev/null\n        case ${target} in\n          *-pc-windows-*) 7z -y a \"${PKG_NAME}\" \"${PKG_BASENAME}\"/* | tail -2 ;;\n          *) tar czf \"${PKG_NAME}\" \"${PKG_BASENAME}\"/* ;;\n        esac;\n        popd >/dev/null\n\n        # Let subsequent steps know where to find the compressed package\n        echo \"PKG_PATH=${PKG_STAGING}/${PKG_NAME}\" >> $GITHUB_OUTPUT\n\n    - name: Create Debian package\n      id: debian-package\n      shell: bash\n      if: startsWith(matrix.job.os, 'ubuntu')\n      run: bash scripts/create-deb.sh\n      env:\n        TARGET: ${{ matrix.job.target }}\n        DPKG_VERSION: ${{ needs.crate_metadata.outputs.version }}\n        BIN_PATH: ${{ steps.bin.outputs.BIN_PATH }}\n\n    - name: \"Artifact upload: tarball\"\n      id: upload-tarball\n      uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7\n      with:\n        name: ${{ steps.package.outputs.PKG_NAME }}\n        path: ${{ steps.package.outputs.PKG_PATH }}\n\n    - name: \"Artifact upload: Debian package\"\n      id: upload-deb\n      uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7\n      if: steps.debian-package.outputs.DPKG_NAME\n      with:\n        name: ${{ steps.debian-package.outputs.DPKG_NAME }}\n        path: ${{ steps.debian-package.outputs.DPKG_PATH }}\n\n    - name: Check for release\n      id: is-release\n      shell: bash\n      run: |\n        unset IS_RELEASE ; if [[ $GITHUB_REF =~ ^refs/tags/v[0-9].* ]]; then IS_RELEASE='true' ; fi\n        echo \"IS_RELEASE=${IS_RELEASE}\" >> $GITHUB_OUTPUT\n\n    - name: \"Attest artifact: tarball\"\n      uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4\n      if: 
steps.is-release.outputs.IS_RELEASE\n      with:\n        subject-name: ${{ steps.package.outputs.PKG_NAME }}\n        subject-digest: sha256:${{ steps.upload-tarball.outputs.artifact-digest }}\n\n    - name: \"Attest artifact: Debian package\"\n      uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4\n      if: 'steps.is-release.outputs.IS_RELEASE && steps.debian-package.outputs.DPKG_NAME'\n      with:\n        subject-name: ${{ steps.debian-package.outputs.DPKG_NAME }}\n        subject-digest: sha256:${{ steps.upload-deb.outputs.artifact-digest }}\n\n    - name: Publish archives and packages\n      uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0\n      if: steps.is-release.outputs.IS_RELEASE\n      with:\n        files: |\n          ${{ steps.package.outputs.PKG_PATH }}\n          ${{ steps.debian-package.outputs.DPKG_PATH }}\n      env:\n        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n\n  winget:\n    name: Publish to Winget\n    runs-on: ubuntu-latest\n    needs: build\n    if: startsWith(github.ref, 'refs/tags/v')\n    steps:\n      - uses: vedantmgoyal9/winget-releaser@4ffc7888bffd451b357355dc214d43bb9f23917e # v2\n        with:\n          identifier: sharkdp.fd\n          installers-regex: '-pc-windows-msvc\\.zip$'\n          token: ${{ secrets.WINGET_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "target/\n/autocomplete/\n**/*.rs.bk\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Unreleased\n\n## Bugfixes\n- Handle invalid working directories gracefully when using `--full-path`, see #1900 (@Xavrir).\n\n# 10.4.2\n\n## Bugfixes\n- Fixed performance regression due to `--ignore-contain`; see #1913 and #1914\n\n# 10.4.1\n\nThis is just a re-release of 10.4.0 due to an issue with the 10.4.0 release.\n\n# 10.4.0\n\n## Features\n- Add `--ignore-contain` option to ignore directories containing a named entry (e.g. to ignore [`CACHEDIR.TAG`](https://bford.info/cachedir/)); see #1727 (@fischman).\n\n## Bugfixes\n\n- Fix Windows hyperlink generation for paths with spaces. (#1872)\n\n- `--print0` combined with `--exec` will now print a `\\0` between the output of each entry. Note that if there are multiple instances\n  of `--exec`, the `\\0` will be between each _set_ of commands, _not_ between each individual command run. Fixes #1797.\n\n- Several bugs were fixed by an update to the `ignore` library used for handling ignore rules\n    - #1506\n    - #1667\n    - #1813\n\n## Changes\n\n- Minimum required rust version has been increased to 1.90.0. Notably, this means dropping full support for Intel Mac and Windows 7.\n- Statically link the CRT for MSVC builds via Cargo config to avoid runtime DLL dependencies, see #1874 (@FidelSch)\n\n# 10.3.0\n\n## Features\n\n- Add a hidden `--mindepth` alias for `--min-depth`. (#1617)\n\n\n## Bugfixes\n\n\n## Changes\n\n- Replace `humantime` crate and `chrono` crate with `jiff` crate, see #1690 (@sorairolake). This has some small changes to the\n  way dates given to options such as `--changed-within` and `--changed-before` including:\n  - 'M' no longer means \"month\", as that could be confusing with minutes. Use \"mo\", \"mos\", \"month\" or \"months\" instead.\n  - month and year now account for variability in the calendar rather than being a hard-coded number of seconds. 
That is probably\n    what you would expect, but it is a slight change in behavior.\n- aarch64 Windows was added to CI and release artifacts\n- Many dependencies were updated\n- Better support building on Illumos (there is no automated testing, but some known issues were fixed)\n\n## Other\n\nThis will be the last release that has been tested on x86_64 Mac OS, since GitHub is\ndropping support for runners with that hardware.\n\nIt may also be the last release to use a version of Rust with tier-1 support for\nx86_64/intel Macs and Windows 7.\n\n\n# 10.2.0\n\n## Features\n\n- Add --hyperlink option to add OSC 8 hyperlinks to output\n\n\n## Bugfixes\n\n\n## Changes\n\n- Build windows releases with rust 1.77 so windows 7 is still supported\n- Deb packages now include symlink for fdfind to be more consistent with official packages\n\n\n## Other\n\n# 10.1.0\n\n## Features\n\n- Allow passing an optional argument to `--strip-cwd-prefix` of \"always\", \"never\", or \"auto\". to force whether the cwd prefix is stripped or not.\n- Add a `--format` option which allows using a format template for direct output similar to the template used for `--exec`. (#1043)\n\n## Bugfixes\n- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)\n\n\n## Other\n\n- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.\n\n# v10.0.0\n\n## Features\n\n- Add `dir` as an alias to `directory` when using `-t` \\\\ `--type`, see #1460 and #1464 (@Ato2207).\n- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)\n- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature\n  was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. 
And there isn't really a good way for us to add\n  a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.\n    See #1457.\n\n## Bugfixes\n\n- Respect NO_COLOR environment variable with `--list-details` option. (#1455)\n- Fix bug that would cause hidden files to be included despite gitignore rules\n  if search path is \".\" (#1461, BurntSushi/ripgrep#2711).\n- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that\n  have a larger system page size than the system that the binary was built on. (#1547)\n- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.\n\n\n## Changes\n- Minimum supported rust version is now 1.77.2\n\n\n# v9.0.0\n\n## Performance\n\n- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`\n  crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.\n  [Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains\n  of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).\n\n- The default number of threads is now constrained to be at most 64. This should improve startup time on\n  systems with many CPU cores. 
(#1203, #1410, #1412, #1431) - @tmccombs and @tavianator\n\n- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY\n  use cases, see #1452 and #1313 (@tavianator).\n\n## Features\n\n- Support character and block device file types, see #1213 and #1336 (@cgzones)\n- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or\n  `--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)\n\n## Bugfixes\n\n- Fix `NO_COLOR` support, see #1421 (@acuteenvy)\n\n## Other\n\n- Fixed documentation typos, see #1409 (@marcospb19)\n\n## Thanks\n\nSpecial thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.\n\n\n\n# v8.7.1\n\n## Bugfixes\n\n- `-1` properly conflicts with the exec family of options.\n- `--max-results` overrides `-1`\n- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive\n- `--changed-within` now accepts a space as well as a \"T\" as the separator between date and time (due to update of chrono dependency)\n\n## Other\n- Many dependencies were updated\n- Some documentation was updated and fixed\n\n# v8.7.0\n\n## Features\n\n- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)\n\n## Bugfixes\n\n- Fix logic for when to use global ignore file. There was a bug where the only case where the\n  global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`\n  nor `--no-global-ignore-file` is passed. See #1209\n\n# v8.6.0\n\n## Features\n\n- New `--and <pattern>` option to add additional patterns that must also be matched. 
See #315\n  and #1139 (@Uthar)\n- Added `--changed-after` as alias for `--changed-within`, to have a name consistent with `--changed-before`.\n\n\n## Changes\n\n- Breaking: On Unix-like systems, `--type executable` now additionally checks if\n  the file is executable by the current user, see #1106 and #1169 (@ptipiak)\n\n\n## Bugfixes\n\n- Use fd instead of fd.exe for Powershell completions (when completions are generated on windows)\n\n\n## Other\n\n\n# v8.5.3\n\n## Bugfixes\n\n- Fix completion generation to not include full path of fd command\n- Fix build error if completions feature is disabled\n\n# v8.5.2\n\n## Bugfixes\n\n- Fix --owner option value parsing, see #1163 and #1164 (@tmccombs)\n\n\n# v8.5.1\n\n## Bugfixes\n\n- Fix --threads/-j option value parsing, see #1160 and #1162 (@sharkdp)\n\n\n# v8.5.0\n\n## Features\n\n- `--type executable`/`-t` now works on Windows, see #1051 and #1061 (@tavianator)\n\n## Bugfixes\n\n- Fixed differences between piped / non-piped output. This changes `fd`s behavior back to what we\n  had before 8.3.0, i.e. there will be no leading `./` prefixes, unless `--exec`/`-x`,\n  `--exec-batch`/`-X`, or `--print0`/`-0` are used. `--strip-cwd-prefix` can be used to strip that\n  prefix in those cases. See #1046, #1115, and #1121 (@tavianator)\n- `fd` could previously crash with a panic due to a race condition in Rusts standard library\n  (see https://github.com/rust-lang/rust/issues/39364). 
This has been fixed by switching to a different\n    message passing implementation, see #1060 and #1146 (@tavianator)\n- `fd`s memory usage will not grow unboundedly on huge directory trees, see #1146 (@tavianator)\n- fd returns an error when current working directory does not exist while a search path is\n  specified, see #1072 (@vijfhoek)\n- Improved \"command not found\" error message, see #1083 and #1109 (@themkat)\n- Preserve command exit codes when using `--exec-batch`, see #1136 and #1137 (@amesgen)\n\n## Changes\n\n- No leading `./` prefix for non-interactive results, see above.\n- fd now colorizes paths in parallel, significantly improving performance, see #1148 (@tavianator)\n- fd can now avoid `stat` syscalls even when colorizing paths, as long as the color scheme doesn't\n  require metadata, see #1148 (@tavianator)\n- The statically linked `musl` versions of `fd` now use `jemalloc`, leading to a significant performance\n  improvement, see #1062 (@tavianator)\n\n## Other\n\n- Added link back to GitHub in man page and `--help` text, see #1086 (@scottchiefbaker)\n- Major update in how `fd` handles command line options internally, see #1067 (@tmccombs)\n\n# v8.4.0\n\n## Features\n\n- Support multiple `--exec <cmd>` instances, see #406 and #960 (@tmccombs)\n\n## Bugfixes\n\n- \"Argument list too long\" errors can not appear anymore when using `--exec-batch`/`-X`, as the command invocations are automatically batched at the maximum possible size, even if `--batch-size` is not given. See #410 and #1020 (@tavianator)\n\n## Changes\n\n- Directories are now printed with an additional path separator at the end: `foo/bar/`, see #436 and #812 (@yyogo)\n- The `-u` flag was changed to be equivalent to `-HI` (previously, a single `-u` was only equivalent to `-I`). Additional `-u` flags are still allowed, but ignored. 
See #840 and #986 (@jacksontheel)\n\n## Other\n\n- Added installation instructions for RHEL8, see #989 (@ethsol)\n\n\n# v8.3.2\n\n## Bugfixes\n\n- Invalid absolute path on windows when searching from the drive root, see #931 and #936 (@gbarta)\n\n\n# v8.3.1\n\n## Bugfixes\n\n- Stop implying `--no-ignore-parent` when `--no-vcs-ignore` is supplied, see #907, #901, #908 (@tmccombs)\n- fd no longer waits for the whole traversal if the only matches arrive within max_buffer_time, see #868 and #895 (@tavianator)\n- `--max-results=1` now immediately quits after the first result, see #867\n- `fd -h` does not panic anymore when stdout is closed, see #897\n\n## Changes\n\n- Disable jemalloc on FreeBSD, see #896 (@xanderio)\n- Updated man page, see #912 (@rlue)\n- Updated zsh completions, see #932 (@tmccombs)\n\n\n# v8.3.0\n\n## Performance improvements\n\n- Colorized output is now significantly faster, see #720 and #853 (@tavianator)\n- Writing to stdout is now buffered if the output does not go to a TTY. This increases performance\n  when the output of `fd` is piped to another program or to a file, see #885 (@tmccombs, original\n  implementation by @sourlemon207)\n- File metadata is now cached between the different filters that require it (e.g. `--owner`,\n  `--size`), reducing the number of `stat` syscalls when multiple filters are used; see #863\n  (@tavianator, original implementation by @alexmaco)\n\n## Features\n\n- Don't buffer command output from `--exec` when using a single thread. 
See #522\n- Add new `-q, --quiet` flag, see #303 (@Asha20)\n- Add new `--no-ignore-parent` flag, see #787 (@will459)\n- Add new `--batch-size` flag, see #410 (@devonhollowood)\n- Add opposing command-line options, see #595 (@Asha20)\n- Add support for more filesystem indicators in `LS_COLORS`, see\n  https://github.com/sharkdp/lscolors/pull/35 (@tavianator)\n\n## Bugfixes\n\n- Always show the `./` prefix for search results unless the output is a TTY or `--strip-cwd-prefix` is set, see #760 and #861 (@jcaplan)\n- Set default path separator to `/` in MSYS, see #537 and #730 (@aswild)\n- fd cannot search files under a RAM disk, see #752\n- fd doesn't show substituted drive on Windows, see #365\n- Properly handle write errors to devices that are full, see #737\n- Use local time zone for time functions (`--change-newer-than`, `--change-older-than`), see #631 (@jacobmischka)\n- Support `--list-details` on more platforms (like BusyBox), see #783\n- The filters `--owner`, `--size`, and `--changed-{within,before}` now apply to symbolic links\n  themselves, rather than the link target, except when `--follow` is specified; see #863\n- Change time comparisons to be exclusive, see #794 (@jacobmischka)\n\n## Changes\n\n- Apply custom `--path-separator` to commands run with `--exec(-batch)` and `--list-details`, see #697 (@aswild)\n\n## Other\n\n- Many documentation updates\n\n\n# v8.2.1\n\nNo functional changes with respect to v8.2.0. 
Bugfix in the release process.\n\n# v8.2.0\n\n## Features\n\n- Add new `--prune` flag, see #535 (@reima)\n- Improved the usability of the time-based options, see #624 and #645 (@gorogoroumaru)\n- Add support for exact file sizes in the `--size` filter, see #669 and #696 (@Rogach)\n- `fd` now prints an error message if the search pattern requires a leading dot but\n  `--hidden` is not enabled (Unix only), see #615\n\n## Bugfixes\n\n- Avoid panic when performing limited searches in directories with restricted permissions, see #678\n- Invalid numeric command-line arguments are silently ignored, see #675\n- Disable jemalloc on Android, see #662\n- The `--help` text will be colorless if `NO_COLOR` has been set, see #600 (@xanonid)\n\n## Changes\n\n- If `LS_COLORS` is not set (e.g. on Windows), we now provide a more comprehensive default which\n  includes much more filetypes, see #604 and #682 (mjsir911).\n\n## Other\n\n- Added `zsh` completion files, see #654 and #189 (@smancill)\n\n# v8.1.1\n\n## Bugfixes\n\n- Support colored output on older Windows versions if either (1) `--color=always` is set or (2) the `TERM` environment variable is set. See #469\n\n# v8.1.0\n\n## Features\n\n- Add new `--owner [user][:group]` filter. See #307 (pull #581) (@alexmaco)\n- Add support for a global ignore file (`~/.config/fd/ignore` on Unix), see #575 (@soedirgo)\n- Do not exit immediately if one of the search paths is missing, see #587 (@DJRHails)\n\n## Bugfixes\n\n- Reverted a change from fd 8.0 that enabled colors on all Windows terminals (see below) in order to support older Windows versions again, see #577. Unfortunately, this re-opens #469\n- Fix segfault caused by jemalloc on macOS Catalina, see #498\n- Fix `--glob` behavior with empty pattern, see #579 (@SeamusConnor)\n- Fix `--list-details` on FreeBSD, DragonFly BSD, OpenBSD and NetBSD. 
See #573 (@t6)\n\n## Changes\n\n- Updated documentation for `--size`, see #584\n\n# v8.0.0\n\n## Features\n\n- Add a new `-l`/`--list-details` option to show more details about the search results. This is\n  basically an alias for `--exec-batch ls -l` with some additional `ls` options.\n  This can be used in order to:\n    * see metadata like permissions, owner, file size, modification times (#491)\n    * see symlink targets (#482)\n    * achieve a deterministic output order (#324, #196, #159)\n- Add a new `--max-results=<count>` option to limit the number of search results, see #472, #476 and #555\n  This can be useful to speed up searches in cases where you know that there are only N results.\n  Using this option is also (slightly) faster than piping to `head -n <count>` where `fd` can only\n  exit when it finds the search results `<count> + 1`.\n- Add the alias `-1` for `--max-results=1`, see #561. (@SimplyDanny).\n- Add new `--type socket` and `--type pipe` filters, see #511.\n- Add new `--min-depth <depth>` and `--exact-depth <depth>` options in addition to the existing option\n  to limit the maximum depth. See #404.\n- Support additional ANSI font styles in `LS_COLORS`: faint, slow blink, rapid blink, dimmed, hidden and strikethrough.\n\n## Bugfixes\n\n- Preserve non-UTF8 filenames: invalid UTF-8 filenames are now properly passed to child-processes\n  when using `--exec`, `--exec-batch` or `--list-details`. In `fd`'s output, we replace non-UTF-8\n  sequences with the \"�\" character. However, if the output of `fd` goes to another process, we\n  print the actual bytes of the filename. For more details, see #558 and #295.\n- `LS_COLORS` entries with unsupported font styles are not completely ignored, see #552\n\n## Changes\n\n- Colored output will now be enabled by default on older Windows versions.\n  This allows the use of colored output if the terminal supports it (e.g.\n  MinTTY, Git Bash). 
On the other hand, this will be a regression for users\n  on older Windows versions with terminals that do not support ANSI escape\n  sequences. Affected users can use an alias `fd=\"fd --color=never\"` to\n  continue using `fd` without colors. There is no change of behavior for\n  Windows 10. See #469.\n- When using `--glob` in combination with `--full-path`, a `*` character does not match a path\n  separation character (`/` or `\\\\`) anymore. You can use `**` for that. This allows things like\n  `fd -p -g '/some/base/path/*/*/*.txt'` which would previously match to arbitrary depths (instead\n  of exactly two folders below `/some/base/path`. See #404.\n- \"Legacy\" support to use `fd -exec` (with a single dash) has been removed. Use `fd -x` or\n  `fd --exec` instead.\n- Overall improved error handling and error messages.\n\n\n## Other\n\n- Korean translation of the README, see: [한국어](https://github.com/spearkkk/fd-kor) (@spearkkk)\n\n\n# v7.5.0\n\n## Features\n\n- Added `--one-file-system` (aliases: `--mount`, `--xdev`) to not cross file system boundaries on Unix and Windows, see #507 (@FallenWarrior2k).\n- Added `--base-directory` to change the working directory in which `fd` is run, see #509 and #475 (@hajdamak).\n- `fd` will not use colored output if the `NO_COLOR` environment variable is set, see #550 and #551 (@metadave).\n- `fd --exec` will return exit code 1 if one of the executed commands fails, see #526 and #531 (@fusillicode and @Giuffre)\n\n## Bug Fixes\n\n- Fixed 'command not found' error when using zsh completion, see #487 (@barskern).\n- `fd -L` should include broken symlinks, see #357 and #497 (@tommilligan, @neersighted and @sharkdp)\n- Display directories even if we don't have permission to enter, see #437 (@sharkdp)\n\n## Changes\n\n- A flag can now be passed multiple times without producing an error, see #488 and #496 (@rootbid).\n- Search results are sorted when using the `-X` option to match the behaviour of piping to `xargs`, see #441 and 
#524 (@Marcoleni @crash-g).\n\n\n# v7.4.0\n\n## Performance improvements\n\n- Reduce number of `stat` syscalls, improving the performance for searches where file metadata is\n  required (`--type`, `--size`, `--changed-within`, …), see #434 (@tavianator)\n- Use jemalloc by default, improving the performance for almost all searches, see #481. Note that\n  Windows and `*musl*` builds do not profit from this.\n\n## Features\n\n- Added a new `-g`/`--glob` option to switch to glob-based searches (instead of regular expression\n  based searches). This is accompanied by a new `--regex` option that can be used to switch back,\n  if users want to `alias fd=\"fd --glob\"`. See #284\n- Added a new `--path-separator <sep>` option which can be useful for Windows users who\n  want/need `fd` to use `/` instead of `\\`, see #428 and #153 (@mookid)\n- Added support for hidden files on Windows, see #379\n- When `fd` is run with the `--exec-batch`/`-X` option, it now exposes the exit status of the\n  command that was run, see #333.\n- Exit immediately when Ctrl-C has been pressed twice, see #423\n\n## Bugfixes\n\n- Make `--changed-within`/`--changed-before` work for directories, see #470\n\n## Other\n\n- Pre-built `fd` binaries should now be available for `armhf` targets, see #457 (@detly)\n- `fd` is now available on Alpine Linux, see #451 (@5paceToast)\n- `fd` is now in the official FreeBSD repositories, see #412 (@t6)\n- Added OpenBSD install instructions, see #421 (@evitalis)\n- Added metadata to the Debian package, see #416 (@cathalgarvey)\n- `fd` can be installed via npm, see #438 (@pablopunk)\n\n\n# v7.3.0\n\n## Features\n\n- New `--exec-batch <cmd>`/`-X <cmd>` option for batch execution of commands, see #360 (@kimsnj).\n  This allows you to do things like:\n  ``` bash\n  fd … -X vim  # open all search results in vim (or any other editor)\n  fd … -X ls -l  # view detailed stats about the search results with 'ls'\n  fd -e svg -X inkscape  # open all SVG files in Inkscape\n  ```\n- 
Support for 24-bit color codes (when specified via `LS_COLORS`) as well as\n  different font styles (bold, italic, underline).\n\n## Changes\n\n- A few performance improvements, in particular when printing lots of colorized\n  results to the console, see #370\n- The `LS_COLORS` handling has been \"outsourced\" to a separate crate (https://github.com/sharkdp/lscolors) that is now being used by other tools as well: [fselect](https://github.com/jhspetersson/fselect), [lsd](https://github.com/Peltoche/lsd/pull/84). For details, see #363.\n\n## Other\n\n- `fd` will be available in Ubuntu Disco Dingo (19.04), see #373 (@sylvestre)\n- This release should come with a static ARM binary (`arm-unknown-linux-musleabihf`), see #320 (@duncanfinney)\n- Various documentation improvements, see #389\n\n## Thanks\n\nSpecial thanks to @alexmaco for his awesome work on refactoring and code improvements! (see #401, #398, and #383)\n\n# v7.2.0\n\n## Features\n\n* Added support for filtering by file modification time by adding two new options `--changed-before <date|duration>` and `--changed-within <..>`. For more details, see the `--help` text, the man page, the relevant issue #165 and the PR #339 (@kimsnj)\n* Added `--show-errors` option to enable the display of filesystem error messages such as \"permission denied\", see #311 (@psinghal20 and @majecty)\n* Added `--maxdepth` as a (hidden) alias for `--max-depth`, see #323 (@mqudsi)\n* Added `--search-path` option which can be supplied to replace the positional `path` argument at any position.\n\n## Changes\n\n* Loosen strict handling of missing `--ignore-file`, see #280 (@psinghal20)\n* Re-enabled `.ignore` files, see #156.\n\n## Bugfixes\n\n* `fd` could previously get stuck when run from the root directory in the\n  presence of zombie processes. This curious bug has been fixed in Rust 1.29 and higher. 
For more details, see #288, [rust-lang/rust#50619](https://github.com/rust-lang/rust/issues/50619) and [the fix](https://github.com/rust-lang/rust/pull/50630)\n\n## Other\n\n* `fd` has officially landed in Debian! See #345 for details. Thanks goes to @sylvestre, @paride and possibly others I don't know about.\n* Added Chinese translation of README (@chinanf-boy)\n\n## Thanks\n\nA special thanks goes to @joshleeb for his amazing improvements throughout\nthe code base (new tests, refactoring work and various other things)!\n\n\n# v7.1.0\n\n## Features\n\n* Added `--size` filter option, see #276 (@stevepentland, @JonathanxD and @alexmaco)\n* Added `--type empty` (or `-t e`) to search for empty files and/or directories, see #273\n\n## Changes\n\n* With the new version, `.gitignore` files will only be respected in Git repositories, not outside.\n* A few performance improvements for `--type` searches, see 641976cf7ad311ba741571ca8b7f02b2654b6955 and 50a2bab5cd52d26d4a3bc786885a2c270ed3b227\n\n## Other\n\n* Starting with this release, we will offer pre-built ARM binaries, see #244\n* Added instructions on how to use `fd` with `emacs`, see #282 (@redguardtoo)\n* `fd` is now in the official openSUSE repositories, see #275 (@avindra)\n* `fd` is now available via MacPorts, see #291 (@raimue)\n\n\n# v7.0.0\n\n## Features\n\n* Added `--type executable` (or `-t x`) to search for executable files only, see #246 (@PramodBisht)\n* Added support for `.fdignore` files, see #156 and #241.\n* Added `--ignore-file` option to add custom ignore files, see #156.\n* Suggest `--fixed-strings` on invalid regular expressions, see #234 (@PramodBisht)\n* Detect when user supplied path instead of pattern, see #235.\n\n## Changes\n\n* `.ignore` and `.rgignore` files are not parsed anymore. 
Use `.fdignore` files\n  or add custom files via `--ignore-file` instead.\n* Updated to `regex-syntax` 0.5 (@cuviper)\n\n## Bugfixes\n\n* Properly normalize absolute paths, see #268\n* Invalid utf8 filenames displayed when `-e` is used, see #250\n* If `--type` is used, fifos/sockets/etc. are always shown, see #260\n\n## Other\n\n* Packaging:\n    * The Arch Linux package is now simply called `fd`.\n    * There is now a `fd` ebuild for Gentoo Linux.\n    * There is a `scoop` package for `fd` (Windows).\n    * There is a `Chocolatey` package for `fd` (Windows).\n    * There is a Fedora `copr` package for `fd`.\n\n\n# v6.3.0\n\n## Features\n\n* Files with multiple extensions can now be found via `--extension`/`-e`, see #214 (@althonos)\n  ``` bash\n  > fd -e tar.gz\n  ```\n\n* Added new `-F`/`--fixed-strings`/`--literal` option that treats the pattern as a literal string instead of a regular expression, see #157\n\n  ``` bash\n  > fd -F 'file(1).txt'\n  ```\n\n* Allow `-exec` to work as `--exec`, see #226 (@stevepentland)\n\n## Bugfixes\n\n* Fixed `Ctrl-C` handling when using `--exec`, see #224 (@Doxterpepper)\n\n* Fixed wrong file owner for files in deb package, see #213\n\n## Other\n\n* Replaced old gif by a fancy new SVG screencast (@marionebl)\n* Updated [benchmark results](https://github.com/sharkdp/fd#benchmark) (fd has become faster in the meantime!). There is a new repository that hosts several benchmarking scripts for fd: https://github.com/sharkdp/fd-benchmarks\n\n\n# v6.2.0\n\n## Features\n\n* Support for filtering by multiple file extensions and multiple file types, see #199 and #177\n  (@tkadur).\n\n  For example, it's possible to search for C++ source or header files:\n  ``` bash\n  > fd -e cpp -e c -e cxx -e h pattern\n  ```\n\n## Changes\n\n* The size of the output buffer (for sorting search results) is now limited to 1000 entries. 
This\n  improves the search speed significantly if there are a lot of results, see #191 (@sharkdp).\n\n## Bugfixes\n\n* Fix a bug where long-running searches could not be killed via Ctrl-C, see #210 (@Doxterpepper)\n* fd's exit codes are now in accordance with Unix standards, see #201 (@Doxterpepper)\n\n## Other\n\n* Bash, zsh and fish completion should now work with the Ubuntu `.deb` packages, see #195 and #209\n  (@tmccombs and @sharkdp)\n* There is a new section on how to set up `fzf` to use `fd` in the\n  [README](https://github.com/sharkdp/fd#using-fd-with-fzf), see #168.\n\n\n# v6.1.0\n\n## Features\n\n* Support for multiple search paths, see #166 (@Doxterpepper)\n* Added `--no-ignore-vcs` option to disable `.gitignore` and other VCS ignore files,\n  without disabling `.ignore` files - see #156 (@ptzz).\n\n## Bugfixes\n\n* Handle terminal signals, see #128 (@Doxterpepper)\n* Fixed hang on `--exec` when user input was required, see #178 and #193 (@reima)\n\n## Other\n\n* Debian packages are now created via Travis CI and should be available for this and all\n  future releases (@tmccombs).\n* fd is now available on Void Linux (@maxice8)\n* The minimum required Rust version is now 1.20\n\n## Thanks\n\n@Doxterpepper deserves a special mention for his great work that is included in this release and\nfor the support in ticket discussions and concerning Travis CI fixes. Thank you very much!\n\nThanks also go out to @tmccombs for the work on Debian packages and for reviewing a lot of pull requests!\n\n# v6.0.0\n\n## Changes\n\n- The `--exec`/`-x` option does not spawn an intermediate shell anymore. This improves the\n  performance of parallel command execution and fixes a whole class of (present and potentially\n  future) problems with shell escaping. The drawback is that shell commands cannot directly be\n  called with `--exec`. See #155 for the full discussion. 
These changes have been implemented by\n  @reima (Thanks!).\n\n## Bugfixes\n\n- `--exec` does not escape cmd.exe metacharacters on Windows (see #155, as above).\n\n## Other\n\n* *fd* is now available in the FreeBSD ports (@andoriyu)\n* The minimal `rustc` version is now checked when building with `cargo`, see #164 (@matematikaadit)\n* The output directory for the shell completion files is created if it does not exist (@andoriyu)\n\n\n# v5.0.0\n\n## Features\n\n* Added new `--exec`, `-x` option for parallel command execution (@mmstick, see #84 and #116). See the corresponding [README section](https://github.com/sharkdp/fd#parallel-command-execution) for an introduction.\n* Auto-disable color output on unsupported Windows shells like `cmd.exe` (@iology, see #129)\n* Added the `--exclude`, `-X` option to suppress certain files/directories in the search results\n  (see #89).\n* Added ripgrep aliases `-u` and `-uu` for `--no-ignore` and `--no-ignore --hidden`, respectively\n  (@unsignedint, see #92)\n* Added `-i`, `--ignore-case` (@iology, see #95)\n* Made smart case really smart (@reima, see #103)\n* Added RedoxOS support (@goyox86, see #131)\n\n## Changes\n\n* The dot `.` can now match newlines in file names (@iology, see #111)\n* The short `--type` argument for symlinks has been changed from `s` to `l` (@jcpetkovich, see #83)\n\n## Bugfixes\n\n* Various improvements in root-path and symlink handling (@iology, see #82, #107, and #113)\n* Fixed absolute path handling on Windows (@reima, #93)\n* Fixed: current directory not included when using relative path (see #81)\n* Fixed `--type` behavior for unknown file types (@iology, see #150)\n* Some fixes around `--exec` (@iology, see #142)\n\n## Other\n\n* Major updates and bugfixes to our continuous integration and deployment tooling on Travis\n  (@matematikaadit, see #149, #145, #133)\n* Code style improvements & automatic style checking via `rustfmt` on Travis (@Detegr, see #99)\n* Added a man page (@pickfire, see #77)\n* 
*fd* has been relicensed under the dual license MIT/Apache-2.0 (@Detegr, see #105)\n* Major refactorings and code improvements (Big thanks to @gsquire, @reima, @iology)\n* First version of [`CONTRIBUTING`](https://github.com/sharkdp/fd/blob/master/CONTRIBUTING.md) guidelines\n* There is now a Nix package (@mehandes)\n* *fd* is now in the official Arch Linux repos (@cassava)\n* Improved tooling around shell completion files (@ImbaKnugel, see #124)\n* Updated tutorial in the [`README`](https://github.com/sharkdp/fd/blob/master/README.md)\n* The minimum required version of Rust has been bumped to 1.19.\n\n## Thanks\n\nA *lot* of things have happened since the last release and I'd like to thank all contributors for their great support. I'd also like to thank those that have contributed by reporting bugs and by posting feature requests.\n\nI'd also like to take this chance to say a special Thank You to a few people that have stood out in one way or another: To @iology, for contributing a multitude of bugfixes, improvements and new features. To @reima and @Detegr for their continuing great support. To @mmstick, for implementing the most advanced new feature of *fd*. And to @matematikaadit for the CI/tooling upgrades.\n\n\n# v4.0.0\n\n## Features\n\n* Added filtering by file extension, for example `fd -e txt`, see #56 (@reima)\n* Add option to force colored output: `--color always`, see #49 (@Detegr)\n* Generate Shell completions for Bash, ZSH, Fish and Powershell, see #64 (@ImbaKnugel)\n* Better & extended `--help` text (@abaez and @Detegr)\n* Proper Windows support, see #70\n\n## Changes\n\n* The integration tests have been re-written in Rust :sparkles:, making them platform-independent and easily callable via `cargo test` - see #65  (many thanks to @reima!)\n* New tutorial in the README (@deg4uss3r)\n* Reduced number of `stat` syscalls for each result from 3 to 1, see #36.\n* Enabled Appveyor CI\n\n# v3.1.0\n\n## Features\n- Added file type filtering, e.g. 
`find --type directory` or `find -t f` (@exitium)\n\n# v3.0.0\n\n## Features\n- Directories are now traversed in parallel, leading to significant performance improvements (see [benchmarks](https://github.com/sharkdp/fd#benchmark))\n- Added `--print0` option (@michaelmior)\n- Added AUR packages (@wezm)\n\n## Changes\n- Changed short flag for `--follow` from `-f` to `-L` (consistency with `ripgrep`)\n\n# v2.0.0\n\n* Changed `--sensitive` to `--case-sensitive`\n* Changed `--absolute` to `--absolute-path`\n* Throw an error if root directory is not existent, see #39\n* Use absolute paths if the root dir is an absolute path, see #40\n* Handle invalid UTF-8, see #34 #38\n* Support `-V`, `--version` by switching from `getopts` to `clap`.\n\nMisc:\n* It's now possible to install `fd` via homebrew on macOS: `brew install fd`.\n\n# v1.1.0\n\n- Windows compatibility (@sebasv), see #29 #35\n- Safely exit on broken output pipes (e.g.: usage with `head`, `tail`, ..), see #24\n- Backport for rust 1.16, see #23\n\n# v1.0.0\n\n* Respect `.(git)ignore` files\n* Use `LS_COLORS` environment variable directly, instead of `~/.dir_colors` file.\n* Added unit and integration tests\n* Added optional second argument (search path)\n\n# v0.3.0\n\n-  Parse dircolors files, closes #20\n-  Colorize each path component, closes #19\n-  Add short command line option for --hidden, see #18\n\n# v0.2.0\n\n-  Option to follow symlinks, disable colors, closes #16, closes #17\n- `--filename` instead of `--full-path`\n-  Option to search hidden directories, closes #12\n-  Configurable search depth, closes #13\n-  Detect interactive terminal, closes #11\n\n# v0.1.0\n\nInitial release\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "## Contributing to *fd*\n\n**Thank you very much for considering contributing to this project!**\n\nWe welcome any form of contribution:\n\n  * New issues (feature requests, bug reports, questions, ideas, ...)\n  * Pull requests (documentation improvements, code improvements, new features, ...)\n\n**Note**: Before you take the time to open a pull request, please open a ticket first. This will\ngive us the chance to discuss any potential changes first.\n\n## Add an entry to the changelog\n\nIf your contribution changes the behavior of `fd` (as opposed to a typo-fix\nin the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file\nand describe your changes. This makes the release process much easier and\ntherefore helps to get your changes into a new `fd` release faster.\n\nThe top of the `CHANGELOG` contains an *\"Upcoming release\"* section with a few\nsubsections (Features, Bugfixes, …). Please add your entry to the subsection\nthat best describes your change.\n\nEntries follow this format:\n```\n- Short description of what has been changed, see #123 (@user)\n```\nHere, `#123` is the number of the original issue and/or your pull request.\nPlease replace `@user` by your GitHub username.\n\n## Important links\n\n  * [Open issues](https://github.com/sharkdp/fd/issues)\n  * [Open pull requests](https://github.com/sharkdp/fd/pulls)\n  * [Development section in the README](https://github.com/sharkdp/fd#development)\n  * [fd on crates.io](https://crates.io/crates/fd-find)\n  * [LICENSE-APACHE](https://github.com/sharkdp/fd/blob/master/LICENSE-APACHE) and [LICENSE-MIT](https://github.com/sharkdp/fd/blob/master/LICENSE-MIT)\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[package]\nauthors = [\"David Peter <mail@david-peter.de>\"]\ncategories = [\"command-line-utilities\"]\ndescription = \"fd is a simple, fast and user-friendly alternative to find.\"\nexclude = [\"/benchmarks/*\"]\nhomepage = \"https://github.com/sharkdp/fd\"\ndocumentation = \"https://docs.rs/fd-find\"\nkeywords = [\n    \"search\",\n    \"find\",\n    \"file\",\n    \"filesystem\",\n    \"tool\",\n]\nlicense = \"MIT OR Apache-2.0\"\nname = \"fd-find\"\nreadme = \"README.md\"\nrepository = \"https://github.com/sharkdp/fd\"\nversion = \"10.4.2\"\nedition= \"2024\"\nrust-version = \"1.90.0\"\n\n[badges.appveyor]\nrepository = \"sharkdp/fd\"\n\n[badges.travis-ci]\nrepository = \"sharkdp/fd\"\n\n[[bin]]\nname = \"fd\"\npath = \"src/main.rs\"\n\n[dependencies]\naho-corasick = \"1.1\"\nnu-ansi-term = \"0.50\"\nargmax = \"0.4.0\"\nignore = \"0.4.25\"\nregex = \"1.12.2\"\nregex-syntax = \"0.8\"\nctrlc = \"3.5\"\nglobset = \"0.4\"\nanyhow = \"1.0\"\netcetera = \"0.11\"\nnormpath = \"1.1.1\"\ncrossbeam-channel = \"0.5.15\"\nclap_complete = {version = \"4.5.62\", optional = true}\nfaccess = \"0.2.4\"\njiff = \"0.2.18\"\n\n[dependencies.clap]\nversion = \"4.5.54\"\nfeatures = [\"suggestions\", \"color\", \"wrap_help\", \"cargo\", \"derive\"]\n\n[dependencies.lscolors]\nversion = \"0.21\"\ndefault-features = false\nfeatures = [\"nu-ansi-term\"]\n\n[target.'cfg(unix)'.dependencies]\nnix = { version = \"0.31.1\", default-features = false, features = [\"signal\", \"user\", \"hostname\"] }\n\n[target.'cfg(all(unix, not(target_os = \"redox\")))'.dependencies]\nlibc = \"0.2\"\n\n# FIXME: Re-enable jemalloc on macOS\n# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS\n# Catalina. 
See https://github.com/sharkdp/fd/issues/498 for details.\n# This has to be kept in sync with src/main.rs where the allocator for\n# the program is set.\n[target.'cfg(all(not(windows), not(target_os = \"android\"), not(target_os = \"macos\"), not(target_os = \"freebsd\"), not(target_os = \"openbsd\"), not(target_os = \"illumos\"), not(all(target_env = \"musl\", target_pointer_width = \"32\")), not(target_arch = \"riscv64\")))'.dependencies]\ntikv-jemallocator = {version = \"0.6.0\", optional = true}\n\n[dev-dependencies]\ndiff = \"0.1\"\ntempfile = \"3.24\"\nfiletime = \"0.2\"\ntest-case = \"3.3\"\n\n[profile.dev]\ndebug = \"line-tables-only\"\n\n[profile.dev.package.\"*\"]\ndebug = false\n\n[profile.debugging]\ninherits = \"dev\"\ndebug = true\n\n[profile.release]\nlto = true\nstrip = true\ncodegen-units = 1\n\n[features]\nuse-jemalloc = [\"tikv-jemallocator\"]\ncompletions = [\"clap_complete\"]\nbase = [\"use-jemalloc\"]\ndefault = [\"use-jemalloc\", \"completions\"]\n\n[package.metadata.binstall]\npkg-url = \"{ repo }/releases/download/v{ version }/{ name }-v{ version }-{ target }.{ archive-format }\"\nbin-dir = \"{ bin }-v{ version }-{ target }/{ bin }{ binary-ext }\"\npkg-fmt = \"tgz\"\n\n[package.metadata.binstall.overrides.x86_64-pc-windows-msvc]\npkg-fmt = \"zip\"\n\n[package.metadata.binstall.overrides.x86_64-pc-windows-gnu]\npkg-fmt = \"zip\"\n\n[package.metadata.binstall.overrides.i686-pc-windows-msvc]\npkg-fmt = \"zip\"\n\n[package.metadata.binstall.overrides.aarch64-pc-windows-msvc]\npkg-fmt = \"zip\"\n"
  },
  {
    "path": "Cross.toml",
    "content": "# https://github.com/sharkdp/fd/issues/1085\n[target.aarch64-unknown-linux-gnu.env]\npassthrough = [\"JEMALLOC_SYS_WITH_LG_PAGE=16\"]\n\n[target.aarch64-unknown-linux-musl.env]\npassthrough = [\"JEMALLOC_SYS_WITH_LG_PAGE=16\"]\n"
  },
  {
    "path": "LICENSE-APACHE",
    "content": "                              Apache License\n                        Version 2.0, January 2004\n                     http://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n1. Definitions.\n\n   \"License\" shall mean the terms and conditions for use, reproduction,\n   and distribution as defined by Sections 1 through 9 of this document.\n\n   \"Licensor\" shall mean the copyright owner or entity authorized by\n   the copyright owner that is granting the License.\n\n   \"Legal Entity\" shall mean the union of the acting entity and all\n   other entities that control, are controlled by, or are under common\n   control with that entity. For the purposes of this definition,\n   \"control\" means (i) the power, direct or indirect, to cause the\n   direction or management of such entity, whether by contract or\n   otherwise, or (ii) ownership of fifty percent (50%) or more of the\n   outstanding shares, or (iii) beneficial ownership of such entity.\n\n   \"You\" (or \"Your\") shall mean an individual or Legal Entity\n   exercising permissions granted by this License.\n\n   \"Source\" form shall mean the preferred form for making modifications,\n   including but not limited to software source code, documentation\n   source, and configuration files.\n\n   \"Object\" form shall mean any form resulting from mechanical\n   transformation or translation of a Source form, including but\n   not limited to compiled object code, generated documentation,\n   and conversions to other media types.\n\n   \"Work\" shall mean the work of authorship, whether in Source or\n   Object form, made available under the License, as indicated by a\n   copyright notice that is included in or attached to the work\n   (an example is provided in the Appendix below).\n\n   \"Derivative Works\" shall mean any work, whether in Source or Object\n   form, that is based on (or derived from) the Work and for which the\n   editorial revisions, 
annotations, elaborations, or other modifications\n   represent, as a whole, an original work of authorship. For the purposes\n   of this License, Derivative Works shall not include works that remain\n   separable from, or merely link (or bind by name) to the interfaces of,\n   the Work and Derivative Works thereof.\n\n   \"Contribution\" shall mean any work of authorship, including\n   the original version of the Work and any modifications or additions\n   to that Work or Derivative Works thereof, that is intentionally\n   submitted to Licensor for inclusion in the Work by the copyright owner\n   or by an individual or Legal Entity authorized to submit on behalf of\n   the copyright owner. For the purposes of this definition, \"submitted\"\n   means any form of electronic, verbal, or written communication sent\n   to the Licensor or its representatives, including but not limited to\n   communication on electronic mailing lists, source code control systems,\n   and issue tracking systems that are managed by, or on behalf of, the\n   Licensor for the purpose of discussing and improving the Work, but\n   excluding communication that is conspicuously marked or otherwise\n   designated in writing by the copyright owner as \"Not a Contribution.\"\n\n   \"Contributor\" shall mean Licensor and any individual or Legal Entity\n   on behalf of whom a Contribution has been received by Licensor and\n   subsequently incorporated within the Work.\n\n2. Grant of Copyright License. Subject to the terms and conditions of\n   this License, each Contributor hereby grants to You a perpetual,\n   worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n   copyright license to reproduce, prepare Derivative Works of,\n   publicly display, publicly perform, sublicense, and distribute the\n   Work and such Derivative Works in Source or Object form.\n\n3. Grant of Patent License. 
Subject to the terms and conditions of\n   this License, each Contributor hereby grants to You a perpetual,\n   worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n   (except as stated in this section) patent license to make, have made,\n   use, offer to sell, sell, import, and otherwise transfer the Work,\n   where such license applies only to those patent claims licensable\n   by such Contributor that are necessarily infringed by their\n   Contribution(s) alone or by combination of their Contribution(s)\n   with the Work to which such Contribution(s) was submitted. If You\n   institute patent litigation against any entity (including a\n   cross-claim or counterclaim in a lawsuit) alleging that the Work\n   or a Contribution incorporated within the Work constitutes direct\n   or contributory patent infringement, then any patent licenses\n   granted to You under this License for that Work shall terminate\n   as of the date such litigation is filed.\n\n4. Redistribution. You may reproduce and distribute copies of the\n   Work or Derivative Works thereof in any medium, with or without\n   modifications, and in Source or Object form, provided that You\n   meet the following conditions:\n\n   (a) You must give any other recipients of the Work or\n       Derivative Works a copy of this License; and\n\n   (b) You must cause any modified files to carry prominent notices\n       stating that You changed the files; and\n\n   (c) You must retain, in the Source form of any Derivative Works\n       that You distribute, all copyright, patent, trademark, and\n       attribution notices from the Source form of the Work,\n       excluding those notices that do not pertain to any part of\n       the Derivative Works; and\n\n   (d) If the Work includes a \"NOTICE\" text file as part of its\n       distribution, then any Derivative Works that You distribute must\n       include a readable copy of the attribution notices contained\n       within such NOTICE file, excluding 
those notices that do not\n       pertain to any part of the Derivative Works, in at least one\n       of the following places: within a NOTICE text file distributed\n       as part of the Derivative Works; within the Source form or\n       documentation, if provided along with the Derivative Works; or,\n       within a display generated by the Derivative Works, if and\n       wherever such third-party notices normally appear. The contents\n       of the NOTICE file are for informational purposes only and\n       do not modify the License. You may add Your own attribution\n       notices within Derivative Works that You distribute, alongside\n       or as an addendum to the NOTICE text from the Work, provided\n       that such additional attribution notices cannot be construed\n       as modifying the License.\n\n   You may add Your own copyright statement to Your modifications and\n   may provide additional or different license terms and conditions\n   for use, reproduction, or distribution of Your modifications, or\n   for any such Derivative Works as a whole, provided Your use,\n   reproduction, and distribution of the Work otherwise complies with\n   the conditions stated in this License.\n\n5. Submission of Contributions. Unless You explicitly state otherwise,\n   any Contribution intentionally submitted for inclusion in the Work\n   by You to the Licensor shall be under the terms and conditions of\n   this License, without any additional terms or conditions.\n   Notwithstanding the above, nothing herein shall supersede or modify\n   the terms of any separate license agreement you may have executed\n   with Licensor regarding such Contributions.\n\n6. Trademarks. This License does not grant permission to use the trade\n   names, trademarks, service marks, or product names of the Licensor,\n   except as required for reasonable and customary use in describing the\n   origin of the Work and reproducing the content of the NOTICE file.\n\n7. Disclaimer of Warranty. 
Unless required by applicable law or\n   agreed to in writing, Licensor provides the Work (and each\n   Contributor provides its Contributions) on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n   implied, including, without limitation, any warranties or conditions\n   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n   PARTICULAR PURPOSE. You are solely responsible for determining the\n   appropriateness of using or redistributing the Work and assume any\n   risks associated with Your exercise of permissions under this License.\n\n8. Limitation of Liability. In no event and under no legal theory,\n   whether in tort (including negligence), contract, or otherwise,\n   unless required by applicable law (such as deliberate and grossly\n   negligent acts) or agreed to in writing, shall any Contributor be\n   liable to You for damages, including any direct, indirect, special,\n   incidental, or consequential damages of any character arising as a\n   result of this License or out of the use or inability to use the\n   Work (including but not limited to damages for loss of goodwill,\n   work stoppage, computer failure or malfunction, or any and all\n   other commercial damages or losses), even if such Contributor\n   has been advised of the possibility of such damages.\n\n9. Accepting Warranty or Additional Liability. While redistributing\n   the Work or Derivative Works thereof, You may choose to offer,\n   and charge a fee for, acceptance of support, warranty, indemnity,\n   or other liability obligations and/or rights consistent with this\n   License. 
However, in accepting such obligations, You may act only\n   on Your own behalf and on Your sole responsibility, not on behalf\n   of any other Contributor, and only if You agree to indemnify,\n   defend, and hold each Contributor harmless for any liability\n   incurred by, or claims asserted against, such Contributor by reason\n   of your accepting any such warranty or additional liability.\n\nEND OF TERMS AND CONDITIONS\n\nAPPENDIX: How to apply the Apache License to your work.\n\n   To apply the Apache License to your work, attach the following\n   boilerplate notice, with the fields enclosed by brackets \"[]\"\n   replaced with your own identifying information. (Don't include\n   the brackets!)  The text should be enclosed in the appropriate\n   comment syntax for the file format. We also recommend that a\n   file or class name and description of purpose be included on the\n   same \"printed page\" as the copyright notice for easier\n   identification within third-party archives.\n\nCopyright 2017-2020 fd developers\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n\thttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n"
  },
  {
    "path": "LICENSE-MIT",
    "content": "MIT License\n\nCopyright (c) 2017-present The fd developers\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "Makefile",
    "content": "PROFILE=release\nEXE=target/$(PROFILE)/fd\nprefix=/usr/local\nbindir=$(prefix)/bin\ndatadir=$(prefix)/share\nexe_name=fd\n\n$(EXE): Cargo.toml src/**/*.rs\n\tcargo build --profile $(PROFILE) --locked\n\n.PHONY: completions\ncompletions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/_fd.ps1 autocomplete/_fd\n\ncomp_dir=@mkdir -p autocomplete\n\nautocomplete/fd.bash: $(EXE)\n\t$(comp_dir)\n\t$(EXE) --gen-completions bash > $@\n\nautocomplete/fd.fish: $(EXE)\n\t$(comp_dir)\n\t$(EXE) --gen-completions fish > $@\n\nautocomplete/_fd.ps1: $(EXE)\n\t$(comp_dir)\n\t$(EXE) --gen-completions powershell > $@\n\nautocomplete/_fd: contrib/completion/_fd\n\t$(comp_dir)\n\tcp $< $@\n\ninstall: $(EXE) completions\n\tinstall -Dm755 $(EXE) $(DESTDIR)$(bindir)/fd\n\tinstall -Dm644 autocomplete/fd.bash $(DESTDIR)/$(datadir)/bash-completion/completions/$(exe_name)\n\tinstall -Dm644 autocomplete/fd.fish $(DESTDIR)/$(datadir)/fish/vendor_completions.d/$(exe_name).fish\n\tinstall -Dm644 autocomplete/_fd $(DESTDIR)/$(datadir)/zsh/site-functions/_$(exe_name)\n\tinstall -Dm644 doc/fd.1 $(DESTDIR)/$(datadir)/man/man1/$(exe_name).1\n"
  },
  {
    "path": "README.md",
    "content": "# fd\n\n[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml)\n[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)\n[[中文](https://github.com/cha0ran/fd-zh)]\n[[한국어](https://github.com/spearkkk/fd-kor)]\n\n`fd` is a program to find entries in your filesystem.\nIt is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.org/software/findutils/).\nWhile it does not aim to support all of `find`'s powerful functionality, it provides sensible\n(opinionated) defaults for a majority of use cases.\n\n[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting)\n\n## Features\n\n* Intuitive syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`.\n* Regular expression (default) and glob-based patterns.\n* [Very fast](#benchmark) due to parallelized directory traversal.\n* Uses colors to highlight different file types (same as `ls`).\n* Supports [parallel command execution](#command-execution)\n* Smart case: the search is case-insensitive by default. 
It switches to\n  case-sensitive if the pattern contains an uppercase\n  character[\\*](http://vimdoc.sourceforge.net/htmldoc/options.html#'smartcase').\n* Ignores hidden directories and files, by default.\n* Ignores patterns from your `.gitignore`, by default.\n* The command name is *50%* shorter[\\*](https://github.com/ggreer/the_silver_searcher) than\n  `find` :-).\n\n## Sponsors\n\nA special *thank you* goes to our biggest <a href=\"doc/sponsors.md\">sponsor</a>:<br>\n\n<a href=\"https://tuple.app/fd\">\n  <img src=\"doc/sponsors/tuple-logo.png\" width=\"200\" alt=\"Tuple\">\n  <br>\n  <strong>Tuple, the premier screen sharing app for developers</strong>\n  <br>\n  <sub>Available for MacOS &amp; Windows</sub>\n</a>\n\n\n## Demo\n\n![Demo](doc/screencast.svg)\n\n## How to use\n\nFirst, to get an overview of all available command line options, you can either run\n[`fd -h`](#command-line-options) for a concise help message or `fd --help` for a more detailed\nversion.\n\n### Simple search\n\n*fd* is designed to find entries in your filesystem. The most basic search you can perform is to\nrun *fd* with a single argument: the search pattern. For example, assume that you want to find an\nold script of yours (the name included `netflix`):\n``` bash\n> fd netfl\nSoftware/python/imdb-ratings/netflix-details.py\n```\nIf called with just a single argument like this, *fd* searches the current directory recursively\nfor any entries that *contain* the pattern `netfl`.\n\n### Regular expression search\n\nThe search pattern is treated as a regular expression. 
Here, we search for entries that start\nwith `x` and end with `rc`:\n``` bash\n> cd /etc\n> fd '^x.*rc$'\nX11/xinit/xinitrc\nX11/xinit/xserverrc\n```\n\nThe regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/latest/regex/#syntax).\n\n### Specifying the root directory\n\nIf we want to search a specific directory, it can be given as a second argument to *fd*:\n``` bash\n> fd passwd /etc\n/etc/default/passwd\n/etc/pam.d/passwd\n/etc/passwd\n```\n\n### List all files, recursively\n\n*fd* can be called with no arguments. This is very useful to get a quick overview of all entries\nin the current directory, recursively (similar to `ls -R`):\n``` bash\n> cd fd/tests\n> fd\ntestenv\ntestenv/mod.rs\ntests.rs\n```\n\nIf you want to use this functionality to list all files in a given directory, you have to use\na catch-all pattern such as `.` or `^`:\n``` bash\n> fd . fd/tests/\ntestenv\ntestenv/mod.rs\ntests.rs\n```\n\n### Searching for a particular file extension\n\nOften, we are interested in all files of a particular type. This can be done with the `-e` (or\n`--extension`) option. Here, we search for all Markdown files in the fd repository:\n``` bash\n> cd fd\n> fd -e md\nCONTRIBUTING.md\nREADME.md\n```\n\nThe `-e` option can be used in combination with a search pattern:\n``` bash\n> fd -e rs mod\nsrc/fshelper/mod.rs\nsrc/lscolors/mod.rs\ntests/testenv/mod.rs\n```\n\n### Searching for a particular file name\n\n To find files with exactly the provided search pattern, use the `-g` (or `--glob`) option:\n``` bash\n> fd -g libc.so /usr\n/usr/lib32/libc.so\n/usr/lib/libc.so\n```\n\n### Hidden and ignored files\nBy default, *fd* does not search hidden directories and does not show hidden files in the\nsearch results. 
To disable this behavior, we can use the `-H` (or `--hidden`) option:\n``` bash\n> fd pre-commit\n> fd -H pre-commit\n.git/hooks/pre-commit.sample\n```\n\nIf we work in a directory that is a Git repository (or includes Git repositories), *fd* does not\nsearch folders (and does not show files) that match one of the `.gitignore` patterns. To disable\nthis behavior, we can use the `-I` (or `--no-ignore`) option:\n``` bash\n> fd num_cpu\n> fd -I num_cpu\ntarget/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib\n```\n\nTo really search *all* files and directories, simply combine the hidden and ignore features to show\neverything (`-HI`) or use `-u`/`--unrestricted`.\n\n### Matching the full path\nBy default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,\nyou can match against the full path.\n\n```bash\n> fd -p -g '**/.git/config'\n> fd -p '.*/lesson-\\d+/[a-z]+.(jpg|png)'\n```\n\n### Command execution\n\nInstead of just showing the search results, you often want to *do something* with them. `fd`\nprovides two ways to execute external commands for each of your search results:\n\n* The `-x`/`--exec` option runs an external command *for each of the search results* (in parallel).\n* The `-X`/`--exec-batch` option launches the external command once, with *all search results as arguments*.\n\n#### Examples\n\nRecursively find all zip archives and unpack them:\n``` bash\nfd -e zip -x unzip\n```\nIf there are two such files, `file1.zip` and `backup/file2.zip`, this would execute\n`unzip file1.zip` and `unzip backup/file2.zip`. The two `unzip` processes run in parallel\n(if the files are found fast enough).\n\nFind all `*.h` and `*.cpp` files and auto-format them inplace with `clang-format -i`:\n``` bash\nfd -e h -e cpp -x clang-format -i\n```\nNote how the `-i` option to `clang-format` can be passed as a separate argument. 
This is why\nwe put the `-x` option last.\n\nAny positional arguments after `-x` belong to the command template, not to `fd` itself. If you\nalso want to pass a pattern or search path, put `-x` last:\n``` bash\nfd pattern path -x echo\n```\n\nFind all `test_*.py` files and open them in your favorite editor:\n``` bash\nfd -g 'test_*.py' -X vim\n```\nNote that we use capital `-X` here to open a single `vim` instance. If there are two such files,\n`test_basic.py` and `lib/test_advanced.py`, this will run `vim test_basic.py lib/test_advanced.py`.\n\nTo see details like file permissions, owners, file sizes etc., you can tell `fd` to show them\nby running `ls` for each result:\n``` bash\nfd … -X ls -lhd --color=always\n```\nThis pattern is so useful that `fd` provides a shortcut. You can use the `-l`/`--list-details`\noption to execute `ls` in this way: `fd … -l`.\n\nThe `-X` option is also useful when combining `fd` with [ripgrep](https://github.com/BurntSushi/ripgrep/) (`rg`) in order to search within a certain class of files, like all C++ source files:\n```bash\nfd -e cpp -e cxx -e h -e hpp -X rg 'std::cout'\n```\n\nConvert all `*.jpg` files to `*.png` files:\n``` bash\nfd -e jpg -x convert {} {.}.png\n```\nHere, `{}` is a placeholder for the search result. 
`{.}` is the same, without the file extension.\nSee below for more details on the placeholder syntax.\n\nThe terminal output of commands run from parallel threads using `-x` will not be interlaced or garbled,\nso `fd -x` can be used to rudimentarily parallelize a task run over many files.\nAn example of this is calculating the checksum of each individual file within a directory.\n```\nfd -tf -x md5sum > file_checksums.txt\n```\n\n#### Placeholder syntax\n\nThe `-x` and `-X` options take a *command template* as a series of arguments (instead of a single string).\nIf you want to add additional options to `fd` after the command template, you can terminate it with a `\\;`.\n\nFor example, `fd -x echo \\; pattern path` treats `pattern path` as `fd` arguments instead of\npassing them to `echo`. In practice, it is often clearer to write `fd pattern path -x echo`.\n\nThe syntax for generating commands is similar to that of [GNU Parallel](https://www.gnu.org/software/parallel/):\n\n- `{}`: A placeholder token that will be replaced with the path of the search result\n  (`documents/images/party.jpg`).\n- `{.}`: Like `{}`, but without the file extension (`documents/images/party`).\n- `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`).\n- `{//}`: The parent of the discovered path (`documents/images`).\n- `{/.}`: The basename, with the extension removed (`party`).\n\nIf you do not include a placeholder, *fd* automatically adds a `{}` at the end.\n\n#### Parallel vs. serial execution\n\nFor `-x`/`--exec`, you can control the number of parallel jobs by using the `-j`/`--threads` option.\nUse `--threads=1` for serial execution.\n\n### Excluding specific files or directories\n\nSometimes we want to ignore search results from a specific subdirectory. For example, we might\nwant to search all hidden files and directories (`-H`) but exclude all matches from `.git`\ndirectories. We can use the `-E` (or `--exclude`) option for this. 
It takes an arbitrary glob\npattern as an argument:\n``` bash\n> fd -H -E .git …\n```\n\nWe can also use this to skip mounted directories:\n``` bash\n> fd -E /mnt/external-drive …\n```\n\n.. or to skip certain file types:\n``` bash\n> fd -E '*.bak' …\n```\n\nTo make exclude-patterns like these permanent, you can create a `.fdignore` file. They work like\n`.gitignore` files, but are specific to `fd`. For example:\n``` bash\n> cat ~/.fdignore\n/mnt/external-drive\n*.bak\n```\n\n> [!NOTE]\n> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.\n\nIf you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.\nThis is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\\fd\\ignore` in\nWindows.\n\nYou may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents\nare not included in output if you use the `--hidden` option.\n\n### Deleting files\n\nYou can use `fd` to remove all files and directories that are matched by your search pattern.\nIf you only want to remove files, you can use the `--exec-batch`/`-X` option to call `rm`. For\nexample, to recursively remove all `.DS_Store` files, run:\n``` bash\n> fd -H '^\\.DS_Store$' -tf -X rm\n```\nIf you are unsure, always call `fd` without `-X rm` first. Alternatively, use `rm`s \"interactive\"\noption:\n``` bash\n> fd -H '^\\.DS_Store$' -tf -X rm -i\n```\n\nIf you also want to remove a certain class of directories, you can use the same technique. 
You will\nhave to use `rm`s `--recursive`/`-r` flag to remove directories.\n\n> [!NOTE]\n> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a\npath like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a\nsituation where the outer `foo` directory is removed first, leading to (harmless) *\"'foo/bar/foo':\nNo such file or directory\"* errors in the `rm` call.\n\n### Command-line options\n\nThis is the output of `fd -h`. To see the full set of command-line options, use `fd --help` which\nalso includes a much more detailed help text.\n\n```\nUsage: fd [OPTIONS] [pattern [path]...]\n\nArguments:\n  [pattern]  the search pattern (a regular expression, unless '--glob' is used; optional)\n  [path]...  the root directories for the filesystem search (optional)\n\nOptions:\n  -H, --hidden                     Search hidden files and directories\n  -I, --no-ignore                  Do not respect .(git|fd)ignore files\n  -s, --case-sensitive             Case-sensitive search (default: smart case)\n  -i, --ignore-case                Case-insensitive search (default: smart case)\n  -g, --glob                       Glob-based search (default: regular expression)\n  -a, --absolute-path              Show absolute instead of relative paths\n  -l, --list-details               Use a long listing format with file metadata\n  -L, --follow                     Follow symbolic links\n  -p, --full-path                  Search full abs. 
path (default: filename only)\n  -d, --max-depth <depth>          Set maximum search depth (default: none)\n  -E, --exclude <glob>             Exclude entries that match the given glob pattern\n  -t, --type <filetype>            Filter by type: file (f), directory (d/dir), symlink (l),\n                                   executable (x), empty (e), socket (s), pipe (p), char-device\n                                   (c), block-device (b)\n  -e, --extension <ext>            Filter by file extension\n  -S, --size <size>                Limit results based on the size of files\n      --changed-within <date|dur>  Filter by file modification time (newer than)\n      --changed-before <date|dur>  Filter by file modification time (older than)\n  -o, --owner <user:group>         Filter by owning user and/or group\n      --format <fmt>               Print results according to template\n  -x, --exec <cmd>...              Execute a command for each search result\n  -X, --exec-batch <cmd>...        Execute a command with all search results at once\n  -c, --color <when>               When to use colors [default: auto] [possible values: auto,\n                                   always, never]\n      --hyperlink[=<when>]         Add hyperlinks to output paths [default: never] [possible\n                                   values: auto, always, never]\n      --ignore-contain <name>      Ignore directories containing the named entry\n  -h, --help                       Print help (see more with '--help')\n  -V, --version                    Print version\n```\n\nNote that options can be given after the pattern and/or path as well.\n\n## Benchmark\n\nLet's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000\nsubdirectories and about a 4 million files. For averaging and statistical analysis, I'm using\n[hyperfine](https://github.com/sharkdp/hyperfine). 
The following benchmarks are performed\nwith a \"warm\"/pre-filled disk-cache (results for a \"cold\" disk-cache show the same trends).\n\nLet's start with `find`:\n```\nBenchmark 1: find ~ -iregex '.*[0-9]\\.jpg$'\n  Time (mean ± σ):     19.922 s ±  0.109 s\n  Range (min … max):   19.765 s … 20.065 s\n```\n\n`find` is much faster if it does not need to perform a regular-expression search:\n```\nBenchmark 2: find ~ -iname '*[0-9].jpg'\n  Time (mean ± σ):     11.226 s ±  0.104 s\n  Range (min … max):   11.119 s … 11.466 s\n```\n\nNow let's try the same for `fd`. Note that `fd` performs a regular expression\nsearch by default. The options `-u`/`--unrestricted` option is needed here for\na fair comparison. Otherwise `fd` does not have to traverse hidden folders and\nignored paths (see below):\n```\nBenchmark 3: fd -u '[0-9]\\.jpg$' ~\n  Time (mean ± σ):     854.8 ms ±  10.0 ms\n  Range (min … max):   839.2 ms … 868.9 ms\n```\nFor this particular example, `fd` is approximately **23 times faster** than `find -iregex`\nand about **13 times faster** than `find -iname`. By the way, both tools found the exact\nsame 546 files :smile:.\n\n**Note**: This is *one particular* benchmark on *one particular* machine. While we have\nperformed a lot of different tests (and found consistent results), things might\nbe different for you! We encourage everyone to try it out on their own. See\n[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.\n\nConcerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are\nalso used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).\n\n## Troubleshooting\n\n### `fd` does not find my file!\n\nRemember that `fd` ignores hidden directories and files by default. It also ignores patterns\nfrom `.gitignore` files. 
If you want to make sure to find absolutely every possible file, always\nuse the options `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):\n``` bash\n> fd -u …\n```\n\nAlso remember that by default, `fd` only searches based on the filename and\ndoesn't compare the pattern to the full path. If you want to search based on the\nfull path (similar to the `-path` option of `find`) you need to use the `--full-path`\n(or `-p`) option.\n\n### Colorized output\n\n`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment\nvariable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value\nof this variable is set by the `dircolors` command which provides a convenient configuration format\nto define colors for different file formats.\nOn most distributions, `LS_COLORS` should be set already. If you are on Windows or if you are looking\nfor alternative, more complete (or more colorful) variants, see [here](https://github.com/sharkdp/vivid),\n[here](https://github.com/seebi/dircolors-solarized) or\n[here](https://github.com/trapd00r/LS_COLORS).\n\n`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.\n\n### `fd` doesn't seem to interpret my regex pattern correctly\n\nA lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your\nshell. If in doubt, always make sure to put single quotes around the regex pattern:\n\n``` bash\n> fd '^[A-Z][0-9]+$'\n```\n\nIf your pattern starts with a dash, you have to add `--` to signal the end of command line\noptions. Otherwise, the pattern will be interpreted as a command-line option. Alternatively,\nuse a character class with a single hyphen character:\n\n``` bash\n> fd -- '-pattern'\n> fd '[-]pattern'\n```\n\n### \"Command not found\" for `alias`es or shell functions\n\nShell `alias`es and shell functions can not be used for command execution via `fd -x` or\n`fd -X`. 
In `zsh`, you can make the alias global via `alias -g myalias=\"…\"`. In `bash`,\nyou can use `export -f my_function` to make available to child processes. You would still\nneed to call `fd -x bash -c 'my_function \"$1\"' bash`. For other use cases or shells, use\na (temporary) shell script.\n\n## Integration with other programs\n\n### Using fd with `fzf`\n\nYou can use *fd* to generate input for the command-line fuzzy finder [fzf](https://github.com/junegunn/fzf):\n``` bash\nexport FZF_DEFAULT_COMMAND='fd --type file'\nexport FZF_CTRL_T_COMMAND=\"$FZF_DEFAULT_COMMAND\"\n```\n\nThen, you can type `vim <Ctrl-T>` on your terminal to open fzf and search through the fd-results.\n\nAlternatively, you might like to follow symbolic links and include hidden files (but exclude `.git` folders):\n``` bash\nexport FZF_DEFAULT_COMMAND='fd --type file --follow --hidden --exclude .git'\n```\n\nYou can even use fd's colored output inside fzf by setting:\n``` bash\nexport FZF_DEFAULT_COMMAND=\"fd --type file --color=always\"\nexport FZF_DEFAULT_OPTS=\"--ansi\"\n```\n\nFor more details, see the [Tips section](https://github.com/junegunn/fzf#tips) of the fzf README.\n\n### Using fd with `rofi`\n\n[*rofi*](https://github.com/davatorium/rofi) is a graphical launch menu application that is able to create menus by reading from *stdin*. Piping `fd` output into `rofi`s `-dmenu` mode creates fuzzy-searchable lists of files and directories.\n\n#### Example\n\nCreate a case-insensitive searchable multi-select list of *PDF* files under your `$HOME` directory and open the selection with your configured PDF viewer. To list all file types, drop the `-e pdf` argument.\n\n``` bash\nfd --type f -e pdf . $HOME | rofi -keep-right -dmenu -i -p FILES -multi-select | xargs -I {} xdg-open {}\n```\n\nTo modify the list that is presented by rofi, add arguments to the `fd` command. 
To modify the search behaviour of rofi, add arguments to the `rofi` command.\n\n### Using fd with `emacs`\n\nThe emacs package [find-file-in-project](https://github.com/technomancy/find-file-in-project) can\nuse *fd* to find files.\n\nAfter installing `find-file-in-project`, add the line `(setq ffip-use-rust-fd t)` to your\n`~/.emacs` or `~/.emacs.d/init.el` file.\n\nIn emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alternatively, run\n`M-x find-file-in-project` to list all available files in the project.\n\n### Printing the output as a tree\n\nTo format the output of `fd` as a file-tree you can use the `tree` command with\n`--fromfile`:\n```bash\n❯ fd | tree --fromfile\n```\n\nThis can be more useful than running `tree` by itself because `tree` does not\nignore any files by default, nor does it support as rich a set of options as\n`fd` does to control what to print:\n```bash\n❯ fd --extension rs | tree --fromfile\n.\n├── build.rs\n└── src\n    ├── app.rs\n    └── error.rs\n```\n\nOn bash and similar you can simply create an alias:\n```bash\n❯ alias as-tree='tree --fromfile'\n```\n\n### Using fd with `xargs` or `parallel`\n\nNote that `fd` has a builtin feature for [command execution](#command-execution) with\nits `-x`/`--exec` and `-X`/`--exec-batch` options. If you prefer, you can still use\nit in combination with `xargs`:\n``` bash\n> fd -0 -e rs | xargs -0 wc -l\n```\nHere, the `-0` option tells *fd* to separate search results by the NULL character (instead of\nnewlines). In the same way, the `-0` option of `xargs` tells it to read the input in this way.\n\n## Installation\n\n[![Packaging status](https://repology.org/badge/vertical-allrepos/fd-find.svg)](https://repology.org/project/fd-find/versions)\n\n### On Ubuntu\n*... 
and other Debian-based Linux distributions.*\n\nIf you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the\n[officially maintained package](https://packages.ubuntu.com/fd-find):\n```\napt install fd-find\n```\nNote that the binary is called `fdfind` as the binary name `fd` is already used by another package.\nIt is recommended that after installation, you add a link to `fd` by executing command\n`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.\nMake sure that `$HOME/.local/bin` is in your `$PATH`.\n\nIf you use an older version of Ubuntu, you can download the latest `.deb` package from the\n[release page](https://github.com/sharkdp/fd/releases) and install it via:\n``` bash\ndpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture\n```\n\nNote that the .deb packages on the release page for this project still name the executable `fd`.\n\n### On Debian\n\nIf you run Debian Buster or newer, you can install the\n[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):\n```\napt-get install fd-find\n```\nNote that the binary is called `fdfind` as the binary name `fd` is already used by another package.\nIt is recommended that after installation, you add a link to `fd` by executing command\n`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.\nMake sure that `$HOME/.local/bin` is in your `$PATH`.\n\nNote that the .deb packages on the release page for this project still name the executable `fd`.\n\n### On Fedora\n\nStarting with Fedora 28, you can install `fd` from the official package sources:\n``` bash\ndnf install fd-find\n```\n\n### On Alpine Linux\n\nYou can install [the fd package](https://pkgs.alpinelinux.org/packages?name=fd)\nfrom the official sources, provided you have the appropriate repository enabled:\n```\napk add fd\n```\n\n### On Arch Linux\n\nYou can install [the fd 
package](https://www.archlinux.org/packages/extra/x86_64/fd/) from the official repos:\n```\npacman -S fd\n```\nYou can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).\n\n### On Gentoo Linux\n\nYou can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:\n```\nemerge -av fd\n```\n\n### On openSUSE Linux\n\nYou can install [the fd package](https://software.opensuse.org/package/fd) from the official repo:\n```\nzypper in fd\n```\n\n### On Void Linux\n\nYou can install `fd` via xbps-install:\n```\nxbps-install -S fd\n```\n\n### On ALT Linux\n\nYou can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:\n```\napt-get install fd\n```\n\n### On Solus\n\nYou can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:\n```\neopkg install fd\n```\n\n### On RedHat Enterprise Linux (RHEL) 8/9/10, Almalinux 8/9/10, EuroLinux 8/9 or Rocky Linux 8/9/10\n\nYou can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.\n\n```bash\ndnf copr enable tkbcopr/fd\ndnf install fd\n```\n\nA different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.\n\n### On macOS\n\nYou can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):\n```\nbrew install fd\n```\n\n… or with MacPorts:\n```\nport install fd\n```\n\n### On Windows\n\nYou can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases).\n\nAlternatively, you can install `fd` via [Scoop](http://scoop.sh):\n```\nscoop install fd\n```\n\nOr via [Chocolatey](https://chocolatey.org):\n```\nchoco install fd\n```\n\nOr via 
[Winget](https://learn.microsoft.com/en-us/windows/package-manager/):\n```\nwinget install sharkdp.fd\n```\n\n### On GuixOS\n\nYou can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:\n```\nguix install fd\n```\n\n### On Mise\n\nYou can use [mise](https://github.com/jdx/mise) to install `fd` with a command like this:\n```\nmise use -g fd@latest\n```\n\n### On NixOS / via Nix\n\nYou can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:\n```\nnix-env -i fd\n```\n\n### Via Flox\n\nYou can use [Flox](https://flox.dev) to install `fd` into a Flox environment:\n```\nflox install fd\n```\n\n### On FreeBSD\n\nYou can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:\n```\npkg install fd-find\n```\n\n### From npm\n\nOn Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:\n\n```\nnpm install -g fd-find\n```\n\n### From source\n\nWith Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via:\n```\ncargo install fd-find\n```\nNote that rust version *1.77.2* or later is required.\n\n`make` is also needed for the build.\n\n### From binaries\n\nThe [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows. 
Statically-linked binaries are also available: look for archives with `musl` in the file name.\n\n## Development\n```bash\ngit clone https://github.com/sharkdp/fd\n\n# Build\ncd fd\ncargo build\n\n# Run unit tests and integration tests\ncargo test\n\n# Install\ncargo install --path .\n```\n\n### Completions\n\n#### From Release Archives\n\nPre-built completion files are included in the release archives (`.tar.gz`/`.zip`) on the\n[Releases page](https://github.com/sharkdp/fd/releases), in the `autocomplete` directory.\nTo use these completions:\n\n- **bash**: Source the `fd.bash` file in your `~/.bashrc`, or place it in a directory that gets sourced automatically.\n- **zsh**: Move `_fd` to a directory in your `fpath` (e.g., `~/.zfunc`).\n- **fish**: Copy `fd.fish` to `~/.config/fish/completions/`.\n- **powershell**: Source `_fd.ps1` from one of your [profile scripts](https://learn.microsoft.com/en-us/powershell/scripting/learn/shell/creating-profiles?view=powershell-7.5).\n\n#### Generate from fd\n\nYou can also generate completions directly using `fd --gen-completions <shell>`:\n\n```bash\n# Bash\nfd --gen-completions bash > ~/.local/share/bash-completion/completions/fd\n\n# Zsh (ensure ~/.zfunc is in your fpath)\nfd --gen-completions zsh > ~/.zfunc/_fd\n\n# Fish\nfd --gen-completions fish > ~/.config/fish/completions/fd.fish\n\n# PowerShell\nfd --gen-completions powershell >> $PROFILE\n```\n\n## Maintainers\n\n- [sharkdp](https://github.com/sharkdp)\n- [tmccombs](https://github.com/tmccombs)\n- [tavianator](https://github.com/tavianator)\n\n## License\n\n`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.\n\nSee the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.\n"
  },
  {
    "path": "SECURITY.md",
    "content": "# Security Reporting\n\nIf you wish to report a security vulnerability privately, we appreciate your diligence. Please follow the guidelines below to submit your report.\n\n## Reporting\n\nTo report a security vulnerability, please provide the following information:\n\n1. **PROJECT**\n   - Include the URL of the project repository - Example: <https://github.com/sharkdp/fd>\n\n2. **PUBLIC**\n   - Indicate whether this vulnerability has already been publicly discussed or disclosed.\n   - If so, provide relevant links.\n\n3. **DESCRIPTION**\n   - Provide a detailed description of the security vulnerability.\n   - Include as much information as possible to help us understand and address the issue.\n\nSend this information, along with any additional relevant details, to our [vulnerability reporting form](https://github.com/sharkdp/fd/security/advisories/new).\n\n## Confidentiality\n\nWe kindly ask you to keep the report confidential until a public announcement is made.\n\n## Notes\n\n- Vulnerabilities will be handled on a best-effort basis.\n- You may request an advance copy of the patched release, but we cannot guarantee early access before the public release.\n- You will be notified via email simultaneously with the public announcement.\n- We will respond within a few weeks to confirm whether your report has been accepted or rejected.\n\nThank you for helping to improve the security of our project!\n"
  },
  {
    "path": "contrib/completion/_fd",
    "content": "#compdef fd\n\n##\n# zsh completion function for fd\n#\n# Based on ripgrep completion function.\n# Originally based on code from the zsh-users project — see copyright notice\n# below.\n\nautoload -U is-at-least\n\n_fd() {\n  local curcontext=\"$curcontext\" no='!' ret=1\n  local -a context line state state_descr _arguments_options fd_types fd_args\n  local -A opt_args\n\n  if is-at-least 5.2; then\n    _arguments_options=( -s -S )\n  else\n    _arguments_options=( -s )\n  fi\n\n  fd_types=(\n    {f,file}'\\:\"regular files\"'\n    {d,directory}'\\:\"directories\"'\n    {l,symlink}'\\:\"symbolic links\"'\n    {e,empty}'\\:\"empty files or directories\"'\n    {x,executable}'\\:\"executable (files)\"'\n    {b,block-device}'\\:\"block devices\"'\n    {c,char-device}'\\:\"character devices\"'\n    {s,socket}'\\:\"sockets\"'\n    {p,pipe}'\\:\"named pipes (FIFOs)\"'\n  )\n\n  # Do not complete rare options unless either the current prefix\n  # matches one of those options or the user has the `complete-all`\n  # style set. Note that this prefix check has to be updated manually to account\n  # for all of the potential negation options listed below!\n  if\n    # (--[bpsu]* => match all options marked with '$no')\n    [[ $PREFIX$SUFFIX == --[bopsun]* ]] ||\n    zstyle -t \":complete:$curcontext:*\" complete-all\n  then\n    no=\n  fi\n\n  # We make heavy use of argument groups here to prevent the option specs from\n  # growing unwieldy. These aren't supported in zsh <5.4, though, so we'll strip\n  # them out below if necessary. 
This makes the exclusions inaccurate on those\n  # older versions, but oh well — it's not that big a deal\n  fd_args=(\n    + '(hidden)' # hidden files\n    {-H,--hidden}'[search hidden files/directories]'\n\n    + '(no-ignore-full)' # all ignore files\n    '(no-ignore-partial)'{-I,--no-ignore}\"[don't respect .(git|fd)ignore and global ignore files]\"\n    $no'(no-ignore-partial)*'{-u,--unrestricted}'[alias for --no-ignore, when repeated also alias for --hidden]'\n\n    + no-ignore-partial # some ignore files\n    \"(no-ignore-full --no-ignore-vcs)--no-ignore-vcs[don't respect .gitignore files]\"\n    \"!(no-ignore-full --no-global-ignore-file)--no-global-ignore-file[don't respect the global ignore file]\"\n    $no'(no-ignore-full --no-ignore-parent)--no-ignore-parent[]'\n\n    + '(case)' # case-sensitivity\n    {-s,--case-sensitive}'[perform a case-sensitive search]'\n    {-i,--ignore-case}'[perform a case-insensitive search]'\n\n    + '(regex-pattern)' # regex-based search pattern\n    '(no-regex-pattern)--regex[perform a regex-based search (default)]'\n\n    + '(no-regex-pattern)' # non-regex-based search pattern\n    {-g,--glob}'[perform a glob-based search]'\n    {-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'\n\n    + '(no-require-git)'\n    \"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]\"\n\n    + '(match-full)' # match against full path\n    {-p,--full-path}'[match the pattern against the full path instead of the basename]'\n\n    + '(follow)' # follow symlinks\n    {-L,--follow}'[follow symbolic links to directories]'\n\n    + '(abs-path)' # show absolute paths\n    '(long-listing)'{-a,--absolute-path}'[show absolute paths instead of relative paths]'\n\n    + '(null-sep)' # use null separator for output\n    '(long-listing)'{-0,--print0}'[separate search results by the null character]'\n\n    + '(long-listing)' # long-listing output\n    '(abs-path null-sep 
max-results exec-cmds)'{-l,--list-details}'[use a long listing format with file metadata]'\n\n    + '(max-results)' # max number of results\n    '(long-listing exec-cmds)--max-results=[limit number of search results to given count and quit]:count'\n    '(long-listing exec-cmds)-1[limit to a single search result and quit]'\n\n    + '(fs-errors)' # file-system errors\n    $no'--show-errors[enable the display of filesystem errors]'\n\n    + '(fs-traversal)' # file-system traversal\n    $no\"--one-file-system[don't descend into directories on other file systems]\"\n    '!--mount'\n    '!--xdev'\n\n    + dir-depth # directory depth\n    '(--exact-depth -d --max-depth)'{-d+,--max-depth=}'[set max directory depth to descend when searching]:depth'\n    '!(--exact-depth -d --max-depth)--maxdepth:depth'\n    '(--exact-depth --min-depth)--min-depth=[set directory depth to descend before start searching]:depth'\n    '(--exact-depth -d --max-depth --maxdepth --min-depth)--exact-depth=[only search at the exact given directory depth]:depth'\n\n    + prune # pruning\n    \"--prune[don't traverse into matching directories]\"\n\n    + filter-misc # filter search\n    '*'{-t+,--type=}\"[filter search by type]:type:(($fd_types))\"\n    '*'{-e+,--extension=}'[filter search by file extension]:extension'\n    '*'{-E+,--exclude=}'[exclude files/directories that match the given glob pattern]:glob pattern'\n    '*'{-S+,--size=}'[limit search by file size]:size limit:->size'\n    '(-o --owner)'{-o+,--owner=}'[filter by owning user and/or group]:owner and/or group:->owner'\n\n    + ignore-file # extra ignore files\n    '*--ignore-file=[add a custom, low-precedence ignore-file with .gitignore format]: :_files'\n\n    + '(filter-mtime-newer)' # filter by files modified after than\n    '--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'\n    '--changed-after=[alias for --changed-within]:date/duration'\n    
'!--change-newer-than=:date/duration'\n    '!--newer=:date/duration'\n\n    + '(filter-mtime-older)' # filter by files modified before than\n    '--changed-before=[limit search to files/directories modified before the given date/duration]:date or duration'\n    '!--change-older-than=:date/duration'\n    '!--older=:date/duration'\n\n    + '(color)' # colorize output\n    {-c+,--color=}'[declare when to colorize search results]:when to colorize:((\n      auto\\:\"show colors if the output goes to an interactive console (default)\"\n      never\\:\"do not use colorized output\"\n      always\\:\"always use colorized output\"\n    ))'\n\n    '--hyperlink=-[add hyperlinks to output paths]::when:(auto never always)'\n\n    + '(threads)'\n    {-j+,--threads=}'[set the number of threads for searching and executing]:number of threads'\n\n    + '(exec-cmds)' # execute command\n    '(long-listing max-results)'{-x+,--exec=}'[execute command for each search result]:command: _command_names -e:*\\;::program arguments: _normal'\n    '(long-listing max-results)'{-X+,--exec-batch=}'[execute command for all search results at once]:command: _command_names -e:*\\;::program arguments: _normal'\n    '(long-listing max-results)--batch-size=[max number of args for each -X call]:size'\n\n    + other\n    '!(--max-buffer-time)--max-buffer-time=[set amount of time to buffer before showing output]:time (ms)'\n\n    + '(about)' # about flags\n    '(: * -)'{-h,--help}'[display help message]'\n    '(: * -)'{-V,--version}'[display version information]'\n\n    + path-sep # set path separator for output\n    $no'(--path-separator)--path-separator=[set the path separator to use when printing file paths]:path separator'\n\n    + search-path\n    $no'(--base-directory)--base-directory=[change the current working directory to the given path]:directory:_files -/'\n    $no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'\n\n    + strip-cwd-prefix\n    
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=-[When to strip ./]::when:(always never auto)'\n\n    + and\n    '--and=[additional required search pattern]:pattern'\n\n\n    + args # positional arguments\n    '1: :_guard \"^-*\" pattern'\n    '(--search-path)*:directory:_files -/'\n  )\n\n  # Strip out argument groups where unsupported (see above)\n  is-at-least 5.4 ||\n  fd_args=( ${(@)fd_args:#(#i)(+|[a-z0-9][a-z0-9_-]#|\\([a-z0-9][a-z0-9_-]#\\))} )\n\n  _arguments $_arguments_options : $fd_args && ret=0\n\n  case ${state} in\n    owner)\n      compset -P '(\\\\|)\\!'\n      if compset -P '*:'; then\n        _groups && ret=0\n      else\n        if\n          compset -S ':*' ||\n          # Do not add the colon suffix when completing \"!user<TAB>\n          # (with a starting double-quote) otherwise pressing tab again\n          # after the inserted colon \"!user:<TAB> will complete history modifiers\n          [[ $IPREFIX == (\\\\|\\!)*  && ($QIPREFIX == \\\"* && -z $QISUFFIX) ]]\n        then\n          _users && ret=0\n        else\n          local q\n          # Since quotes are needed when using the negation prefix !,\n          # automatically remove the colon suffix also when closing the quote\n          if [[ $QIPREFIX == [\\'\\\"]* ]]; then\n            q=${QIPREFIX:0:1}\n          fi\n          _users -r \": \\t\\n\\-$q\" -S : && ret=0\n        fi\n      fi\n      ;;\n\n    size)\n      if compset -P '[-+][0-9]##'; then\n        local -a suff=(\n          'B:bytes'\n          'K:kilobytes  (10^3  = 1000   bytes)'\n          'M:megabytes  (10^6  = 1000^2 bytes)'\n          'G:gigabytes  (10^9  = 1000^3 bytes)'\n          'T:terabytes  (10^12 = 1000^4 bytes)'\n          'Ki:kibibytes  ( 2^10 = 1024   bytes)'\n          'Mi:mebibytes  ( 2^20 = 1024^2 bytes)'\n          'Gi:gibibytes  ( 2^30 = 1024^3 bytes)'\n          'Ti:tebibytes  ( 2^40 = 1024^4 bytes)'\n        )\n        _describe -t units 'size limit units' suff -V 'units'\n      elif compset -P 
'[-+]'; then\n        _message -e 'size limit number (full format: <+-><number><unit>)'\n      else\n        _values 'size limit prefix (full format: <prefix><number><unit>)' \\\n          '\\+[file size must be greater or equal to]'\\\n          '-[file size must be less than or equal to]' && ret=0\n      fi\n      ;;\n  esac\n\n  return ret\n}\n\n_fd \"$@\"\n\n# ------------------------------------------------------------------------------\n# Copyright (c) 2011 GitHub zsh-users - http://github.com/zsh-users\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#     * Redistributions of source code must retain the above copyright\n#       notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above copyright\n#       notice, this list of conditions and the following disclaimer in the\n#       documentation and/or other materials provided with the distribution.\n#     * Neither the name of the zsh-users nor the\n#       names of its contributors may be used to endorse or promote products\n#       derived from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY\n# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\n# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n# ------------------------------------------------------------------------------\n# Description\n# -----------\n#\n#  Completion script for fd\n#\n# ------------------------------------------------------------------------------\n# Authors\n# -------\n#\n#  * smancill (https://github.com/smancill)\n#\n# ------------------------------------------------------------------------------\n\n# Local Variables:\n# mode: shell-script\n# coding: utf-8-unix\n# indent-tabs-mode: nil\n# sh-indentation: 2\n# sh-basic-offset: 2\n# End:\n# vim: ft=zsh sw=2 ts=2 et\n"
  },
  {
    "path": "doc/.gitattributes",
    "content": "* linguist-vendored\n"
  },
  {
    "path": "doc/fd.1",
    "content": ".TH FD 1\n.SH NAME\nfd \\- find entries in the filesystem\n.SH SYNOPSIS\n.B fd\n.RB [ \\-HIEsiaLp0hV ]\n.RB [ \\-d\n.IR depth ]\n.RB [ \\-t\n.IR filetype ]\n.RB [ \\-e\n.IR ext ]\n.RB [ \\-E\n.IR exclude ]\n.RB [ \\-c\n.IR when ]\n.RB [ \\-j\n.IR num ]\n.RB [ \\-x\n.IR cmd ]\n.RI [ pattern\n.RI [ path... ]]\n.SH DESCRIPTION\n.B fd\nis a simple, fast and user-friendly alternative to\n.BR find (1).\n.P\nBy default\n.B fd\nuses regular expressions for the pattern. However, this can be changed to use simple glob patterns\nwith the '\\-\\-glob' option.\n.P\nBy default\n.B fd\nwill exclude hidden files and directories, as well as any files that match gitignore rules\nor ignore rules in .ignore or .fdignore files.\n.P\nIf you wish to search all files in a specific directory, you can use '' or . as the search pattern,\nto match all files. Or you can use the '\\-\\-search\\-path' option to specify the path(s) instead of\nthe positional parameter.\n.P\nOptions may be given anywhere on the command line.\n.SH OPTIONS\n.TP\n.B \\-H, \\-\\-hidden\nInclude hidden files and directories in the search results\n(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.\n.IP\nIgnored files are still excluded unless \\-\\-no\\-ignore or \\-\\-no\\-ignore\\-vcs\nis also used.\n.TP\n.B \\-I, \\-\\-no\\-ignore\nShow search results from files and directories that would otherwise be ignored by\n.RS\n.IP \\[bu] 2\n.I .gitignore\n.IP \\[bu]\n.I .git/info/exclude\n.IP \\[bu]\nThe global gitignore configuration (by default\n.IR $HOME/.config/git/ignore )\n.IP \\[bu]\n.I .ignore\n.IP \\[bu]\n.I .fdignore\n.IP \\[bu]\nThe global fd ignore file (usually\n.I $HOME/.config/fd/ignore\n)\n.RE\n.IP\nThe flag can be overridden with '--ignore'.\n.TP\n.B \\-u, \\-\\-unrestricted\nPerform an unrestricted search, including ignored and hidden files. 
This is an alias for '--hidden --no-ignore'.\n.TP\n.B \\-\\-no\\-ignore\\-vcs\nShow search results from files and directories that would otherwise be ignored by gitignore files\nincluding\n.IR  .gitignore ,\n.IR  .git/info/exclude ,\nand the global gitignore configuration\n.RI ( core.excludesFile\ngit setting, which defaults to\n.IR $HOME/.config/git/ignore ).\nThe flag can be overridden with '--ignore-vcs'.\n.TP\n.B \\-\\-no\\-require\\-git\nDo not require a git repository to respect gitignores. By default, fd will only\nrespect global gitignore rules, .gitignore rules and local exclude rules if fd\ndetects that you are searching inside a git repository. This flag allows you to\nrelax this restriction such that fd will respect all git related ignore rules\nregardless of whether you’re searching in a git repository or not. The flag can\nbe overridden with '--require-git'.\n.TP\n.B \\-\\-no\\-ignore\\-parent\nShow search results from files and directories that would otherwise be ignored by gitignore files in\nparent directories.\n.TP\n.B \\-s, \\-\\-case\\-sensitive\nPerform a case-sensitive search. By default, fd uses case-insensitive searches, unless the\npattern contains an uppercase character (smart case).\n.TP\n.B \\-i, \\-\\-ignore\\-case\nPerform a case-insensitive search. By default, fd uses case-insensitive searches, unless the\npattern contains an uppercase character (smart case).\n.TP\n.B \\-g, \\-\\-glob\nPerform a glob-based search instead of a regular expression search.\nIf combined with the '\\-\\-full-path' option, '**' can be used to match multiple path components.\n.TP\n.B \\-\\-regex\nPerform a regular-expression based search (default). This can be used to override --glob.\n.TP\n.B \\-F, \\-\\-fixed\\-strings\nTreat the pattern as a literal string instead of a regular expression. Note that this also\nperforms substring comparison. 
If you want to match on an exact filename, consider using '\\-\\-glob'.\n.TP\n.BI \"\\-\\-and \" pattern\nAdd additional required search patterns, all of which must be matched. Multiple additional\npatterns can be specified. The patterns are regular expressions, unless '\\-\\-glob'\nor '\\-\\-fixed\\-strings' is used.\n.TP\n.B \\-a, \\-\\-absolute\\-path\nShows the full path starting from the root as opposed to relative paths.\nThe flag can be overridden with '--relative-path'.\n.TP\n.B \\-l, \\-\\-list\\-details\nUse a detailed listing format like 'ls -l'. This is basically an alias\nfor '--exec-batch ls -l' with some additional 'ls' options. This can be used\nto see more metadata, to show symlink targets and to achieve a deterministic\nsort order.\n.TP\n.B \\-L, \\-\\-follow\nBy default, fd does not descend into symlinked directories. Using this flag, symbolic links are\nalso traversed. The flag can be overridden with '--no-follow'.\n.TP\n.B \\-p, \\-\\-full\\-path\nBy default, the search pattern is only matched against the filename (or directory name). Using\nthis flag, the\n.I pattern\nis matched against the full path.\n.TP\n.B \\-0, \\-\\-print0\nSeparate search results by the null character (instead of newlines). Useful for piping results to\n.IR xargs .\n.TP\n.B \\-\\-max\\-results count\nLimit the number of search results to 'count' and quit immediately.\n.TP\n.B \\-1\nLimit the search to a single result and quit immediately. 
This is an alias for '--max-results=1'.\n.TP\n.B \\-q, \\-\\-quiet\nWhen the flag is present, the program does not print anything and will instead exit with a code of 0 if there is at least one search result.\nOtherwise, the exit code will be 1.\nThis is mainly for usage in scripts and can be faster than checking for output because the search can be stopped early after the first match.\n.B \\-\\-has\\-results\ncan be used as an alias.\n.TP\n.B \\-\\-show-errors\nEnable the display of filesystem errors for situations such as insufficient\npermissions or dead symlinks.\n.TP\n.B \\-\\-strip-cwd-prefix [when]\nBy default, relative paths are prefixed with './' when -x/--exec,\n-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a\npath starting with '-' being treated as a command line option. Use\nthis flag to change this behavior. If this flag is used without a value,\nit is equivalent to passing \"always\". Possible values are:\n.RS\n.IP never\nNever strip the ./ at the beginning of paths\n.IP always\nAlways strip the ./ at the beginning of paths\n.IP auto\nOnly strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.\n.RE\n.TP\n.B \\-\\-one\\-file\\-system, \\-\\-mount, \\-\\-xdev\nBy default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).\n.TP\n.B \\-h, \\-\\-help\nPrint help information.\n.TP\n.B \\-V, \\-\\-version\nPrint version information.\n.TP\n.BI \"\\-d, \\-\\-max\\-depth \" d\nLimit directory traversal to at most\n.I d\nlevels of depth. By default, there is no limit on the search depth.\n.TP\n.BI \"\\-\\-min\\-depth \" d\nOnly show search results starting at the given depth. See also: '--max-depth' and '--exact-depth'.\n.TP\n.BI \"\\-\\-exact\\-depth \" d\nOnly show search results at the exact given depth. 
This is an alias for '--min-depth <depth> --max-depth <depth>'.\n.TP\n.B \\-\\-prune\nDo not traverse into matching directories.\n.TP\n.BI \"\\-t, \\-\\-type \" filetype\nFilter search by type:\n.RS\n.IP \"f, file\"\nregular files\n.IP \"d, dir, directory\"\ndirectories\n.IP \"l, symlink\"\nsymbolic links\n.IP \"b, block-device\"\nblock devices\n.IP \"c, char-device\"\ncharacter devices\n.IP \"s, socket\"\nsockets\n.IP \"p, pipe\"\nnamed pipes (FIFOs)\n.IP \"x, executable\"\nexecutable (files)\n.IP \"e, empty\"\nempty files or directories\n.RE\n\n.RS\nThis option can be specified more than once to include multiple file types.\nSearching for '--type file --type symlink' will show both regular files as well as\nsymlinks. Note that the 'executable' and 'empty' filters work differently: '--type\nexecutable' implies '--type file' by default. And '--type empty' searches for\nempty files and directories, unless either '--type file' or '--type directory' is\nspecified in addition.\n\nExamples:\n  - Only search for files:\n      fd --type file …\n      fd -tf …\n  - Find both files and symlinks\n      fd --type file --type symlink …\n      fd -tf -tl …\n  - Find executable files:\n      fd --type executable\n      fd -tx\n  - Find empty files:\n      fd --type empty --type file\n      fd -te -tf\n  - Find empty directories:\n      fd --type empty --type directory\n      fd -te -td\n.RE\n.TP\n.BI \"\\-e, \\-\\-extension \" ext\nFilter search results by file extension\n.IR ext .\nThis option can be used repeatedly to allow for multiple possible file extensions.\n\nIf you want to search for files without extension, you can use the regex '^[^.]+$'\nas a normal search pattern.\n.TP\n.BI \"\\-E, \\-\\-exclude \" glob\nExclude files/directories that match the given glob pattern.\nThis overrides any other ignore logic.\nMultiple exclude patterns can be specified.\nExamples:\n  \\-\\-exclude '*.pyc'\n  \\-\\-exclude node_modules\n.TP\n.BI \"\\-\\-ignore-contain \" name\nExclude 
directories that (directly) contain the given name.\nThis option can be specified multiple times.\n.TP\n.BI \"\\-\\-ignore-file \" path\nAdd a custom ignore-file in '.gitignore' format.\nThese files have a low precedence.\n.TP\n.BI \"\\-c, \\-\\-color \" when\nDeclare\n.I when\nto colorize search results:\n.RS\n.IP auto\nColorize output when standard output is connected to terminal (default).\n.IP never\nDo not colorize output.\n.IP always\nAlways colorize output.\n.RE\n.TP\n.B \"\\-\\-hyperlink\nSpecify whether the output should use terminal escape codes to indicate a hyperlink to a\nfile url pointing to the path.\n\nThe value can be auto, always, or never.\n\nCurrently, the default is \"never\", and if the option is used without an argument \"auto\" is\nused. In the future this may be changed to \"auto\" and \"always\".\n.RS\n.IP auto\nOnly output hyperlinks if color is also enabled, as a proxy for whether terminal escape\ncodes are acceptable.\n.IP never\nNever output hyperlink escapes.\n.IP always\nAlways output hyperlink escapes, regardless of color settings.\n.RE\n.TP\n.BI \"\\-j, \\-\\-threads \" num\nSet number of threads to use for searching & executing (default: number of available CPU cores).\n.TP\n.BI \"\\-S, \\-\\-size \" size\nLimit results based on the size of files using the format\n.I <+-><NUM><UNIT>\n.RS\n.IP '+'\nfile size must be greater than or equal to this\n.IP '-'\nfile size must be less than or equal to this\n.P\nIf neither '+' nor '-' is specified, file size must be exactly equal to this.\n.IP 'NUM'\nThe numeric size (e.g. 500)\n.IP 'UNIT'\nThe units for NUM. 
They are not case-sensitive.\nAllowed unit values:\n.RS\n.IP 'b'\nbytes\n.IP 'k'\nkilobytes (base ten, 10^3 = 1000 bytes)\n.IP 'm'\nmegabytes\n.IP 'g'\ngigabytes\n.IP 't'\nterabytes\n.IP 'ki'\nkibibytes (base two, 2^10 = 1024 bytes)\n.IP 'mi'\nmebibytes\n.IP 'gi'\ngibibytes\n.IP 'ti'\ntebibytes\n.RE\n.RE\n.TP\n.BI \"\\-\\-changed-within \" date|duration\nFilter results based on the file modification time.\nFiles with modification timestamps greater than the argument will be returned.\nThe argument can be provided as a duration (\\fI10h, 1d, 35min\\fR) or as a specific point\nin time as full RFC3339 format with time zone, as a date or datetime in the\nlocal time zone (\\fIYYYY-MM-DD\\fR or \\fIYYYY-MM-DD HH:MM:SS\\fR), or as the prefix '@'\nfollowed by the number of seconds since the Unix epoch (@[0-9]+).\n\\fB\\-\\-change-newer-than\\fR,\n.B --newer\nor\n.B --changed-after\ncan be used as aliases.\n\nExamples:\n  \\-\\-changed-within 2weeks\n  \\-\\-change-newer-than \"2018-10-27 10:00:00\"\n  \\-\\-newer 2018-10-27\n  \\-\\-changed-after @1704067200\n.TP\n.BI \"\\-\\-changed-before \" date|duration\nFilter results based on the file modification time.\nFiles with modification timestamps less than the argument will be returned.\nThe argument can be provided as a duration (\\fI10h, 1d, 35min\\fR) or as a specific point\nin time as full RFC3339 format with time zone, as a date or datetime in the\nlocal time zone (\\fIYYYY-MM-DD\\fR or \\fIYYYY-MM-DD HH:MM:SS\\fR), or as the prefix '@'\nfollowed by the number of seconds since the Unix epoch (@[0-9]+).\n.B --change-older-than\nor\n.B --older\ncan be used as aliases.\n\nExamples:\n  \\-\\-changed-before \"2018-10-27 10:00:00\"\n  \\-\\-change-older-than 2weeks\n  \\-\\-older @1704067200\n.TP\n.BI \"-o, \\-\\-owner \" [user][:group]\nFilter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side\nis optional. Precede either side with a '!' 
to exclude files instead.\n\nExamples:\n  \\-\\-owner john\n  \\-\\-owner :students\n  \\-\\-owner \"!john:students\"\n.TP\n.BI \"-C, \\-\\-base\\-directory \" path\nChange the current working directory of fd to the provided path. This means that search results will\nbe shown with respect to the given base path. Note that relative paths which are passed to fd via the\npositional \\fIpath\\fR argument or the \\fB\\-\\-search\\-path\\fR option will also be resolved relative to\nthis directory.\n.TP\n.BI \"\\-\\-path\\-separator \" separator\nSet the path separator to use when printing file paths. The default is the OS-specific separator\n('/' on Unix, '\\\\' on Windows).\n.TP\n.BI \"\\-\\-search\\-path \" search\\-path\nProvide paths to search as an alternative to the positional \\fIpath\\fR argument. Changes the usage to\n\\'fd [FLAGS/OPTIONS] \\-\\-search\\-path PATH \\-\\-search\\-path PATH2 [PATTERN]\\'\n.TP\n.BI \"\\-\\-format \" fmt\nSpecify a template string that is used for printing a line for each file found.\n\nThe following placeholders are substituted into the string for each file before printing:\n.RS\n.IP {}\npath (of the current search result)\n.IP {/}\nbasename\n.IP {//}\nparent directory\n.IP {.}\npath without file extension\n.IP {/.}\nbasename without file extension\n.IP {{\nliteral '{' (an escape sequence)\n.IP }}\nliteral '}' (an escape sequence)\n.P\nNotice that you can use \"{{\" and \"}}\" to escape \"{\" and \"}\" respectively, which is especially\nuseful if you need to include the literal text of one of the above placeholders.\n.RE\n.TP\n.BI \"\\-x, \\-\\-exec \" command\n.RS\nExecute\n.I command\nfor each search result in parallel (use --threads=1 for sequential command execution).\n\nNote that all subsequent positional arguments are considered to be arguments to the\n.I command\n- not to fd.\nIt is therefore recommended to place the \\-x/\\-\\-exec option last. 
Alternatively, you can supply\na ';' argument to end the argument list and continue with more fd options.\nMost shells require ';' to be escaped: '\\\\;'.\nThis option can be specified multiple times, in which case all commands are run for each\nfile found, in the order they are provided. In that case, you must supply a ';' argument for\nall but the last commands.\n\nIf parallelism is enabled, the order commands will be executed in is non-deterministic. And even with\n--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is\nrecommended that you don't rely on any ordering of the results.\n\nBefore executing the command, any placeholder patterns in the command are replaced with the\ncorresponding values for the current file. The same placeholders are used as in the \"\\-\\-format\"\noption.\n\nIf no placeholder is present, an implicit \"{}\" at the end is assumed.\n\nIf --print0 is also given, then a null character (\\\\0) will be printed between the output for each found entry.\nThis allows another program to easily distinguish the output for each file if the command(s) produce multiple lines.\n\nExamples:\n\n  - find all *.zip files and unzip them:\n\n        fd -e zip -x unzip\n\n  - find *.h and *.cpp files and run \"clang-format -i ..\" for each of them:\n\n        fd -e h -e cpp -x clang-format -i\n\n  - Convert all *.jpg files to *.png files:\n\n        fd -e jpg -x convert {} {.}.png\n\n  - Run stat for each *.txt file, separated by null characters\n\n        fd -0 -e txt -x stat\n.RE\n.TP\n.BI \"\\-X, \\-\\-exec-batch \" command\n.RS\nExecute\n.I command\nonce, with all search results as arguments.\n\nThe order of the arguments is non-deterministic and should not be relied upon.\n\nThis uses the same placeholders as \"\\-\\-format\" and \"\\-\\-exec\", but instead of expanding\nonce per command invocation each argument containing a placeholder is expanding for every\nfile in a batch and passed as separate 
arguments.\n\nIf no placeholder is present, an implicit \"{}\" at the end is assumed.\n\nLike \\-\\-exec, subsequent arguments are assumed to be part of\n.I command\nuntil either the end of command arguments or a ';' argument. See above.\n\nLike \\-\\-exec, this can be used multiple times, in which case each command will be run in\nthe order given.\n\nExamples:\n\n  - Find all test_*.py files and open them in your favorite editor:\n\n        fd -g 'test_*.py' -X vim\n\n    Note that this executes a single \"vim\" process with all search results as arguments.\n\n  - Find all *.rs files and count the lines with \"wc -l ...\":\n\n        fd -e rs -X wc -l\n.RE\n.TP\n.BI \"\\-\\-batch-size \" size\nMaximum number of arguments to pass to the command given with -X. If the number of results is\ngreater than the given size, the command given with -X is run again with remaining arguments. A\nbatch size of zero means there is no limit (default), but note that batching might still happen\ndue to OS restrictions on the maximum length of command lines.\n.SH PATTERN SYNTAX\nThe regular expression syntax used by fd is documented here:\n\n    https://docs.rs/regex/1.0.0/regex/#syntax\n\nThe glob syntax is documented here:\n\n    https://docs.rs/globset/#syntax\n.SH ENVIRONMENT\n.TP\n.B LS_COLORS\nDetermines how to colorize search results, see\n.BR dircolors (1) .\n.TP\n.B NO_COLOR\nDisables colorized output.\n.TP\n.B XDG_CONFIG_HOME, HOME\nUsed to locate the global ignore file. If\n.B XDG_CONFIG_HOME\nis set, use\n.IR $XDG_CONFIG_HOME/fd/ignore .\nOtherwise, use\n.IR $HOME/.config/fd/ignore .\n.SH FILES\n.TP\n.B .fdignore\nThis file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns\nthat should be excluded from the search. However, this file is specific to fd, and will be used even\nif the --no-ignore-vcs option is used.\n.TP\n.B $XDG_CONFIG_HOME/fd/ignore\nGlobal ignore file. 
Unless ignore mode is turned off (such as with --no-ignore)\nignore entries in this file will be ignored, as if it was an .fdignore file in the\ncurrent directory.\n.SH EXAMPLES\n.TP\n.RI \"Find files and directories that match the pattern '\" needle \"':\"\n$ fd needle\n.TP\n.RI \"Start a search in a given directory (\" /var/log \"):\"\n$ fd nginx /var/log\n.TP\n.RI \"Find all Python files (all files with the extension \" .py \") in the current directory:\"\n$ fd -e py\n.TP\n.RI \"Open all search results with vim:\"\n$ fd pattern -X vim\n.SH Tips and Tricks\n.IP \\[bu]\nIf you add \".git/\" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then\n\".git\" folders will be ignored by default, even when the --hidden option is used.\n.IP \\[bu]\nYou can use a shell alias or a wrapper script in order to pass desired flags to fd\nby default. For example if you do not like the default behavior of respecting gitignore,\nyou can use\n`alias fd=\"/usr/bin/fd --no-ignore-vcs\"`\nin your .bashrc to create an alias for fd that doesn't ignore git files by default.\n.SH BUGS\nBugs can be reported on GitHub: https://github.com/sharkdp/fd/issues\n.SH SEE ALSO\n.BR find (1)\n"
  },
  {
    "path": "doc/release-checklist.md",
    "content": "# Release checklist\n\nThis file can be used as-is, or copied into the GitHub PR description which includes\nnecessary changes for the upcoming release.\n\n## Version bump\n\n- [ ] Create a new branch for the required changes for this release.\n- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.\n      Make sure to `git add` the `Cargo.lock` changes as well.\n- [ ] Find the current min. supported Rust version by running\n      `grep rust-version Cargo.toml`.\n- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.\n- [ ] Update `CHANGELOG.md`. Change the heading of the *\"Upcoming release\"* section\n      to the version of this release.\n\n## Pre-release checks and updates\n\n- [ ] Install the latest version (`cargo install --locked -f --path .`) and make\n      sure that it is available on the `PATH` (`fd --version` should show the\n      new version).\n- [ ] Review `-h`, `--help`, and the `man` page.\n- [ ] Run `fd -h` and copy the output to the *\"Command-line options\"* section in\n      the README\n- [ ] Push all changes and wait for CI to succeed (before continuing with the\n      next section).\n- [ ] Optional: manually test the new features and command-line options described\n      in the `CHANGELOG.md`.\n- [ ] Run `cargo publish --dry-run` to make sure that it will succeed later\n      (after creating the GitHub release).\n\n## Release\n\n- [ ] Merge your release branch (should be a fast-forward merge).\n- [ ] Create a tag and push it: `git tag vX.Y.Z; git push origin tag vX.Y.Z`.\n      This will trigger the deployment via GitHub Actions.\n      REMINDER: If your `origin` is a fork, don't forget to push to e.g. `upstream`\n      instead.\n- [ ] Go to https://github.com/sharkdp/fd/releases/new to create the new\n      release. Select the new tag and also use it as the release title. 
For the\n      release notes, copy the corresponding section from `CHANGELOG.md` and\n      possibly add additional remarks for package maintainers.\n      Publish the release.\n- [ ] Check if the binary deployment works (archives and Debian packages should\n      appear when the CI run *for the Git tag* has finished).\n- [ ] Publish to crates.io by running `cargo publish` in a *clean* repository.\n      One way to do this is to clone a fresh copy.\n\n## Post-release\n\n- [ ] Prepare a new *\"Upcoming release\"* section at the top of `CHANGELOG.md`.\n      Put this at the top:\n\n      # Upcoming release\n\n      ## Features\n\n\n      ## Bugfixes\n\n\n      ## Changes\n\n\n      ## Other\n\n"
  },
  {
    "path": "doc/screencast.sh",
    "content": "#!/bin/bash\n# Designed to be executed via svg-term from the fd root directory:\n# svg-term --command=\"bash doc/screencast.sh\" --out doc/screencast.svg --padding=10\n# Then run this (workaround for #1003):\n# sed -i '' 's/<text/<text font-size=\"1.67\"/g' doc/screencast.svg\nset -e\nset -u\n\nPROMPT=\"▶\"\n\nenter() {\n    INPUT=$1\n    DELAY=1\n\n    prompt\n    sleep \"$DELAY\"\n    type \"$INPUT\"\n    sleep 0.5\n    printf '%b' \"\\\\n\"\n    eval \"$INPUT\"\n    type \"\\\\n\"\n}\n\nprompt() {\n    printf '%b ' \"$PROMPT\" | pv -q\n}\n\ntype() {\n    printf '%b' \"$1\" | pv -qL $((10+(-2 + RANDOM%5)))\n}\n\nmain() {\n    IFS='%'\n\n    enter \"fd\"\n\n    enter \"fd app\"\n\n    enter \"fd fi\"\n\n    enter \"fd fi --type f\"\n\n    enter \"fd --type d\"\n\n    enter \"fd -e md\"\n\n    enter \"fd -e md --exec wc -l\"\n\n    enter \"fd '^[A-Z]'\"\n\n    enter \"fd --exclude src\"\n\n    enter \"fd --hidden sample\"\n\n    prompt\n\n    sleep 3\n\n    echo \"\"\n\n    unset IFS\n}\n\nmain\n"
  },
  {
    "path": "doc/sponsors.md",
    "content": "## Sponsors\n\n`fd` development is sponsored by many individuals and companies. Thank you very much!\n\nPlease note, that being sponsored does not affect the individuality of the `fd`\nproject or affect the maintainers' actions in any way.\nWe remain impartial and continue to assess pull requests solely on merit - the\nfeatures added, bugs solved, and effect on the overall complexity of the code.\nNo issue will have a different priority based on sponsorship status of the\nreporter.\n\nContributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.\n"
  },
  {
    "path": "rustfmt.toml",
    "content": "# Defaults are used\n"
  },
  {
    "path": "scripts/create-deb.sh",
    "content": "#!/bin/bash\nCOPYRIGHT_YEARS=\"2018 - \"$(date \"+%Y\")\nMAINTAINER=\"David Peter <mail@david-peter.de>\"\nREPO=\"https://github.com/sharkdp/fd\"\nDPKG_STAGING=\"${CICD_INTERMEDIATES_DIR:-.}/debian-package\"\nDPKG_DIR=\"${DPKG_STAGING}/dpkg\"\nmkdir -p \"${DPKG_DIR}\"\n\nif [[ -z \"$TARGET\" ]]; then\n  TARGET=\"$(rustc -vV | sed -n 's|host: \\(.*\\)|\\1|p')\"\nfi\n\ncase \"$TARGET\" in\n  *-musl*)\n    DPKG_BASENAME=fd-musl\n    DPKG_CONFLICTS=\"fd, fd-find\"\n    ;;\n  *)\n    DPKG_BASENAME=fd\n    DPKG_CONFLICTS=\"fd-musl, fd-find\"\n    ;;\nesac\n\nif [[ -z \"$DPKG_VERSION\" ]]; then\n  DPKG_VERSION=$(cargo metadata --no-deps --format-version 1 | jq -r .packages[0].version)\nfi\n\nunset DPKG_ARCH\ncase \"${TARGET}\" in\n  aarch64-*-linux-*) DPKG_ARCH=arm64 ;;\n  arm-*-linux-*hf) DPKG_ARCH=armhf ;;\n  i686-*-linux-*) DPKG_ARCH=i686 ;;\n  x86_64-*-linux-*) DPKG_ARCH=amd64 ;;\n  *) DPKG_ARCH=notset ;;\nesac;\n\nDPKG_NAME=\"${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb\"\n\nBIN_PATH=${BIN_PATH:-target/${TARGET}/release/fd}\n\n# Binary\ninstall -Dm755 \"${BIN_PATH}\" \"${DPKG_DIR}/usr/bin/fd\"\n\n# Man page\ninstall -Dm644 'doc/fd.1' \"${DPKG_DIR}/usr/share/man/man1/fd.1\"\ngzip -n --best \"${DPKG_DIR}/usr/share/man/man1/fd.1\"\n\n# Autocompletion files\ninstall -Dm644 'autocomplete/fd.bash' \"${DPKG_DIR}/usr/share/bash-completion/completions/fd\"\ninstall -Dm644 'autocomplete/fd.fish' \"${DPKG_DIR}/usr/share/fish/vendor_completions.d/fd.fish\"\ninstall -Dm644 'autocomplete/_fd' \"${DPKG_DIR}/usr/share/zsh/vendor-completions/_fd\"\n\n# README and LICENSE\ninstall -Dm644 \"README.md\" \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md\"\ninstall -Dm644 \"LICENSE-MIT\" \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT\"\ninstall -Dm644 \"LICENSE-APACHE\" \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE\"\ninstall -Dm644 \"CHANGELOG.md\" \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog\"\ngzip -n --best 
\"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog\"\n\n# Create symlinks so fdfind can be used as well:\nln -s \"/usr/bin/fd\" \"${DPKG_DIR}/usr/bin/fdfind\"\nln -s  './fd.bash' \"${DPKG_DIR}/usr/share/bash-completion/completions/fdfind\"\nln -s  './fd.fish' \"${DPKG_DIR}/usr/share/fish/vendor_completions.d/fdfind.fish\"\nln -s  './_fd' \"${DPKG_DIR}/usr/share/zsh/vendor-completions/_fdfind\"\n\ncat > \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright\" <<EOF\nFormat: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/\nUpstream-Name: fd\nSource: ${REPO}\n\nFiles: *\nCopyright: ${MAINTAINER}\nCopyright: $COPYRIGHT_YEARS ${MAINTAINER}\nLicense: Apache-2.0 or MIT\n\nLicense: Apache-2.0\n  On Debian systems, the complete text of the Apache-2.0 can be found in the\n  file /usr/share/common-licenses/Apache-2.0.\n\nLicense: MIT\n  Permission is hereby granted, free of charge, to any\n  person obtaining a copy of this software and associated\n  documentation files (the \"Software\"), to deal in the\n  Software without restriction, including without\n  limitation the rights to use, copy, modify, merge,\n  publish, distribute, sublicense, and/or sell copies of\n  the Software, and to permit persons to whom the Software\n  is furnished to do so, subject to the following\n  conditions:\n  .\n  The above copyright notice and this permission notice\n  shall be included in all copies or substantial portions\n  of the Software.\n  .\n  THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n  ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n  TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n  PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n  SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n  CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n  OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n  IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n  DEALINGS IN THE SOFTWARE.\nEOF\n  chmod 644 \"${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright\"\n\n  # control file\n  mkdir -p \"${DPKG_DIR}/DEBIAN\"\n  cat > \"${DPKG_DIR}/DEBIAN/control\" <<EOF\nPackage: ${DPKG_BASENAME}\nVersion: ${DPKG_VERSION}\nSection: utils\nPriority: optional\nMaintainer: ${MAINTAINER}\nHomepage: ${REPO}\nArchitecture: ${DPKG_ARCH}\nProvides: fd\nConflicts: ${DPKG_CONFLICTS}\nDescription: simple, fast and user-friendly alternative to find\n  fd is a program to find entries in your filesystem.\n  It is a simple, fast and user-friendly alternative to find.\n  While it does not aim to support all of finds powerful functionality, it provides\n  sensible (opinionated) defaults for a majority of use cases.\nEOF\n\nDPKG_PATH=\"${DPKG_STAGING}/${DPKG_NAME}\"\n\nif [[ -n $GITHUB_OUTPUT ]]; then\n  echo \"DPKG_NAME=${DPKG_NAME}\" >> \"$GITHUB_OUTPUT\"\n  echo \"DPKG_PATH=${DPKG_PATH}\" >> \"$GITHUB_OUTPUT\"\nfi\n\n# build dpkg\nfakeroot dpkg-deb --build \"${DPKG_DIR}\" \"${DPKG_PATH}\"\n"
  },
  {
    "path": "scripts/version-bump.sh",
    "content": "#!/usr/bin/bash\n\nset -eu\n\n# This script automates the \"Version bump\" section\n\nversion=\"$1\"\n\nif [[ -z $version ]]; then\n  echo \"Usage: must supply version as first argument\" >&2\n  exit 1\nfi\n\ngit switch -C \"release-$version\"\nsed -i -e \"0,/^\\[badges/{s/^version =.*/version = \\\"$version\\\"/}\" Cargo.toml\n\nmsrv=\"$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= \"\\(.*\\)\"/\\1/')\"\n\nsed -i -e \"s/Note that rust version \\*[0-9.]+\\* or later/Note that rust version *$msrv* or later/\" README.md\n\nsed -i -e \"s/^# Upcoming release/# $version/\" CHANGELOG.md\n\n"
  },
  {
    "path": "src/cli.rs",
    "content": "use std::num::NonZeroUsize;\nuse std::path::{Path, PathBuf};\nuse std::time::Duration;\n\nuse anyhow::anyhow;\nuse clap::{\n    Arg, ArgAction, ArgGroup, ArgMatches, Command, Parser, ValueEnum, error::ErrorKind,\n    value_parser,\n};\n#[cfg(feature = \"completions\")]\nuse clap_complete::Shell;\nuse normpath::PathExt;\n\nuse crate::error::print_error;\nuse crate::exec::CommandSet;\nuse crate::filesystem;\n#[cfg(unix)]\nuse crate::filter::OwnerFilter;\nuse crate::filter::SizeFilter;\n\n#[derive(Parser)]\n#[command(\n    name = \"fd\",\n    version,\n    about = \"A program to find entries in your filesystem\",\n    after_long_help = \"Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues\",\n    max_term_width = 98,\n    args_override_self = true,\n    group(ArgGroup::new(\"execs\").args(&[\"exec\", \"exec_batch\", \"list_details\"]).conflicts_with_all(&[\n            \"max_results\", \"quiet\", \"max_one_result\"])),\n)]\npub struct Opts {\n    /// Include hidden directories and files in the search results (default:\n    /// hidden files and directories are skipped). 
Files and directories are\n    /// considered to be hidden if their name starts with a `.` sign (dot).\n    /// Any files or directories that are ignored due to the rules described by\n    /// --no-ignore are still ignored unless otherwise specified.\n    /// The flag can be overridden with --no-hidden.\n    #[arg(\n        long,\n        short = 'H',\n        help = \"Search hidden files and directories\",\n        long_help\n    )]\n    pub hidden: bool,\n\n    /// Overrides --hidden\n    #[arg(long, overrides_with = \"hidden\", hide = true, action = ArgAction::SetTrue)]\n    no_hidden: (),\n\n    /// Show search results from files and directories that would otherwise be\n    /// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,\n    /// The flag can be overridden with --ignore.\n    #[arg(\n        long,\n        short = 'I',\n        help = \"Do not respect .(git|fd)ignore files\",\n        long_help\n    )]\n    pub no_ignore: bool,\n\n    /// Overrides --no-ignore\n    #[arg(long, overrides_with = \"no_ignore\", hide = true, action = ArgAction::SetTrue)]\n    ignore: (),\n\n    ///Show search results from files and directories that\n    ///would otherwise be ignored by '.gitignore' files.\n    ///The flag can be overridden with --ignore-vcs.\n    #[arg(\n        long,\n        hide_short_help = true,\n        help = \"Do not respect .gitignore files\",\n        long_help\n    )]\n    pub no_ignore_vcs: bool,\n\n    /// Overrides --no-ignore-vcs\n    #[arg(long, overrides_with = \"no_ignore_vcs\", hide = true, action = ArgAction::SetTrue)]\n    ignore_vcs: (),\n\n    /// Do not require a git repository to respect gitignores.\n    /// By default, fd will only respect global gitignore rules, .gitignore rules,\n    /// and local exclude rules if fd detects that you are searching inside a\n    /// git repository. 
This flag allows you to relax this restriction such that\n    /// fd will respect all git related ignore rules regardless of whether you're\n    /// searching in a git repository or not.\n    ///\n    ///\n    /// This flag can be disabled with --require-git.\n    #[arg(\n        long,\n        overrides_with = \"require_git\",\n        hide_short_help = true,\n        // same description as ripgrep's flag: ripgrep/crates/core/app.rs\n        long_help\n    )]\n    pub no_require_git: bool,\n\n    /// Overrides --no-require-git\n    #[arg(long, overrides_with = \"no_require_git\", hide = true, action = ArgAction::SetTrue)]\n    require_git: (),\n\n    /// Show search results from files and directories that would otherwise be\n    /// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.\n    #[arg(\n        long,\n        hide_short_help = true,\n        help = \"Do not respect .(git|fd)ignore files in parent directories\",\n        long_help\n    )]\n    pub no_ignore_parent: bool,\n\n    /// Do not respect the global ignore file\n    #[arg(long, hide = true)]\n    pub no_global_ignore_file: bool,\n\n    /// Perform an unrestricted search, including ignored and hidden files. This is\n    /// an alias for '--no-ignore --hidden'.\n    #[arg(long = \"unrestricted\", short = 'u', overrides_with_all(&[\"ignore\", \"no_hidden\"]), action(ArgAction::Count), hide_short_help = true,\n    help = \"Unrestricted search, alias for '--no-ignore --hidden'\",\n        long_help,\n        )]\n    rg_alias_hidden_ignore: u8,\n\n    /// Case-sensitive search (default: smart case)\n    #[arg(\n        long,\n        short = 's',\n        overrides_with(\"ignore_case\"),\n        long_help = \"Perform a case-sensitive search. 
By default, fd uses case-insensitive \\\n                     searches, unless the pattern contains an uppercase character (smart \\\n                     case).\"\n    )]\n    pub case_sensitive: bool,\n\n    /// Perform a case-insensitive search. By default, fd uses case-insensitive\n    /// searches, unless the pattern contains an uppercase character (smart\n    /// case).\n    #[arg(\n        long,\n        short = 'i',\n        overrides_with(\"case_sensitive\"),\n        help = \"Case-insensitive search (default: smart case)\",\n        long_help\n    )]\n    pub ignore_case: bool,\n\n    /// Perform a glob-based search instead of a regular expression search.\n    #[arg(\n        long,\n        short = 'g',\n        conflicts_with(\"fixed_strings\"),\n        help = \"Glob-based search (default: regular expression)\",\n        long_help\n    )]\n    pub glob: bool,\n\n    /// Perform a regular-expression based search (default). This can be used to\n    /// override --glob.\n    #[arg(\n        long,\n        overrides_with(\"glob\"),\n        hide_short_help = true,\n        help = \"Regular-expression based search (default)\",\n        long_help\n    )]\n    pub regex: bool,\n\n    /// Treat the pattern as a literal string instead of a regular expression. Note\n    /// that this also performs substring comparison. If you want to match on an\n    /// exact filename, consider using '--glob'.\n    #[arg(\n        long,\n        short = 'F',\n        alias = \"literal\",\n        hide_short_help = true,\n        help = \"Treat pattern as literal string stead of regex\",\n        long_help\n    )]\n    pub fixed_strings: bool,\n\n    /// Add additional required search patterns, all of which must be matched. Multiple\n    /// additional patterns can be specified. 
The patterns are regular\n    /// expressions, unless '--glob' or '--fixed-strings' is used.\n    #[arg(\n        long = \"and\",\n        value_name = \"pattern\",\n        help = \"Additional search patterns that need to be matched\",\n        long_help,\n        hide_short_help = true,\n        allow_hyphen_values = true\n    )]\n    pub exprs: Option<Vec<String>>,\n\n    /// Shows the full path starting from the root as opposed to relative paths.\n    /// The flag can be overridden with --relative-path.\n    #[arg(\n        long,\n        short = 'a',\n        help = \"Show absolute instead of relative paths\",\n        long_help\n    )]\n    pub absolute_path: bool,\n\n    /// Overrides --absolute-path\n    #[arg(long, overrides_with = \"absolute_path\", hide = true, action = ArgAction::SetTrue)]\n    relative_path: (),\n\n    /// Use a detailed listing format like 'ls -l'. This is basically an alias\n    /// for '--exec-batch ls -l' with some additional 'ls' options. This can be\n    /// used to see more metadata, to show symlink targets and to achieve a\n    /// deterministic sort order.\n    #[arg(\n        long,\n        short = 'l',\n        conflicts_with(\"absolute_path\"),\n        help = \"Use a long listing format with file metadata\",\n        long_help\n    )]\n    pub list_details: bool,\n\n    /// Follow symbolic links\n    #[arg(\n        long,\n        short = 'L',\n        alias = \"dereference\",\n        long_help = \"By default, fd does not descend into symlinked directories. Using this \\\n                     flag, symbolic links are also traversed. \\\n                     Flag can be overridden with --no-follow.\"\n    )]\n    pub follow: bool,\n\n    /// Overrides --follow\n    #[arg(long, overrides_with = \"follow\", hide = true, action = ArgAction::SetTrue)]\n    no_follow: (),\n\n    /// By default, the search pattern is only matched against the filename (or directory name). 
Using this flag, the pattern is matched against the full (absolute) path. Example:\n    ///   fd --glob -p '**/.git/config'\n    #[arg(\n        long,\n        short = 'p',\n        help = \"Search full abs. path (default: filename only)\",\n        long_help,\n        verbatim_doc_comment\n    )]\n    pub full_path: bool,\n\n    /// Separate search results by the null character (instead of newlines).\n    /// Useful for piping results to 'xargs'.\n    #[arg(\n        long = \"print0\",\n        short = '0',\n        conflicts_with(\"list_details\"),\n        hide_short_help = true,\n        help = \"Separate search results by the null character\",\n        long_help\n    )]\n    pub null_separator: bool,\n\n    /// Limit the directory traversal to a given depth. By default, there is no\n    /// limit on the search depth.\n    #[arg(\n        long,\n        short = 'd',\n        value_name = \"depth\",\n        alias(\"maxdepth\"),\n        help = \"Set maximum search depth (default: none)\",\n        long_help\n    )]\n    max_depth: Option<usize>,\n\n    /// Only show search results starting at the given depth.\n    /// See also: '--max-depth' and '--exact-depth'\n    #[arg(\n        long,\n        value_name = \"depth\",\n        hide_short_help = true,\n        alias(\"mindepth\"),\n        help = \"Only show search results starting at the given depth.\",\n        long_help\n    )]\n    min_depth: Option<usize>,\n\n    /// Only show search results at the exact given depth. This is an alias for\n    /// '--min-depth <depth> --max-depth <depth>'.\n    #[arg(long, value_name = \"depth\", hide_short_help = true, conflicts_with_all(&[\"max_depth\", \"min_depth\"]),\n    help = \"Only show search results at the exact given depth\",\n        long_help,\n        )]\n    exact_depth: Option<usize>,\n\n    /// Exclude files/directories that match the given glob pattern. This\n    /// overrides any other ignore logic. 
Multiple exclude patterns can be\n    /// specified.\n    ///\n    /// Examples:\n    /// {n}  --exclude '*.pyc'\n    /// {n}  --exclude node_modules\n    #[arg(\n        long,\n        short = 'E',\n        value_name = \"glob\",\n        help = \"Exclude entries that match the given glob pattern\",\n        long_help\n    )]\n    pub exclude: Vec<String>,\n\n    /// Do not traverse into directories that match the search criteria. If\n    /// you want to exclude specific directories, use the '--exclude=…' option.\n    #[arg(long, hide_short_help = true, conflicts_with_all(&[\"size\", \"exact_depth\"]),\n        long_help,\n        )]\n    pub prune: bool,\n\n    /// Filter the search by type:\n    /// {n}  'f' or 'file':         regular files\n    /// {n}  'd' or 'dir' or 'directory':    directories\n    /// {n}  'l' or 'symlink':      symbolic links\n    /// {n}  's' or 'socket':       socket\n    /// {n}  'p' or 'pipe':         named pipe (FIFO)\n    /// {n}  'b' or 'block-device': block device\n    /// {n}  'c' or 'char-device':  character device\n    /// {n}{n}  'x' or 'executable':   executables\n    /// {n}  'e' or 'empty':        empty files or directories\n    ///\n    /// This option can be specified more than once to include multiple file types.\n    /// Searching for '--type file --type symlink' will show both regular files as\n    /// well as symlinks. Note that the 'executable' and 'empty' filters work differently:\n    /// '--type executable' implies '--type file' by default. 
And '--type empty' searches\n    /// for empty files and directories, unless either '--type file' or '--type directory'\n    /// is specified in addition.\n    ///\n    /// Examples:\n    /// {n}  - Only search for files:\n    /// {n}      fd --type file …\n    /// {n}      fd -tf …\n    /// {n}  - Find both files and symlinks\n    /// {n}      fd --type file --type symlink …\n    /// {n}      fd -tf -tl …\n    /// {n}  - Find executable files:\n    /// {n}      fd --type executable\n    /// {n}      fd -tx\n    /// {n}  - Find empty files:\n    /// {n}      fd --type empty --type file\n    /// {n}      fd -te -tf\n    /// {n}  - Find empty directories:\n    /// {n}      fd --type empty --type directory\n    /// {n}      fd -te -td\n    #[arg(\n        long = \"type\",\n        short = 't',\n        value_name = \"filetype\",\n        hide_possible_values = true,\n        value_enum,\n        help = \"Filter by type: file (f), directory (d/dir), symlink (l), \\\n                executable (x), empty (e), socket (s), pipe (p), \\\n                char-device (c), block-device (b)\",\n        long_help\n    )]\n    pub filetype: Option<Vec<FileType>>,\n\n    /// (Additionally) filter search results by their file extension. 
Multiple\n    /// allowable file extensions can be specified.\n    ///\n    /// If you want to search for files without extension,\n    /// you can use the regex '^[^.]+$' as a normal search pattern.\n    #[arg(\n        long = \"extension\",\n        short = 'e',\n        value_name = \"ext\",\n        help = \"Filter by file extension\",\n        long_help\n    )]\n    pub extensions: Option<Vec<String>>,\n\n    /// Limit results based on the size of files using the format <+-><NUM><UNIT>.\n    ///    '+': file size must be greater than or equal to this\n    ///    '-': file size must be less than or equal to this\n    ///\n    /// If neither '+' nor '-' is specified, file size must be exactly equal to this.\n    ///    'NUM':  The numeric size (e.g. 500)\n    ///    'UNIT': The units for NUM. They are not case-sensitive.\n    /// Allowed unit values:\n    ///     'b':  bytes\n    ///     'k':  kilobytes (base ten, 10^3 = 1000 bytes)\n    ///     'm':  megabytes\n    ///     'g':  gigabytes\n    ///     't':  terabytes\n    ///     'ki': kibibytes (base two, 2^10 = 1024 bytes)\n    ///     'mi': mebibytes\n    ///     'gi': gibibytes\n    ///     'ti': tebibytes\n    #[arg(long, short = 'S', value_parser = SizeFilter::from_string, allow_hyphen_values = true, verbatim_doc_comment, value_name = \"size\",\n        help = \"Limit results based on the size of files\",\n        long_help,\n        verbatim_doc_comment,\n        )]\n    pub size: Vec<SizeFilter>,\n\n    /// Filter results based on the file modification time. Files with modification times\n    /// greater than the argument are returned. 
The argument can be provided\n    /// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).\n    /// If the time is not specified, it defaults to 00:00:00.\n    /// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.\n    ///\n    /// Examples:\n    /// {n}    --changed-within 2weeks\n    /// {n}    --change-newer-than '2018-10-27 10:00:00'\n    /// {n}    --newer 2018-10-27\n    /// {n}    --changed-after 1day\n    #[arg(\n        long,\n        alias(\"change-newer-than\"),\n        alias(\"newer\"),\n        alias(\"changed-after\"),\n        value_name = \"date|dur\",\n        help = \"Filter by file modification time (newer than)\",\n        long_help\n    )]\n    pub changed_within: Option<String>,\n\n    /// Filter results based on the file modification time. Files with modification times\n    /// less than the argument are returned. The argument can be provided\n    /// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).\n    /// '--change-older-than' or '--older' can be used as aliases.\n    ///\n    /// Examples:\n    /// {n}    --changed-before '2018-10-27 10:00:00'\n    /// {n}    --change-older-than 2weeks\n    /// {n}    --older 2018-10-27\n    #[arg(\n        long,\n        alias(\"change-older-than\"),\n        alias(\"older\"),\n        value_name = \"date|dur\",\n        help = \"Filter by file modification time (older than)\",\n        long_help\n    )]\n    pub changed_before: Option<String>,\n\n    /// Filter files by their user and/or group.\n    /// Format: [(user|uid)][:(group|gid)]. Either side is optional.\n    /// Precede either side with a '!' 
to exclude files instead.\n    ///\n    /// Examples:\n    /// {n}    --owner john\n    /// {n}    --owner :students\n    /// {n}    --owner '!john:students'\n    #[cfg(unix)]\n    #[arg(long, short = 'o', value_parser = OwnerFilter::from_string, value_name = \"user:group\",\n        help = \"Filter by owning user and/or group\",\n        long_help,\n        )]\n    pub owner: Option<OwnerFilter>,\n\n    /// Instead of printing the file normally, print the format string with the following placeholders replaced:\n    ///   '{}': path (of the current search result)\n    ///   '{/}': basename\n    ///   '{//}': parent directory\n    ///   '{.}': path without file extension\n    ///   '{/.}': basename without file extension\n    #[arg(\n        long,\n        value_name = \"fmt\",\n        help = \"Print results according to template\",\n        conflicts_with = \"list_details\"\n    )]\n    pub format: Option<String>,\n\n    #[command(flatten)]\n    pub exec: Exec,\n\n    /// Maximum number of arguments to pass to the command given with -X.\n    /// If the number of results is greater than the given size,\n    /// the command given with -X is run again with remaining arguments.\n    /// A batch size of zero means there is no limit (default), but note\n    /// that batching might still happen due to OS restrictions on the\n    /// maximum length of command lines.\n    #[arg(\n        long,\n        value_name = \"size\",\n        hide_short_help = true,\n        requires(\"exec_batch\"),\n        value_parser = value_parser!(usize),\n        default_value_t,\n        help = \"Max number of arguments to run as a batch size with -X\",\n        long_help,\n    )]\n    pub batch_size: usize,\n\n    /// Add a custom ignore-file in '.gitignore' format. 
These files have a low precedence.\n    #[arg(\n        long,\n        value_name = \"path\",\n        hide_short_help = true,\n        help = \"Add a custom ignore-file in '.gitignore' format\",\n        long_help\n    )]\n    pub ignore_file: Vec<PathBuf>,\n\n    /// Declare when to use color for the pattern match output\n    #[arg(\n        long,\n        short = 'c',\n        value_enum,\n        default_value_t = ColorWhen::Auto,\n        value_name = \"when\",\n        help = \"When to use colors\",\n        long_help,\n    )]\n    pub color: ColorWhen,\n\n    /// Add a terminal hyperlink to a file:// url for each path in the output.\n    ///\n    /// Auto mode  is used if no argument is given to this option.\n    ///\n    /// This doesn't do anything for --exec and --exec-batch.\n    #[arg(\n        long,\n        alias = \"hyper\",\n        value_name = \"when\",\n        require_equals = true,\n        value_enum,\n        default_value_t = HyperlinkWhen::Never,\n        default_missing_value = \"auto\",\n        num_args = 0..=1,\n        help = \"Add hyperlinks to output paths\"\n    )]\n    pub hyperlink: HyperlinkWhen,\n\n    /// Ignore directories containing the named entry.\n    #[arg(long, value_name = \"name\")]\n    pub ignore_contain: Vec<String>,\n\n    /// Set number of threads to use for searching & executing (default: number\n    /// of available CPU cores)\n    #[arg(long, short = 'j', value_name = \"num\", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]\n    pub threads: Option<NonZeroUsize>,\n\n    /// Milliseconds to buffer before streaming search results to console\n    ///\n    /// Amount of time in milliseconds to buffer, before streaming the search\n    /// results to the console.\n    #[arg(long, hide = true, value_parser = parse_millis)]\n    pub max_buffer_time: Option<Duration>,\n\n    ///Limit the number of search results to 'count' and quit immediately.\n    #[arg(\n        long,\n        value_name = 
\"count\",\n        hide_short_help = true,\n        overrides_with(\"max_one_result\"),\n        help = \"Limit the number of search results\",\n        long_help\n    )]\n    max_results: Option<usize>,\n\n    /// Limit the search to a single result and quit immediately.\n    /// This is an alias for '--max-results=1'.\n    #[arg(\n        short = '1',\n        hide_short_help = true,\n        overrides_with(\"max_results\"),\n        help = \"Limit search to a single result\",\n        long_help\n    )]\n    max_one_result: bool,\n\n    /// When the flag is present, the program does not print anything and will\n    /// return with an exit code of 0 if there is at least one match. Otherwise, the\n    /// exit code will be 1.\n    /// '--has-results' can be used as an alias.\n    #[arg(\n        long,\n        short = 'q',\n        alias = \"has-results\",\n        hide_short_help = true,\n        conflicts_with(\"max_results\"),\n        help = \"Print nothing, exit code 0 if match found, 1 otherwise\",\n        long_help\n    )]\n    pub quiet: bool,\n\n    /// Enable the display of filesystem errors for situations such as\n    /// insufficient permissions or dead symlinks.\n    #[arg(\n        long,\n        hide_short_help = true,\n        help = \"Show filesystem errors\",\n        long_help\n    )]\n    pub show_errors: bool,\n\n    /// Change the current working directory of fd to the provided path. This\n    /// means that search results will be shown with respect to the given base\n    /// path. 
Note that relative paths which are passed to fd via the positional\n    /// <path> argument or the '--search-path' option will also be resolved\n    /// relative to this directory.\n    #[arg(\n        long,\n        short = 'C',\n        value_name = \"path\",\n        hide_short_help = true,\n        help = \"Change current working directory\",\n        long_help\n    )]\n    pub base_directory: Option<PathBuf>,\n\n    /// the search pattern which is either a regular expression (default) or a glob\n    /// pattern (if --glob is used). If no pattern has been specified, every entry\n    /// is considered a match. If your pattern starts with a dash (-), make sure to\n    /// pass '--' first, or it will be considered as a flag (fd -- '-foo').\n    #[arg(\n        default_value = \"\",\n        hide_default_value = true,\n        value_name = \"pattern\",\n        help = \"the search pattern (a regular expression, unless '--glob' is used; optional)\",\n        long_help\n    )]\n    pub pattern: String,\n\n    /// Set the path separator to use when printing file paths. The default is\n    /// the OS-specific separator ('/' on Unix, '\\' on Windows).\n    #[arg(\n        long,\n        value_name = \"separator\",\n        hide_short_help = true,\n        help = \"Set path separator when printing file paths\",\n        long_help\n    )]\n    pub path_separator: Option<String>,\n\n    /// The directory where the filesystem search is rooted (optional). If\n    /// omitted, search the current working directory.\n    #[arg(action = ArgAction::Append,\n        value_name = \"path\",\n        help = \"the root directories for the filesystem search (optional)\",\n        long_help,\n        )]\n    path: Vec<PathBuf>,\n\n    /// Provide paths to search as an alternative to the positional <path>\n    /// argument. 
Changes the usage to `fd [OPTIONS] --search-path <path>\n    /// --search-path <path2> [<pattern>]`\n    #[arg(\n        long,\n        conflicts_with(\"path\"),\n        value_name = \"search-path\",\n        hide_short_help = true,\n        help = \"Provides paths to search as an alternative to the positional <path> argument\",\n        long_help\n    )]\n    search_path: Vec<PathBuf>,\n\n    /// By default, relative paths are prefixed with './' when -x/--exec,\n    /// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a\n    /// path starting with '-' being treated as a command line option. Use\n    /// this flag to change this behavior. If this flag is used without a value,\n    /// it is equivalent to passing \"always\".\n    #[arg(long, conflicts_with_all(&[\"path\", \"search_path\"]), value_name = \"when\", hide_short_help = true, require_equals = true, long_help)]\n    strip_cwd_prefix: Option<Option<StripCwdWhen>>,\n\n    /// By default, fd will traverse the file system tree as far as other options\n    /// dictate. With this flag, fd ensures that it does not descend into a\n    /// different file system than the one it started in. 
Comparable to the -mount\n    /// or -xdev filters of find(1).\n    #[cfg(any(unix, windows))]\n    #[arg(long, aliases(&[\"mount\", \"xdev\"]), hide_short_help = true, long_help)]\n    pub one_file_system: bool,\n\n    #[cfg(feature = \"completions\")]\n    #[arg(long, hide = true, exclusive = true)]\n    gen_completions: Option<Option<Shell>>,\n}\n\nimpl Opts {\n    pub fn search_paths(&self) -> anyhow::Result<Vec<PathBuf>> {\n        // would it make sense to concatenate these?\n        let paths = if !self.path.is_empty() {\n            &self.path\n        } else if !self.search_path.is_empty() {\n            &self.search_path\n        } else {\n            let current_directory = Path::new(\"./\");\n            ensure_current_directory_exists(current_directory)?;\n            return Ok(vec![self.normalize_path(current_directory)]);\n        };\n        Ok(paths\n            .iter()\n            .filter_map(|path| {\n                if filesystem::is_existing_directory(path) {\n                    Some(self.normalize_path(path))\n                } else {\n                    print_error(format!(\n                        \"Search path '{}' is not a directory.\",\n                        path.to_string_lossy()\n                    ));\n                    None\n                }\n            })\n            .collect())\n    }\n\n    fn normalize_path(&self, path: &Path) -> PathBuf {\n        if self.absolute_path {\n            filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()\n        } else if path == Path::new(\".\") {\n            // Change \".\" to \"./\" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711\n            PathBuf::from(\"./\")\n        } else {\n            path.to_path_buf()\n        }\n    }\n\n    pub fn no_search_paths(&self) -> bool {\n        self.path.is_empty() && self.search_path.is_empty()\n    }\n\n    #[inline]\n    pub fn rg_alias_ignore(&self) -> bool {\n        self.rg_alias_hidden_ignore > 
0\n    }\n\n    pub fn max_depth(&self) -> Option<usize> {\n        self.max_depth.or(self.exact_depth)\n    }\n\n    pub fn min_depth(&self) -> Option<usize> {\n        self.min_depth.or(self.exact_depth)\n    }\n\n    pub fn threads(&self) -> NonZeroUsize {\n        self.threads.unwrap_or_else(default_num_threads)\n    }\n\n    pub fn max_results(&self) -> Option<usize> {\n        self.max_results\n            .filter(|&m| m > 0)\n            .or_else(|| self.max_one_result.then_some(1))\n    }\n\n    pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {\n        use self::StripCwdWhen::*;\n        self.no_search_paths()\n            && match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {\n                Auto => auto_pred(),\n                Always => true,\n                Never => false,\n            }\n    }\n\n    #[cfg(feature = \"completions\")]\n    pub fn gen_completions(&self) -> anyhow::Result<Option<Shell>> {\n        self.gen_completions\n            .map(|maybe_shell| match maybe_shell {\n                Some(sh) => Ok(sh),\n                None => {\n                    Shell::from_env().ok_or_else(|| anyhow!(\"Unable to get shell from environment\"))\n                }\n            })\n            .transpose()\n    }\n}\n\n/// Get the default number of threads to use, if not explicitly specified.\nfn default_num_threads() -> NonZeroUsize {\n    // If we can't get the amount of parallelism for some reason, then\n    // default to a single thread, because that is safe.\n    let fallback = NonZeroUsize::MIN;\n    // To limit startup overhead on massively parallel machines, don't use more\n    // than 64 threads.\n    let limit = NonZeroUsize::new(64).unwrap();\n\n    std::thread::available_parallelism()\n        .unwrap_or(fallback)\n        .min(limit)\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]\npub enum FileType {\n    #[value(alias = \"f\")]\n    File,\n    #[value(alias = \"d\", alias = \"dir\")]\n 
   Directory,\n    #[value(alias = \"l\")]\n    Symlink,\n    #[value(alias = \"b\")]\n    BlockDevice,\n    #[value(alias = \"c\")]\n    CharDevice,\n    /// A file which is executable by the current effective user\n    #[value(alias = \"x\")]\n    Executable,\n    #[value(alias = \"e\")]\n    Empty,\n    #[value(alias = \"s\")]\n    Socket,\n    #[value(alias = \"p\")]\n    Pipe,\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]\npub enum ColorWhen {\n    /// show colors if the output goes to an interactive console (default)\n    Auto,\n    /// always use colorized output\n    Always,\n    /// do not use colorized output\n    Never,\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]\npub enum StripCwdWhen {\n    /// Use the default behavior\n    Auto,\n    /// Always strip the ./ at the beginning of paths\n    Always,\n    /// Never strip the ./\n    Never,\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]\npub enum HyperlinkWhen {\n    /// Use hyperlinks only if color is enabled\n    Auto,\n    /// Always use hyperlinks when printing file paths\n    Always,\n    /// Never use hyperlinks\n    Never,\n}\n\n// there isn't a derive api for getting grouped values yet,\n// so we have to use hand-rolled parsing for exec and exec-batch\npub struct Exec {\n    pub command: Option<CommandSet>,\n}\n\nimpl clap::FromArgMatches for Exec {\n    fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {\n        let command = matches\n            .get_occurrences::<String>(\"exec\")\n            .map(CommandSet::new)\n            .or_else(|| {\n                matches\n                    .get_occurrences::<String>(\"exec_batch\")\n                    .map(CommandSet::new_batch)\n            })\n            .transpose()\n            .map_err(|e| clap::Error::raw(ErrorKind::InvalidValue, e))?;\n        Ok(Exec { command })\n    }\n\n    fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> clap::error::Result<()> {\n     
   *self = Self::from_arg_matches(matches)?;\n        Ok(())\n    }\n}\n\nimpl clap::Args for Exec {\n    fn augment_args(cmd: Command) -> Command {\n        cmd.arg(Arg::new(\"exec\")\n            .action(ArgAction::Append)\n            .long(\"exec\")\n            .short('x')\n            .num_args(1..)\n                .allow_hyphen_values(true)\n                .value_terminator(\";\")\n                .value_name(\"cmd\")\n                .conflicts_with(\"list_details\")\n                .help(\"Execute a command for each search result\")\n                .long_help(\n                    \"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \\\n                     There is no guarantee of the order commands are executed in, and the order should not be depended upon. \\\n                     All positional arguments following --exec are considered to be arguments to the command - not to fd. \\\n                     It is therefore recommended to place the '-x'/'--exec' option last. 
\\\n                     Use '\\\\;' to terminate the command template if you need to continue passing fd arguments afterwards.\\n\\\n                     The following placeholders are substituted before the command is executed:\\n  \\\n                       '{}':   path (of the current search result)\\n  \\\n                       '{/}':  basename\\n  \\\n                       '{//}': parent directory\\n  \\\n                       '{.}':  path without file extension\\n  \\\n                       '{/.}': basename without file extension\\n  \\\n                       '{{':   literal '{' (for escaping)\\n  \\\n                       '}}':   literal '}' (for escaping)\\n\\n\\\n                     If no placeholder is present, an implicit \\\"{}\\\" at the end is assumed.\\n\\n\\\n                     Examples:\\n\\n  \\\n                       - find all *.zip files and unzip them:\\n\\n      \\\n                           fd -e zip -x unzip\\n\\n  \\\n                       - find *.h and *.cpp files and run \\\"clang-format -i ..\\\" for each of them:\\n\\n      \\\n                           fd -e h -e cpp -x clang-format -i\\n\\n  \\\n                       - search within `src/` and echo each match (place `-x` last):\\n\\n      \\\n                           fd . 
src -x echo\\n\\n  \\\n                       - Convert all *.jpg files to *.png files:\\n\\n      \\\n                           fd -e jpg -x convert {} {.}.png\\\n                    \",\n                ),\n        )\n        .arg(\n            Arg::new(\"exec_batch\")\n                .action(ArgAction::Append)\n                .long(\"exec-batch\")\n                .short('X')\n                .num_args(1..)\n                .allow_hyphen_values(true)\n                .value_terminator(\";\")\n                .value_name(\"cmd\")\n                .conflicts_with_all([\"exec\", \"list_details\"])\n                .help(\"Execute a command with all search results at once\")\n                .long_help(\n                    \"Execute the given command once, with all search results as arguments.\\n\\\n                     The order of the arguments is non-deterministic, and should not be relied upon.\\n\\\n                     One of the following placeholders is substituted before the command is executed:\\n  \\\n                       '{}':   path (of all search results)\\n  \\\n                       '{/}':  basename\\n  \\\n                       '{//}': parent directory\\n  \\\n                       '{.}':  path without file extension\\n  \\\n                       '{/.}': basename without file extension\\n  \\\n                       '{{':   literal '{' (for escaping)\\n  \\\n                       '}}':   literal '}' (for escaping)\\n\\n\\\n                     If no placeholder is present, an implicit \\\"{}\\\" at the end is assumed.\\n\\n\\\n                     Examples:\\n\\n  \\\n                       - Find all test_*.py files and open them in your favorite editor:\\n\\n      \\\n                           fd -g 'test_*.py' -X vim\\n\\n  \\\n                       - Find all *.rs files and count the lines with \\\"wc -l ...\\\":\\n\\n      \\\n                           fd -e rs -X wc -l\\\n                     \"\n                ),\n        )\n   
 }\n\n    fn augment_args_for_update(cmd: Command) -> Command {\n        Self::augment_args(cmd)\n    }\n}\n\nfn parse_millis(arg: &str) -> Result<Duration, std::num::ParseIntError> {\n    Ok(Duration::from_millis(arg.parse()?))\n}\n\nfn ensure_current_directory_exists(current_directory: &Path) -> anyhow::Result<()> {\n    if filesystem::is_existing_directory(current_directory) {\n        Ok(())\n    } else {\n        Err(anyhow!(\n            \"Could not retrieve current directory (has it been deleted?).\"\n        ))\n    }\n}\n"
  },
  {
    "path": "src/config.rs",
    "content": "use std::{path::PathBuf, sync::Arc, time::Duration};\n\nuse lscolors::LsColors;\nuse regex::bytes::RegexSet;\n\nuse crate::exec::CommandSet;\nuse crate::filetypes::FileTypes;\n#[cfg(unix)]\nuse crate::filter::OwnerFilter;\nuse crate::filter::{SizeFilter, TimeFilter};\nuse crate::fmt::FormatTemplate;\n\n/// Configuration options for *fd*.\npub struct Config {\n    /// Whether the search is case-sensitive or case-insensitive.\n    pub case_sensitive: bool,\n\n    /// Cached current working directory for absolute path construction.\n    /// Populated when `--full-path` is set; `None` means search by filename only.\n    pub cwd: Option<PathBuf>,\n\n    /// Whether to ignore hidden files and directories (or not).\n    pub ignore_hidden: bool,\n\n    /// Whether to respect `.fdignore` files or not.\n    pub read_fdignore: bool,\n\n    /// Whether to respect ignore files in parent directories or not.\n    pub read_parent_ignore: bool,\n\n    /// Whether to respect VCS ignore files (`.gitignore`, ..) 
or not.\n    pub read_vcsignore: bool,\n\n    /// Whether to require a `.git` directory to respect gitignore files.\n    pub require_git_to_read_vcsignore: bool,\n\n    /// Whether to respect the global ignore file or not.\n    pub read_global_ignore: bool,\n\n    /// Whether to follow symlinks or not.\n    pub follow_links: bool,\n\n    /// Whether to limit the search to starting file system or not.\n    pub one_file_system: bool,\n\n    /// Whether elements of output should be separated by a null character\n    pub null_separator: bool,\n\n    /// The maximum search depth, or `None` if no maximum search depth should be set.\n    ///\n    /// A depth of `1` includes all files under the current directory, a depth of `2` also includes\n    /// all files under subdirectories of the current directory, etc.\n    pub max_depth: Option<usize>,\n\n    /// The minimum depth for reported entries, or `None`.\n    pub min_depth: Option<usize>,\n\n    /// Whether to stop traversing into matching directories.\n    pub prune: bool,\n\n    /// The number of threads to use.\n    pub threads: usize,\n\n    /// If true, the program doesn't print anything and will instead return an exit code of 0\n    /// if there's at least one match. Otherwise, the exit code will be 1.\n    pub quiet: bool,\n\n    /// Time to buffer results internally before streaming to the console. This is useful to\n    /// provide a sorted output, in case the total execution time is shorter than\n    /// `max_buffer_time`.\n    pub max_buffer_time: Option<Duration>,\n\n    /// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines\n    /// how to style different filetypes.\n    pub ls_colors: Option<LsColors>,\n\n    /// Whether or not we are writing to an interactive terminal\n    #[cfg_attr(not(unix), allow(unused))]\n    pub interactive_terminal: bool,\n\n    /// The type of file to search for. If set to `None`, all file types are displayed. 
If\n    /// set to `Some(..)`, only the types that are specified are shown.\n    pub file_types: Option<FileTypes>,\n\n    /// The extension to search for. Only entries matching the extension will be included.\n    ///\n    /// The value (if present) will be a lowercase string without leading dots.\n    pub extensions: Option<RegexSet>,\n\n    /// A format string to use to format results, similarly to exec\n    pub format: Option<FormatTemplate>,\n\n    /// If a value is supplied, each item found will be used to generate and execute commands.\n    pub command: Option<Arc<CommandSet>>,\n\n    /// Maximum number of search results to pass to each `command`. If zero, the number is\n    /// unlimited.\n    pub batch_size: usize,\n\n    /// A list of glob patterns that should be excluded from the search.\n    pub exclude_patterns: Vec<String>,\n\n    /// A list of custom ignore files.\n    pub ignore_files: Vec<PathBuf>,\n\n    /// The given constraints on the size of returned files\n    pub size_constraints: Vec<SizeFilter>,\n\n    /// Constraints on last modification time of files\n    pub time_constraints: Vec<TimeFilter>,\n\n    #[cfg(unix)]\n    /// User/group ownership constraint\n    pub owner_constraint: Option<OwnerFilter>,\n\n    /// Whether or not to display filesystem errors\n    pub show_filesystem_errors: bool,\n\n    /// The separator used to print file paths.\n    pub path_separator: Option<String>,\n\n    /// The actual separator, either the system default separator or `path_separator`\n    pub actual_path_separator: String,\n\n    /// The maximum number of search results\n    pub max_results: Option<usize>,\n\n    /// Whether or not to strip the './' prefix for search results\n    pub strip_cwd_prefix: bool,\n\n    /// Whether or not to use hyperlinks on paths\n    pub hyperlink: bool,\n\n    /// Names that should stop traversal down their parent. (e.g. 
https://bford.info/cachedir/).\n    pub ignore_contain: Vec<String>,\n}\n\nimpl Config {\n    /// Check whether results are being printed.\n    pub fn is_printing(&self) -> bool {\n        self.command.is_none()\n    }\n}\n"
  },
  {
    "path": "src/dir_entry.rs",
    "content": "use std::cell::OnceCell;\nuse std::ffi::OsString;\nuse std::fs::{FileType, Metadata};\nuse std::path::{Path, PathBuf};\n\nuse lscolors::{Colorable, LsColors, Style};\n\nuse crate::config::Config;\nuse crate::filesystem::strip_current_dir;\n\n#[derive(Debug)]\nenum DirEntryInner {\n    Normal(ignore::DirEntry),\n    BrokenSymlink(PathBuf),\n}\n\n#[derive(Debug)]\npub struct DirEntry {\n    inner: DirEntryInner,\n    metadata: OnceCell<Option<Metadata>>,\n    style: OnceCell<Option<Style>>,\n}\n\nimpl DirEntry {\n    #[inline]\n    pub fn normal(e: ignore::DirEntry) -> Self {\n        Self {\n            inner: DirEntryInner::Normal(e),\n            metadata: OnceCell::new(),\n            style: OnceCell::new(),\n        }\n    }\n\n    pub fn broken_symlink(path: PathBuf) -> Self {\n        Self {\n            inner: DirEntryInner::BrokenSymlink(path),\n            metadata: OnceCell::new(),\n            style: OnceCell::new(),\n        }\n    }\n\n    pub fn path(&self) -> &Path {\n        match &self.inner {\n            DirEntryInner::Normal(e) => e.path(),\n            DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(),\n        }\n    }\n\n    pub fn into_path(self) -> PathBuf {\n        match self.inner {\n            DirEntryInner::Normal(e) => e.into_path(),\n            DirEntryInner::BrokenSymlink(p) => p,\n        }\n    }\n\n    /// Returns the path as it should be presented to the user.\n    pub fn stripped_path(&self, config: &Config) -> &Path {\n        if config.strip_cwd_prefix {\n            strip_current_dir(self.path())\n        } else {\n            self.path()\n        }\n    }\n\n    /// Returns the path as it should be presented to the user.\n    pub fn into_stripped_path(self, config: &Config) -> PathBuf {\n        if config.strip_cwd_prefix {\n            self.stripped_path(config).to_path_buf()\n        } else {\n            self.into_path()\n        }\n    }\n\n    pub fn file_type(&self) -> Option<FileType> {\n    
    match &self.inner {\n            DirEntryInner::Normal(e) => e.file_type(),\n            DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()),\n        }\n    }\n\n    pub fn metadata(&self) -> Option<&Metadata> {\n        self.metadata\n            .get_or_init(|| match &self.inner {\n                DirEntryInner::Normal(e) => e.metadata().ok(),\n                DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(),\n            })\n            .as_ref()\n    }\n\n    pub fn depth(&self) -> Option<usize> {\n        match &self.inner {\n            DirEntryInner::Normal(e) => Some(e.depth()),\n            DirEntryInner::BrokenSymlink(_) => None,\n        }\n    }\n\n    pub fn style(&self, ls_colors: &LsColors) -> Option<&Style> {\n        self.style\n            .get_or_init(|| ls_colors.style_for(self).cloned())\n            .as_ref()\n    }\n}\n\nimpl PartialEq for DirEntry {\n    #[inline]\n    fn eq(&self, other: &Self) -> bool {\n        self.path() == other.path()\n    }\n}\n\nimpl Eq for DirEntry {}\n\nimpl PartialOrd for DirEntry {\n    #[inline]\n    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n        Some(self.cmp(other))\n    }\n}\n\nimpl Ord for DirEntry {\n    #[inline]\n    fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n        self.path().cmp(other.path())\n    }\n}\n\nimpl Colorable for DirEntry {\n    fn path(&self) -> PathBuf {\n        self.path().to_owned()\n    }\n\n    fn file_name(&self) -> OsString {\n        let name = match &self.inner {\n            DirEntryInner::Normal(e) => e.file_name(),\n            DirEntryInner::BrokenSymlink(path) => {\n                // Path::file_name() only works if the last component is Normal,\n                // but we want it for all component types, so we open code it.\n                // Copied from LsColors::style_for_path_with_metadata().\n                path.components()\n                    .next_back()\n                    
.map(|c| c.as_os_str())\n                    .unwrap_or_else(|| path.as_os_str())\n            }\n        };\n        name.to_owned()\n    }\n\n    fn file_type(&self) -> Option<FileType> {\n        self.file_type()\n    }\n\n    fn metadata(&self) -> Option<Metadata> {\n        self.metadata().cloned()\n    }\n}\n"
  },
  {
    "path": "src/error.rs",
    "content": "pub fn print_error(msg: impl Into<String>) {\n    eprintln!(\"[fd error]: {}\", msg.into());\n}\n"
  },
  {
    "path": "src/exec/command.rs",
    "content": "use std::io;\nuse std::io::Write;\n\nuse argmax::Command;\n\nuse crate::error::print_error;\nuse crate::exit_codes::ExitCode;\n\nstruct Outputs {\n    stdout: Vec<u8>,\n    stderr: Vec<u8>,\n}\npub struct OutputBuffer {\n    null_separator: bool,\n    outputs: Vec<Outputs>,\n}\n\nimpl OutputBuffer {\n    pub fn new(null_separator: bool) -> Self {\n        Self {\n            null_separator,\n            outputs: Vec::new(),\n        }\n    }\n\n    fn push(&mut self, stdout: Vec<u8>, stderr: Vec<u8>) {\n        self.outputs.push(Outputs { stdout, stderr });\n    }\n\n    fn write(self) {\n        // Avoid taking the lock if there is nothing to do.\n        // If null_separator is true, then we still need to write the\n        // null separator, because the output may have been written directly\n        // to stdout\n        if self.outputs.is_empty() && !self.null_separator {\n            return;\n        }\n\n        let stdout = io::stdout();\n        let stderr = io::stderr();\n\n        // While we hold these locks, only this thread will be able\n        // to write its outputs.\n        let mut stdout = stdout.lock();\n        let mut stderr = stderr.lock();\n\n        for output in self.outputs.iter() {\n            let _ = stdout.write_all(&output.stdout);\n            let _ = stderr.write_all(&output.stderr);\n        }\n        if self.null_separator {\n            // If null_separator is enabled, then we should write a \\0 at the end\n            // of the output for this entry\n            let _ = stdout.write_all(b\"\\0\");\n        }\n    }\n}\n\n/// Executes a command.\npub fn execute_commands<I: Iterator<Item = io::Result<Command>>>(\n    cmds: I,\n    mut output_buffer: OutputBuffer,\n    enable_output_buffering: bool,\n) -> ExitCode {\n    for result in cmds {\n        let mut cmd = match result {\n            Ok(cmd) => cmd,\n            Err(e) => return handle_cmd_error(None, e),\n        };\n\n        // Spawn the supplied 
command.\n        let output = if enable_output_buffering {\n            cmd.output()\n        } else {\n            // If running on only one thread, don't buffer output\n            // Allows for viewing and interacting with intermediate command output\n            cmd.spawn().and_then(|c| c.wait_with_output())\n        };\n\n        // Then wait for the command to exit, if it was spawned.\n        match output {\n            Ok(output) => {\n                if enable_output_buffering {\n                    output_buffer.push(output.stdout, output.stderr);\n                }\n                if output.status.code() != Some(0) {\n                    output_buffer.write();\n                    return ExitCode::GeneralError;\n                }\n            }\n            Err(why) => {\n                output_buffer.write();\n                return handle_cmd_error(Some(&cmd), why);\n            }\n        }\n    }\n    output_buffer.write();\n    ExitCode::Success\n}\n\npub fn handle_cmd_error(cmd: Option<&Command>, err: io::Error) -> ExitCode {\n    match (cmd, err) {\n        (Some(cmd), err) if err.kind() == io::ErrorKind::NotFound => {\n            print_error(format!(\n                \"Command not found: {}\",\n                cmd.get_program().to_string_lossy()\n            ));\n            ExitCode::GeneralError\n        }\n        (_, err) => {\n            print_error(format!(\"Problem while executing command: {err}\"));\n            ExitCode::GeneralError\n        }\n    }\n}\n"
  },
  {
    "path": "src/exec/job.rs",
    "content": "use crate::config::Config;\nuse crate::error::print_error;\nuse crate::exit_codes::{ExitCode, merge_exitcodes};\nuse crate::walk::WorkerResult;\n\nuse super::CommandSet;\n\n/// An event loop that listens for inputs from the `rx` receiver. Each received input will\n/// generate a command with the supplied command template. The generated command will then\n/// be executed, and this process will continue until the receiver's sender has closed.\npub fn job(\n    results: impl IntoIterator<Item = WorkerResult>,\n    cmd: &CommandSet,\n    config: &Config,\n) -> ExitCode {\n    // Output should be buffered when only running a single thread\n    let buffer_output: bool = config.threads > 1;\n\n    let mut ret = ExitCode::Success;\n    for result in results {\n        // Obtain the next result from the receiver, else if the channel\n        // has closed, exit from the loop\n        let dir_entry = match result {\n            WorkerResult::Entry(dir_entry) => dir_entry,\n            WorkerResult::Error(err) => {\n                if config.show_filesystem_errors {\n                    print_error(err.to_string());\n                }\n                continue;\n            }\n        };\n\n        // Generate a command, execute it and store its exit code.\n        let code = cmd.execute(\n            dir_entry.stripped_path(config),\n            config.path_separator.as_deref(),\n            config.null_separator,\n            buffer_output,\n        );\n        ret = merge_exitcodes([ret, code]);\n    }\n    // Returns error in case of any error.\n    ret\n}\n\npub fn batch(\n    results: impl IntoIterator<Item = WorkerResult>,\n    cmd: &CommandSet,\n    config: &Config,\n) -> ExitCode {\n    let paths = results\n        .into_iter()\n        .filter_map(|worker_result| match worker_result {\n            WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),\n            WorkerResult::Error(err) => {\n                if 
config.show_filesystem_errors {\n                    print_error(err.to_string());\n                }\n                None\n            }\n        });\n\n    cmd.execute_batch(paths, config.batch_size, config.path_separator.as_deref())\n}\n"
  },
  {
    "path": "src/exec/mod.rs",
    "content": "mod command;\nmod job;\n\nuse std::ffi::OsString;\nuse std::io;\nuse std::iter;\nuse std::path::{Path, PathBuf};\nuse std::process::Stdio;\n\nuse anyhow::{Result, bail};\nuse argmax::Command;\n\nuse crate::exec::command::OutputBuffer;\nuse crate::exit_codes::{ExitCode, merge_exitcodes};\nuse crate::fmt::{FormatTemplate, Token};\n\nuse self::command::{execute_commands, handle_cmd_error};\npub use self::job::{batch, job};\n\n/// Execution mode of the command\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum ExecutionMode {\n    /// Command is executed for each search result\n    OneByOne,\n    /// Command is run for a batch of results at once\n    Batch,\n}\n\n#[derive(Debug, Clone, PartialEq)]\npub struct CommandSet {\n    mode: ExecutionMode,\n    commands: Vec<CommandTemplate>,\n}\n\nimpl CommandSet {\n    pub fn new<I, T, S>(input: I) -> Result<CommandSet>\n    where\n        I: IntoIterator<Item = T>,\n        T: IntoIterator<Item = S>,\n        S: AsRef<str>,\n    {\n        Ok(CommandSet {\n            mode: ExecutionMode::OneByOne,\n            commands: input\n                .into_iter()\n                .map(CommandTemplate::new)\n                .collect::<Result<_>>()?,\n        })\n    }\n\n    pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet>\n    where\n        I: IntoIterator<Item = T>,\n        T: IntoIterator<Item = S>,\n        S: AsRef<str>,\n    {\n        Ok(CommandSet {\n            mode: ExecutionMode::Batch,\n            commands: input\n                .into_iter()\n                .map(|args| {\n                    let cmd = CommandTemplate::new(args)?;\n                    if cmd.number_of_tokens() > 1 {\n                        bail!(\"Only one placeholder allowed for batch commands\");\n                    }\n                    if cmd.args[0].has_tokens() {\n                        bail!(\"First argument of exec-batch is expected to be a fixed executable\");\n                    }\n                    
Ok(cmd)\n                })\n                .collect::<Result<Vec<_>>>()?,\n        })\n    }\n\n    pub fn in_batch_mode(&self) -> bool {\n        self.mode == ExecutionMode::Batch\n    }\n\n    pub fn execute(\n        &self,\n        input: &Path,\n        path_separator: Option<&str>,\n        null_separator: bool,\n        buffer_output: bool,\n    ) -> ExitCode {\n        let commands = self\n            .commands\n            .iter()\n            .map(|c| c.generate(input, path_separator));\n        execute_commands(commands, OutputBuffer::new(null_separator), buffer_output)\n    }\n\n    pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode\n    where\n        I: Iterator<Item = PathBuf>,\n    {\n        let builders: io::Result<Vec<_>> = self\n            .commands\n            .iter()\n            .map(|c| CommandBuilder::new(c, limit))\n            .collect();\n\n        match builders {\n            Ok(mut builders) => {\n                for path in paths {\n                    for builder in &mut builders {\n                        if let Err(e) = builder.push(&path, path_separator) {\n                            return handle_cmd_error(Some(&builder.cmd), e);\n                        }\n                    }\n                }\n\n                for builder in &mut builders {\n                    if let Err(e) = builder.finish() {\n                        return handle_cmd_error(Some(&builder.cmd), e);\n                    }\n                }\n\n                merge_exitcodes(builders.iter().map(|b| b.exit_code()))\n            }\n            Err(e) => handle_cmd_error(None, e),\n        }\n    }\n}\n\n/// Represents a multi-exec command as it is built.\n#[derive(Debug)]\nstruct CommandBuilder {\n    pre_args: Vec<OsString>,\n    path_arg: FormatTemplate,\n    post_args: Vec<OsString>,\n    cmd: Command,\n    count: usize,\n    limit: usize,\n    exit_code: ExitCode,\n}\n\nimpl CommandBuilder {\n    fn 
new(template: &CommandTemplate, limit: usize) -> io::Result<Self> {\n        let mut pre_args = vec![];\n        let mut path_arg = None;\n        let mut post_args = vec![];\n\n        for arg in &template.args {\n            if arg.has_tokens() {\n                path_arg = Some(arg.clone());\n            } else if path_arg.is_none() {\n                pre_args.push(arg.generate(\"\", None));\n            } else {\n                post_args.push(arg.generate(\"\", None));\n            }\n        }\n\n        let cmd = Self::new_command(&pre_args)?;\n\n        Ok(Self {\n            pre_args,\n            path_arg: path_arg.unwrap(),\n            post_args,\n            cmd,\n            count: 0,\n            limit,\n            exit_code: ExitCode::Success,\n        })\n    }\n\n    fn new_command(pre_args: &[OsString]) -> io::Result<Command> {\n        let mut cmd = Command::new(&pre_args[0]);\n        cmd.stdin(Stdio::inherit());\n        cmd.stdout(Stdio::inherit());\n        cmd.stderr(Stdio::inherit());\n        cmd.try_args(&pre_args[1..])?;\n        Ok(cmd)\n    }\n\n    fn push(&mut self, path: &Path, separator: Option<&str>) -> io::Result<()> {\n        if self.limit > 0 && self.count >= self.limit {\n            self.finish()?;\n        }\n\n        let arg = self.path_arg.generate(path, separator);\n        if !self\n            .cmd\n            .args_would_fit(iter::once(&arg).chain(&self.post_args))\n        {\n            self.finish()?;\n        }\n\n        self.cmd.try_arg(arg)?;\n        self.count += 1;\n        Ok(())\n    }\n\n    fn finish(&mut self) -> io::Result<()> {\n        if self.count > 0 {\n            self.cmd.try_args(&self.post_args)?;\n            if !self.cmd.status()?.success() {\n                self.exit_code = ExitCode::GeneralError;\n            }\n\n            self.cmd = Self::new_command(&self.pre_args)?;\n            self.count = 0;\n        }\n\n        Ok(())\n    }\n\n    fn exit_code(&self) -> ExitCode {\n        
self.exit_code\n    }\n}\n\n/// Represents a template that is utilized to generate command strings.\n///\n/// The template is meant to be coupled with an input in order to generate a command. The\n/// `generate_and_execute()` method will be used to generate a command and execute it.\n#[derive(Debug, Clone, PartialEq)]\nstruct CommandTemplate {\n    args: Vec<FormatTemplate>,\n}\n\nimpl CommandTemplate {\n    fn new<I, S>(input: I) -> Result<CommandTemplate>\n    where\n        I: IntoIterator<Item = S>,\n        S: AsRef<str>,\n    {\n        let mut args = Vec::new();\n        let mut has_placeholder = false;\n\n        for arg in input {\n            let arg = arg.as_ref();\n\n            let tmpl = FormatTemplate::parse(arg);\n            has_placeholder |= tmpl.has_tokens();\n            args.push(tmpl);\n        }\n\n        // We need to check that we have at least one argument, because if not\n        // it will try to execute each file and directory it finds.\n        //\n        // Sadly, clap can't currently handle this for us, see\n        // https://github.com/clap-rs/clap/issues/3542\n        if args.is_empty() {\n            bail!(\"No executable provided for --exec or --exec-batch\");\n        }\n\n        // If a placeholder token was not supplied, append one at the end of the command.\n        if !has_placeholder {\n            args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));\n        }\n\n        Ok(CommandTemplate { args })\n    }\n\n    fn number_of_tokens(&self) -> usize {\n        self.args.iter().filter(|arg| arg.has_tokens()).count()\n    }\n\n    /// Generates and executes a command.\n    ///\n    /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be\n    /// build.\n    fn generate(&self, input: &Path, path_separator: Option<&str>) -> io::Result<Command> {\n        let mut cmd = Command::new(self.args[0].generate(input, path_separator));\n        for arg in &self.args[1..] 
{\n            cmd.try_arg(arg.generate(input, path_separator))?;\n        }\n        Ok(cmd)\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {\n        template\n            .args\n            .iter()\n            .map(|arg| arg.generate(input, None).into_string().unwrap())\n            .collect()\n    }\n\n    #[test]\n    fn tokens_with_placeholder() {\n        assert_eq!(\n            CommandSet::new(vec![vec![&\"echo\", &\"${SHELL}:\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Text(\"${SHELL}:\".into()),\n                        FormatTemplate::Tokens(vec![Token::Placeholder]),\n                    ]\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_with_no_extension() {\n        assert_eq!(\n            CommandSet::new(vec![vec![\"echo\", \"{.}\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Tokens(vec![Token::NoExt]),\n                    ],\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_with_basename() {\n        assert_eq!(\n            CommandSet::new(vec![vec![\"echo\", \"{/}\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Tokens(vec![Token::Basename]),\n                    ],\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n      
  );\n    }\n\n    #[test]\n    fn tokens_with_parent() {\n        assert_eq!(\n            CommandSet::new(vec![vec![\"echo\", \"{//}\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Tokens(vec![Token::Parent]),\n                    ],\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_with_basename_no_extension() {\n        assert_eq!(\n            CommandSet::new(vec![vec![\"echo\", \"{/.}\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Tokens(vec![Token::BasenameNoExt]),\n                    ],\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_with_literal_braces() {\n        let template = CommandTemplate::new(vec![\"{{}}\", \"{{\", \"{.}}\"]).unwrap();\n        assert_eq!(\n            generate_str(&template, \"foo\"),\n            vec![\"{}\", \"{\", \"{.}\", \"foo\"]\n        );\n    }\n\n    #[test]\n    fn tokens_with_literal_braces_and_placeholder() {\n        let template = CommandTemplate::new(vec![\"{{{},end}\"]).unwrap();\n        assert_eq!(generate_str(&template, \"foo\"), vec![\"{foo,end}\"]);\n    }\n\n    #[test]\n    fn tokens_multiple() {\n        assert_eq!(\n            CommandSet::new(vec![vec![\"cp\", \"{}\", \"{/.}.ext\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"cp\".into()),\n                        FormatTemplate::Tokens(vec![Token::Placeholder]),\n                        
FormatTemplate::Tokens(vec![\n                            Token::BasenameNoExt,\n                            Token::Text(\".ext\".into())\n                        ]),\n                    ],\n                }],\n                mode: ExecutionMode::OneByOne,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_single_batch() {\n        assert_eq!(\n            CommandSet::new_batch(vec![vec![\"echo\", \"{.}\"]]).unwrap(),\n            CommandSet {\n                commands: vec![CommandTemplate {\n                    args: vec![\n                        FormatTemplate::Text(\"echo\".into()),\n                        FormatTemplate::Tokens(vec![Token::NoExt]),\n                    ],\n                }],\n                mode: ExecutionMode::Batch,\n            }\n        );\n    }\n\n    #[test]\n    fn tokens_multiple_batch() {\n        assert!(CommandSet::new_batch(vec![vec![\"echo\", \"{.}\", \"{}\"]]).is_err());\n    }\n\n    #[test]\n    fn template_no_args() {\n        assert!(CommandTemplate::new::<Vec<_>, &'static str>(vec![]).is_err());\n    }\n\n    #[test]\n    fn command_set_no_args() {\n        assert!(CommandSet::new(vec![vec![\"echo\"], vec![]]).is_err());\n    }\n\n    #[test]\n    fn generate_custom_path_separator() {\n        let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);\n        macro_rules! check {\n            ($input:expr, $expected:expr) => {\n                assert_eq!(arg.generate($input, Some(\"#\")), OsString::from($expected));\n            };\n        }\n\n        check!(\"foo\", \"foo\");\n        check!(\"foo/bar\", \"foo#bar\");\n        check!(\"/foo/bar/baz\", \"#foo#bar#baz\");\n    }\n\n    #[cfg(windows)]\n    #[test]\n    fn generate_custom_path_separator_windows() {\n        let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);\n        macro_rules! 
check {\n            ($input:expr, $expected:expr) => {\n                assert_eq!(arg.generate($input, Some(\"#\")), OsString::from($expected));\n            };\n        }\n\n        // path starting with a drive letter\n        check!(r\"C:\\foo\\bar\", \"C:#foo#bar\");\n        // UNC path\n        check!(r\"\\\\server\\share\\path\", \"##server#share#path\");\n        // Drive Relative path - no separator after the colon omits the RootDir path component.\n        // This is uncommon, but valid\n        check!(r\"C:foo\\bar\", \"C:foo#bar\");\n\n        // forward slashes should get normalized and interpreted as separators\n        check!(\"C:/foo/bar\", \"C:#foo#bar\");\n        check!(\"C:foo/bar\", \"C:foo#bar\");\n\n        // Rust does not interpret \"//server/share\" as a UNC path, but rather as a normal\n        // absolute path that begins with RootDir, and the two slashes get combined together as\n        // a single path separator during normalization.\n        //check!(\"//server/share/path\", \"##server#share#path\");\n    }\n}\n"
  },
  {
    "path": "src/exit_codes.rs",
    "content": "use std::process;\n\n#[cfg(unix)]\nuse nix::sys::signal::{SigHandler, Signal, raise, signal};\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum ExitCode {\n    Success,\n    HasResults(bool),\n    GeneralError,\n    KilledBySigint,\n}\n\nimpl From<ExitCode> for i32 {\n    fn from(code: ExitCode) -> Self {\n        match code {\n            ExitCode::Success => 0,\n            ExitCode::HasResults(has_results) => !has_results as i32,\n            ExitCode::GeneralError => 1,\n            ExitCode::KilledBySigint => 130,\n        }\n    }\n}\n\nimpl ExitCode {\n    fn is_error(self) -> bool {\n        i32::from(self) != 0\n    }\n\n    /// Exit the process with the appropriate code.\n    pub fn exit(self) -> ! {\n        #[cfg(unix)]\n        if self == ExitCode::KilledBySigint {\n            // Get rid of the SIGINT handler, if present, and raise SIGINT\n            unsafe {\n                if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() {\n                    let _ = raise(Signal::SIGINT);\n                }\n            }\n        }\n\n        process::exit(self.into())\n    }\n}\n\npub fn merge_exitcodes(results: impl IntoIterator<Item = ExitCode>) -> ExitCode {\n    if results.into_iter().any(ExitCode::is_error) {\n        return ExitCode::GeneralError;\n    }\n    ExitCode::Success\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn success_when_no_results() {\n        assert_eq!(merge_exitcodes([]), ExitCode::Success);\n    }\n\n    #[test]\n    fn general_error_if_at_least_one_error() {\n        assert_eq!(\n            merge_exitcodes([ExitCode::GeneralError]),\n            ExitCode::GeneralError\n        );\n        assert_eq!(\n            merge_exitcodes([ExitCode::KilledBySigint]),\n            ExitCode::GeneralError\n        );\n        assert_eq!(\n            merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]),\n            ExitCode::GeneralError\n        );\n        assert_eq!(\n         
   merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]),\n            ExitCode::GeneralError\n        );\n        assert_eq!(\n            merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]),\n            ExitCode::GeneralError\n        );\n    }\n\n    #[test]\n    fn success_if_no_error() {\n        assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success);\n        assert_eq!(\n            merge_exitcodes([ExitCode::Success, ExitCode::Success]),\n            ExitCode::Success\n        );\n    }\n}\n"
  },
  {
    "path": "src/filesystem.rs",
    "content": "use std::borrow::Cow;\nuse std::env;\nuse std::ffi::OsStr;\nuse std::fs;\nuse std::io;\n#[cfg(any(unix, target_os = \"redox\"))]\nuse std::os::unix::fs::FileTypeExt;\nuse std::path::{Path, PathBuf};\n\nuse normpath::PathExt;\n\nuse crate::dir_entry;\n\npub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> {\n    if path.is_absolute() {\n        return Ok(path.to_path_buf());\n    }\n\n    let path = path.strip_prefix(\".\").unwrap_or(path);\n    env::current_dir().map(|path_buf| path_buf.join(path))\n}\n\n/// Construct an absolute path from a potentially relative path and a\n/// pre-resolved working directory. Unlike `path_absolute_form`, this\n/// does not call `env::current_dir()` and cannot fail.\npub fn make_absolute(path: &Path, cwd: &Path) -> PathBuf {\n    if path.is_absolute() {\n        return path.to_path_buf();\n    }\n    let path = path.strip_prefix(\".\").unwrap_or(path);\n    cwd.join(path)\n}\n\npub fn absolute_path(path: &Path) -> io::Result<PathBuf> {\n    let path_buf = path_absolute_form(path)?;\n\n    #[cfg(windows)]\n    let path_buf = Path::new(\n        path_buf\n            .as_path()\n            .to_string_lossy()\n            .trim_start_matches(r\"\\\\?\\\"),\n    )\n    .to_path_buf();\n\n    Ok(path_buf)\n}\n\npub fn is_existing_directory(path: &Path) -> bool {\n    // Note: we do not use `.exists()` here, as `.` always exists, even if\n    // the CWD has been deleted.\n    path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok())\n}\n\npub fn is_empty(entry: &dir_entry::DirEntry) -> bool {\n    if let Some(file_type) = entry.file_type() {\n        if file_type.is_dir() {\n            if let Ok(mut entries) = fs::read_dir(entry.path()) {\n                entries.next().is_none()\n            } else {\n                false\n            }\n        } else if file_type.is_file() {\n            entry.metadata().map(|m| m.len() == 0).unwrap_or(false)\n        } else {\n            false\n        }\n  
  } else {\n        false\n    }\n}\n\n#[cfg(any(unix, target_os = \"redox\"))]\npub fn is_block_device(ft: fs::FileType) -> bool {\n    ft.is_block_device()\n}\n\n#[cfg(windows)]\npub fn is_block_device(_: fs::FileType) -> bool {\n    false\n}\n\n#[cfg(any(unix, target_os = \"redox\"))]\npub fn is_char_device(ft: fs::FileType) -> bool {\n    ft.is_char_device()\n}\n\n#[cfg(windows)]\npub fn is_char_device(_: fs::FileType) -> bool {\n    false\n}\n\n#[cfg(any(unix, target_os = \"redox\"))]\npub fn is_socket(ft: fs::FileType) -> bool {\n    ft.is_socket()\n}\n\n#[cfg(windows)]\npub fn is_socket(_: fs::FileType) -> bool {\n    false\n}\n\n#[cfg(any(unix, target_os = \"redox\"))]\npub fn is_pipe(ft: fs::FileType) -> bool {\n    ft.is_fifo()\n}\n\n#[cfg(windows)]\npub fn is_pipe(_: fs::FileType) -> bool {\n    false\n}\n\n#[cfg(any(unix, target_os = \"redox\"))]\npub fn osstr_to_bytes(input: &OsStr) -> Cow<'_, [u8]> {\n    use std::os::unix::ffi::OsStrExt;\n    Cow::Borrowed(input.as_bytes())\n}\n\n#[cfg(windows)]\npub fn osstr_to_bytes(input: &OsStr) -> Cow<'_, [u8]> {\n    let string = input.to_string_lossy();\n\n    match string {\n        Cow::Owned(string) => Cow::Owned(string.into_bytes()),\n        Cow::Borrowed(string) => Cow::Borrowed(string.as_bytes()),\n    }\n}\n\n/// Remove the `./` prefix from a path.\npub fn strip_current_dir(path: &Path) -> &Path {\n    path.strip_prefix(\".\").unwrap_or(path)\n}\n\n/// Default value for the path_separator, mainly for MSYS/MSYS2, which set the MSYSTEM\n/// environment variable, and we set fd's path separator to '/' rather than Rust's default of '\\'.\n///\n/// Returns Some to use a nonstandard path separator, or None to use rust's default on the target\n/// platform.\npub fn default_path_separator() -> Option<String> {\n    if cfg!(windows) {\n        let msystem = env::var(\"MSYSTEM\").ok()?;\n        if !msystem.is_empty() {\n            return Some(\"/\".to_owned());\n        }\n    }\n    
None\n}\n\n#[cfg(test)]\nmod tests {\n    use super::strip_current_dir;\n    use std::path::Path;\n\n    #[test]\n    fn strip_current_dir_basic() {\n        assert_eq!(strip_current_dir(Path::new(\"./foo\")), Path::new(\"foo\"));\n        assert_eq!(strip_current_dir(Path::new(\"foo\")), Path::new(\"foo\"));\n        assert_eq!(\n            strip_current_dir(Path::new(\"./foo/bar/baz\")),\n            Path::new(\"foo/bar/baz\")\n        );\n        assert_eq!(\n            strip_current_dir(Path::new(\"foo/bar/baz\")),\n            Path::new(\"foo/bar/baz\")\n        );\n    }\n\n    #[test]\n    fn make_absolute_with_relative_path() {\n        use super::make_absolute;\n        use std::path::PathBuf;\n\n        let cwd = Path::new(\"/home/user\");\n        assert_eq!(\n            make_absolute(Path::new(\"foo/bar\"), cwd),\n            PathBuf::from(\"/home/user/foo/bar\")\n        );\n    }\n\n    #[test]\n    fn make_absolute_strips_dot_prefix() {\n        use super::make_absolute;\n        use std::path::PathBuf;\n\n        let cwd = Path::new(\"/home/user\");\n        assert_eq!(\n            make_absolute(Path::new(\"./foo/bar\"), cwd),\n            PathBuf::from(\"/home/user/foo/bar\")\n        );\n    }\n\n    #[test]\n    fn make_absolute_with_absolute_path() {\n        use super::make_absolute;\n        use std::path::PathBuf;\n\n        let cwd = Path::new(\"/home/user\");\n        assert_eq!(\n            make_absolute(Path::new(\"/absolute/path\"), cwd),\n            PathBuf::from(\"/absolute/path\")\n        );\n    }\n}\n"
  },
  {
    "path": "src/filetypes.rs",
    "content": "use crate::dir_entry;\nuse crate::filesystem;\n\nuse faccess::PathExt;\n\n/// Whether or not to show\n#[derive(Default)]\npub struct FileTypes {\n    pub files: bool,\n    pub directories: bool,\n    pub symlinks: bool,\n    pub block_devices: bool,\n    pub char_devices: bool,\n    pub sockets: bool,\n    pub pipes: bool,\n    pub executables_only: bool,\n    pub empty_only: bool,\n}\n\nimpl FileTypes {\n    pub fn should_ignore(&self, entry: &dir_entry::DirEntry) -> bool {\n        if let Some(ref entry_type) = entry.file_type() {\n            (!self.files && entry_type.is_file())\n                || (!self.directories && entry_type.is_dir())\n                || (!self.symlinks && entry_type.is_symlink())\n                || (!self.block_devices && filesystem::is_block_device(*entry_type))\n                || (!self.char_devices && filesystem::is_char_device(*entry_type))\n                || (!self.sockets && filesystem::is_socket(*entry_type))\n                || (!self.pipes && filesystem::is_pipe(*entry_type))\n                || (self.executables_only && !entry.path().executable())\n                || (self.empty_only && !filesystem::is_empty(entry))\n                || !(entry_type.is_file()\n                    || entry_type.is_dir()\n                    || entry_type.is_symlink()\n                    || filesystem::is_block_device(*entry_type)\n                    || filesystem::is_char_device(*entry_type)\n                    || filesystem::is_socket(*entry_type)\n                    || filesystem::is_pipe(*entry_type))\n        } else {\n            true\n        }\n    }\n}\n"
  },
  {
    "path": "src/filter/mod.rs",
    "content": "pub use self::size::SizeFilter;\npub use self::time::TimeFilter;\n\n#[cfg(unix)]\npub use self::owner::OwnerFilter;\n\nmod size;\nmod time;\n\n#[cfg(unix)]\nmod owner;\n"
  },
  {
    "path": "src/filter/owner.rs",
    "content": "use anyhow::{Result, anyhow};\nuse nix::unistd::{Group, User};\nuse std::fs;\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub struct OwnerFilter {\n    uid: Check<u32>,\n    gid: Check<u32>,\n}\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\nenum Check<T> {\n    Equal(T),\n    NotEq(T),\n    Ignore,\n}\n\nimpl OwnerFilter {\n    const IGNORE: Self = OwnerFilter {\n        uid: Check::Ignore,\n        gid: Check::Ignore,\n    };\n\n    /// Parses an owner constraint\n    /// Returns an error if the string is invalid\n    /// Returns Ok(None) when string is acceptable but a noop (such as \"\" or \":\")\n    pub fn from_string(input: &str) -> Result<Self> {\n        let mut it = input.split(':');\n        let (fst, snd) = (it.next(), it.next());\n\n        if it.next().is_some() {\n            return Err(anyhow!(\n                \"more than one ':' present in owner string '{}'. See 'fd --help'.\",\n                input\n            ));\n        }\n\n        let uid = Check::parse(fst, |s| {\n            if let Ok(uid) = s.parse() {\n                Ok(uid)\n            } else {\n                User::from_name(s)?\n                    .map(|user| user.uid.as_raw())\n                    .ok_or_else(|| anyhow!(\"'{}' is not a recognized user name\", s))\n            }\n        })?;\n        let gid = Check::parse(snd, |s| {\n            if let Ok(gid) = s.parse() {\n                Ok(gid)\n            } else {\n                Group::from_name(s)?\n                    .map(|group| group.gid.as_raw())\n                    .ok_or_else(|| anyhow!(\"'{}' is not a recognized group name\", s))\n            }\n        })?;\n\n        Ok(OwnerFilter { uid, gid })\n    }\n\n    /// If self is a no-op (ignore both uid and gid) then return `None`, otherwise wrap in a `Some`\n    pub fn filter_ignore(self) -> Option<Self> {\n        if self == Self::IGNORE {\n            None\n        } else {\n            Some(self)\n        }\n    }\n\n    pub fn 
matches(&self, md: &fs::Metadata) -> bool {\n        use std::os::unix::fs::MetadataExt;\n\n        self.uid.check(md.uid()) && self.gid.check(md.gid())\n    }\n}\n\nimpl<T: PartialEq> Check<T> {\n    fn check(&self, v: T) -> bool {\n        match self {\n            Check::Equal(x) => v == *x,\n            Check::NotEq(x) => v != *x,\n            Check::Ignore => true,\n        }\n    }\n\n    fn parse<F>(s: Option<&str>, f: F) -> Result<Self>\n    where\n        F: Fn(&str) -> Result<T>,\n    {\n        let (s, equality) = match s {\n            Some(\"\") | None => return Ok(Check::Ignore),\n            Some(s) if s.starts_with('!') => (&s[1..], false),\n            Some(s) => (s, true),\n        };\n\n        f(s).map(|x| {\n            if equality {\n                Check::Equal(x)\n            } else {\n                Check::NotEq(x)\n            }\n        })\n    }\n}\n\n#[cfg(test)]\nmod owner_parsing {\n    use super::OwnerFilter;\n\n    macro_rules! owner_tests {\n        ($($name:ident: $value:expr => $result:pat,)*) => {\n            $(\n                #[test]\n                fn $name() {\n                    let o = OwnerFilter::from_string($value);\n                    match o {\n                        $result => {},\n                        _ => panic!(\"{:?} does not match {}\", o, stringify!($result)),\n                    }\n                }\n            )*\n        };\n    }\n\n    use super::Check::*;\n    owner_tests! 
{\n        empty:      \"\"      => Ok(OwnerFilter::IGNORE),\n        uid_only:   \"5\"     => Ok(OwnerFilter { uid: Equal(5), gid: Ignore     }),\n        uid_gid:    \"9:3\"   => Ok(OwnerFilter { uid: Equal(9), gid: Equal(3)   }),\n        gid_only:   \":8\"    => Ok(OwnerFilter { uid: Ignore,   gid: Equal(8)   }),\n        colon_only: \":\"     => Ok(OwnerFilter::IGNORE),\n        trailing:   \"5:\"    => Ok(OwnerFilter { uid: Equal(5), gid: Ignore     }),\n\n        uid_negate: \"!5\"    => Ok(OwnerFilter { uid: NotEq(5), gid: Ignore     }),\n        both_negate:\"!4:!3\" => Ok(OwnerFilter { uid: NotEq(4), gid: NotEq(3)   }),\n        uid_not_gid:\"6:!8\"  => Ok(OwnerFilter { uid: Equal(6), gid: NotEq(8)   }),\n\n        more_colons:\"3:5:\"  => Err(_),\n        only_colons:\"::\"    => Err(_),\n    }\n}\n"
  },
  {
    "path": "src/filter/size.rs",
    "content": "use std::sync::OnceLock;\n\nuse anyhow::anyhow;\nuse regex::Regex;\n\nstatic SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new();\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub enum SizeFilter {\n    Max(u64),\n    Min(u64),\n    Equals(u64),\n}\n\n// SI prefixes (powers of 10)\nconst KILO: u64 = 1000;\nconst MEGA: u64 = KILO * 1000;\nconst GIGA: u64 = MEGA * 1000;\nconst TERA: u64 = GIGA * 1000;\n\n// Binary prefixes (powers of 2)\nconst KIBI: u64 = 1024;\nconst MEBI: u64 = KIBI * 1024;\nconst GIBI: u64 = MEBI * 1024;\nconst TEBI: u64 = GIBI * 1024;\n\nimpl SizeFilter {\n    pub fn from_string(s: &str) -> anyhow::Result<Self> {\n        SizeFilter::parse_opt(s)\n            .ok_or_else(|| anyhow!(\"'{}' is not a valid size constraint. See 'fd --help'.\", s))\n    }\n\n    fn parse_opt(s: &str) -> Option<Self> {\n        let pattern =\n            SIZE_CAPTURES.get_or_init(|| Regex::new(r\"(?i)^([+-]?)(\\d+)(b|[kmgt]i?b?)$\").unwrap());\n        if !pattern.is_match(s) {\n            return None;\n        }\n\n        let captures = pattern.captures(s)?;\n        let limit_kind = captures.get(1).map_or(\"+\", |m| m.as_str());\n        let quantity = captures\n            .get(2)\n            .and_then(|v| v.as_str().parse::<u64>().ok())?;\n\n        let multiplier = match &captures.get(3).map_or(\"b\", |m| m.as_str()).to_lowercase()[..] 
{\n            v if v.starts_with(\"ki\") => KIBI,\n            v if v.starts_with('k') => KILO,\n            v if v.starts_with(\"mi\") => MEBI,\n            v if v.starts_with('m') => MEGA,\n            v if v.starts_with(\"gi\") => GIBI,\n            v if v.starts_with('g') => GIGA,\n            v if v.starts_with(\"ti\") => TEBI,\n            v if v.starts_with('t') => TERA,\n            \"b\" => 1,\n            _ => return None,\n        };\n\n        let size = quantity * multiplier;\n        match limit_kind {\n            \"+\" => Some(SizeFilter::Min(size)),\n            \"-\" => Some(SizeFilter::Max(size)),\n            \"\" => Some(SizeFilter::Equals(size)),\n            _ => None,\n        }\n    }\n\n    pub fn is_within(&self, size: u64) -> bool {\n        match *self {\n            SizeFilter::Max(limit) => size <= limit,\n            SizeFilter::Min(limit) => size >= limit,\n            SizeFilter::Equals(limit) => size == limit,\n        }\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    macro_rules! gen_size_filter_parse_test {\n        ($($name: ident: $val: expr,)*) => {\n            $(\n                #[test]\n                fn $name() {\n                    let (txt, expected) = $val;\n                    let actual = SizeFilter::from_string(txt).unwrap();\n                    assert_eq!(actual, expected);\n                }\n            )*\n        };\n    }\n\n    // Parsing and size conversion tests data. Ensure that each type gets properly interpreted.\n    // Call with higher base values to ensure expected multiplication (only need a couple)\n    gen_size_filter_parse_test! 
{\n        byte_plus:                (\"+1b\",     SizeFilter::Min(1)),\n        byte_plus_multiplier:     (\"+10b\",    SizeFilter::Min(10)),\n        byte_minus:               (\"-1b\",     SizeFilter::Max(1)),\n        kilo_plus:                (\"+1k\",     SizeFilter::Min(1000)),\n        kilo_plus_suffix:         (\"+1kb\",    SizeFilter::Min(1000)),\n        kilo_minus:               (\"-1k\",     SizeFilter::Max(1000)),\n        kilo_minus_multiplier:    (\"-100k\",   SizeFilter::Max(100_000)),\n        kilo_minus_suffix:        (\"-1kb\",    SizeFilter::Max(1000)),\n        kilo_plus_upper:          (\"+1K\",     SizeFilter::Min(1000)),\n        kilo_plus_suffix_upper:   (\"+1KB\",    SizeFilter::Min(1000)),\n        kilo_minus_upper:         (\"-1K\",     SizeFilter::Max(1000)),\n        kilo_minus_suffix_upper:  (\"-1Kb\",    SizeFilter::Max(1000)),\n        kibi_plus:                (\"+1ki\",    SizeFilter::Min(1024)),\n        kibi_plus_multiplier:     (\"+10ki\",   SizeFilter::Min(10_240)),\n        kibi_plus_suffix:         (\"+1kib\",   SizeFilter::Min(1024)),\n        kibi_minus:               (\"-1ki\",    SizeFilter::Max(1024)),\n        kibi_minus_multiplier:    (\"-100ki\",  SizeFilter::Max(102_400)),\n        kibi_minus_suffix:        (\"-1kib\",   SizeFilter::Max(1024)),\n        kibi_plus_upper:          (\"+1KI\",    SizeFilter::Min(1024)),\n        kibi_plus_suffix_upper:   (\"+1KiB\",   SizeFilter::Min(1024)),\n        kibi_minus_upper:         (\"-1Ki\",    SizeFilter::Max(1024)),\n        kibi_minus_suffix_upper:  (\"-1KIB\",   SizeFilter::Max(1024)),\n        mega_plus:                (\"+1m\",     SizeFilter::Min(1_000_000)),\n        mega_plus_suffix:         (\"+1mb\",    SizeFilter::Min(1_000_000)),\n        mega_minus:               (\"-1m\",     SizeFilter::Max(1_000_000)),\n        mega_minus_suffix:        (\"-1mb\",    SizeFilter::Max(1_000_000)),\n        mega_plus_upper:          (\"+1M\",     SizeFilter::Min(1_000_000)),\n 
       mega_plus_suffix_upper:   (\"+1MB\",    SizeFilter::Min(1_000_000)),\n        mega_minus_upper:         (\"-1M\",     SizeFilter::Max(1_000_000)),\n        mega_minus_suffix_upper:  (\"-1Mb\",    SizeFilter::Max(1_000_000)),\n        mebi_plus:                (\"+1mi\",    SizeFilter::Min(1_048_576)),\n        mebi_plus_suffix:         (\"+1mib\",   SizeFilter::Min(1_048_576)),\n        mebi_minus:               (\"-1mi\",    SizeFilter::Max(1_048_576)),\n        mebi_minus_suffix:        (\"-1mib\",   SizeFilter::Max(1_048_576)),\n        mebi_plus_upper:          (\"+1MI\",    SizeFilter::Min(1_048_576)),\n        mebi_plus_suffix_upper:   (\"+1MiB\",   SizeFilter::Min(1_048_576)),\n        mebi_minus_upper:         (\"-1Mi\",    SizeFilter::Max(1_048_576)),\n        mebi_minus_suffix_upper:  (\"-1MIB\",   SizeFilter::Max(1_048_576)),\n        giga_plus:                (\"+1g\",     SizeFilter::Min(1_000_000_000)),\n        giga_plus_suffix:         (\"+1gb\",    SizeFilter::Min(1_000_000_000)),\n        giga_minus:               (\"-1g\",     SizeFilter::Max(1_000_000_000)),\n        giga_minus_suffix:        (\"-1gb\",    SizeFilter::Max(1_000_000_000)),\n        giga_plus_upper:          (\"+1G\",     SizeFilter::Min(1_000_000_000)),\n        giga_plus_suffix_upper:   (\"+1GB\",    SizeFilter::Min(1_000_000_000)),\n        giga_minus_upper:         (\"-1G\",     SizeFilter::Max(1_000_000_000)),\n        giga_minus_suffix_upper:  (\"-1Gb\",    SizeFilter::Max(1_000_000_000)),\n        gibi_plus:                (\"+1gi\",    SizeFilter::Min(1_073_741_824)),\n        gibi_plus_suffix:         (\"+1gib\",   SizeFilter::Min(1_073_741_824)),\n        gibi_minus:               (\"-1gi\",    SizeFilter::Max(1_073_741_824)),\n        gibi_minus_suffix:        (\"-1gib\",   SizeFilter::Max(1_073_741_824)),\n        gibi_plus_upper:          (\"+1GI\",    SizeFilter::Min(1_073_741_824)),\n        gibi_plus_suffix_upper:   (\"+1GiB\",   
SizeFilter::Min(1_073_741_824)),\n        gibi_minus_upper:         (\"-1Gi\",    SizeFilter::Max(1_073_741_824)),\n        gibi_minus_suffix_upper:  (\"-1GIB\",   SizeFilter::Max(1_073_741_824)),\n        tera_plus:                (\"+1t\",     SizeFilter::Min(1_000_000_000_000)),\n        tera_plus_suffix:         (\"+1tb\",    SizeFilter::Min(1_000_000_000_000)),\n        tera_minus:               (\"-1t\",     SizeFilter::Max(1_000_000_000_000)),\n        tera_minus_suffix:        (\"-1tb\",    SizeFilter::Max(1_000_000_000_000)),\n        tera_plus_upper:          (\"+1T\",     SizeFilter::Min(1_000_000_000_000)),\n        tera_plus_suffix_upper:   (\"+1TB\",    SizeFilter::Min(1_000_000_000_000)),\n        tera_minus_upper:         (\"-1T\",     SizeFilter::Max(1_000_000_000_000)),\n        tera_minus_suffix_upper:  (\"-1Tb\",    SizeFilter::Max(1_000_000_000_000)),\n        tebi_plus:                (\"+1ti\",    SizeFilter::Min(1_099_511_627_776)),\n        tebi_plus_suffix:         (\"+1tib\",   SizeFilter::Min(1_099_511_627_776)),\n        tebi_minus:               (\"-1ti\",    SizeFilter::Max(1_099_511_627_776)),\n        tebi_minus_suffix:        (\"-1tib\",   SizeFilter::Max(1_099_511_627_776)),\n        tebi_plus_upper:          (\"+1TI\",    SizeFilter::Min(1_099_511_627_776)),\n        tebi_plus_suffix_upper:   (\"+1TiB\",   SizeFilter::Min(1_099_511_627_776)),\n        tebi_minus_upper:         (\"-1Ti\",    SizeFilter::Max(1_099_511_627_776)),\n        tebi_minus_suffix_upper:  (\"-1TIB\",   SizeFilter::Max(1_099_511_627_776)),\n    }\n\n    /// Invalid parse testing\n    macro_rules! gen_size_filter_failure {\n        ($($name:ident: $value:expr,)*) => {\n            $(\n                #[test]\n                fn $name() {\n                    let i = SizeFilter::from_string($value);\n                    assert!(i.is_err());\n                }\n            )*\n        };\n    }\n\n    // Invalid parse data\n    gen_size_filter_failure! 
{\n        ensure_missing_number_returns_none: \"+g\",\n        ensure_missing_unit_returns_none: \"+18\",\n        ensure_bad_format_returns_none_1: \"$10M\",\n        ensure_bad_format_returns_none_2: \"badval\",\n        ensure_bad_format_returns_none_3: \"9999\",\n        ensure_invalid_unit_returns_none_1: \"+50a\",\n        ensure_invalid_unit_returns_none_2: \"-10v\",\n        ensure_invalid_unit_returns_none_3: \"+1Mv\",\n        ensure_bib_format_returns_none: \"+1bib\",\n        ensure_bb_format_returns_none: \"+1bb\",\n    }\n\n    #[test]\n    fn is_within_less_than() {\n        let f = SizeFilter::from_string(\"-1k\").unwrap();\n        assert!(f.is_within(999));\n    }\n\n    #[test]\n    fn is_within_less_than_equal() {\n        let f = SizeFilter::from_string(\"-1k\").unwrap();\n        assert!(f.is_within(1000));\n    }\n\n    #[test]\n    fn is_within_greater_than() {\n        let f = SizeFilter::from_string(\"+1k\").unwrap();\n        assert!(f.is_within(1001));\n    }\n\n    #[test]\n    fn is_within_greater_than_equal() {\n        let f = SizeFilter::from_string(\"+1K\").unwrap();\n        assert!(f.is_within(1000));\n    }\n}\n"
  },
  {
    "path": "src/filter/time.rs",
    "content": "use jiff::{Span, Timestamp, Zoned, civil::DateTime, tz::TimeZone};\n\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\n\n/// Filter based on time ranges.\n#[derive(Debug, PartialEq, Eq)]\npub enum TimeFilter {\n    Before(SystemTime),\n    After(SystemTime),\n}\n\n#[cfg(not(test))]\nfn now() -> Zoned {\n    Zoned::now()\n}\n\n#[cfg(test)]\nthread_local! {\n    static TESTTIME: std::cell::RefCell<Option<Zoned>> = None.into();\n}\n\n/// This allows us to set a specific time when running tests\n#[cfg(test)]\nfn now() -> Zoned {\n    TESTTIME.with_borrow(|reftime| reftime.as_ref().cloned().unwrap_or_else(Zoned::now))\n}\n\nimpl TimeFilter {\n    fn from_str(s: &str) -> Option<SystemTime> {\n        if let Ok(span) = s.parse::<Span>() {\n            let datetime = now().checked_sub(span).ok()?;\n            Some(datetime.into())\n        } else if let Ok(timestamp) = s.parse::<Timestamp>() {\n            Some(timestamp.into())\n        } else if let Ok(datetime) = s.parse::<DateTime>() {\n            Some(\n                TimeZone::system()\n                    .to_ambiguous_zoned(datetime)\n                    .later()\n                    .ok()?\n                    .into(),\n            )\n        } else {\n            let timestamp_secs: u64 = s.strip_prefix('@')?.parse().ok()?;\n            Some(UNIX_EPOCH + Duration::from_secs(timestamp_secs))\n        }\n    }\n\n    pub fn before(s: &str) -> Option<TimeFilter> {\n        TimeFilter::from_str(s).map(TimeFilter::Before)\n    }\n\n    pub fn after(s: &str) -> Option<TimeFilter> {\n        TimeFilter::from_str(s).map(TimeFilter::After)\n    }\n\n    pub fn applies_to(&self, t: &SystemTime) -> bool {\n        match self {\n            TimeFilter::Before(limit) => t < limit,\n            TimeFilter::After(limit) => t > limit,\n        }\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use std::time::Duration;\n\n    struct TestTime(SystemTime);\n\n    impl TestTime {\n        fn 
new(time: Zoned) -> Self {\n            TESTTIME.with_borrow_mut(|t| *t = Some(time.clone()));\n            TestTime(time.into())\n        }\n\n        fn set(&mut self, time: Zoned) {\n            TESTTIME.with_borrow_mut(|t| *t = Some(time.clone()));\n            self.0 = time.into();\n        }\n\n        fn timestamp(&self) -> SystemTime {\n            self.0\n        }\n    }\n\n    impl Drop for TestTime {\n        fn drop(&mut self) {\n            // Stop using manually set times\n            TESTTIME.with_borrow_mut(|t| *t = None);\n        }\n    }\n\n    #[test]\n    fn is_time_filter_applicable() {\n        let local_tz = TimeZone::system();\n        let mut test_time = TestTime::new(\n            local_tz\n                .to_ambiguous_zoned(\"2010-10-10 10:10:10\".parse::<DateTime>().unwrap())\n                .later()\n                .unwrap(),\n        );\n        let mut ref_time = test_time.timestamp();\n\n        assert!(TimeFilter::after(\"1min\").unwrap().applies_to(&ref_time));\n        assert!(!TimeFilter::before(\"1min\").unwrap().applies_to(&ref_time));\n\n        let t1m_ago = ref_time - Duration::from_secs(60);\n        assert!(!TimeFilter::after(\"30sec\").unwrap().applies_to(&t1m_ago));\n        assert!(TimeFilter::after(\"2min\").unwrap().applies_to(&t1m_ago));\n\n        assert!(TimeFilter::before(\"30sec\").unwrap().applies_to(&t1m_ago));\n        assert!(!TimeFilter::before(\"2min\").unwrap().applies_to(&t1m_ago));\n\n        let t10s_before = \"2010-10-10 10:10:00\";\n        assert!(\n            !TimeFilter::before(t10s_before)\n                .unwrap()\n                .applies_to(&ref_time)\n        );\n        assert!(\n            TimeFilter::before(t10s_before)\n                .unwrap()\n                .applies_to(&t1m_ago)\n        );\n\n        assert!(\n            TimeFilter::after(t10s_before)\n                .unwrap()\n                .applies_to(&ref_time)\n        );\n        
assert!(!TimeFilter::after(t10s_before).unwrap().applies_to(&t1m_ago));\n\n        let same_day = \"2010-10-10\";\n        assert!(!TimeFilter::before(same_day).unwrap().applies_to(&ref_time));\n        assert!(!TimeFilter::before(same_day).unwrap().applies_to(&t1m_ago));\n\n        assert!(TimeFilter::after(same_day).unwrap().applies_to(&ref_time));\n        assert!(TimeFilter::after(same_day).unwrap().applies_to(&t1m_ago));\n\n        test_time.set(\n            \"2010-10-10T10:10:10+00:00\"\n                .parse::<Timestamp>()\n                .unwrap()\n                .to_zoned(local_tz.clone()),\n        );\n        ref_time = test_time.timestamp();\n        let t1m_ago = ref_time - Duration::from_secs(60);\n        let t10s_before = \"2010-10-10T10:10:00+00:00\";\n        assert!(\n            !TimeFilter::before(t10s_before)\n                .unwrap()\n                .applies_to(&ref_time)\n        );\n        assert!(\n            TimeFilter::before(t10s_before)\n                .unwrap()\n                .applies_to(&t1m_ago)\n        );\n\n        assert!(\n            TimeFilter::after(t10s_before)\n                .unwrap()\n                .applies_to(&ref_time)\n        );\n        assert!(!TimeFilter::after(t10s_before).unwrap().applies_to(&t1m_ago));\n\n        let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024\n        test_time.set(\n            \"2024-02-12T07:36:52+00:00\"\n                .parse::<Timestamp>()\n                .unwrap()\n                .to_zoned(local_tz),\n        );\n        ref_time = test_time.timestamp();\n        let t1m_ago = ref_time - Duration::from_secs(60);\n        let t1s_later = ref_time + Duration::from_secs(1);\n        // Timestamp only supported via '@' prefix\n        assert!(TimeFilter::before(&ref_timestamp.to_string()).is_none());\n        assert!(\n            TimeFilter::before(&format!(\"@{ref_timestamp}\"))\n                .unwrap()\n                .applies_to(&t1m_ago)\n        
);\n        assert!(\n            !TimeFilter::before(&format!(\"@{ref_timestamp}\"))\n                .unwrap()\n                .applies_to(&t1s_later)\n        );\n        assert!(\n            !TimeFilter::after(&format!(\"@{ref_timestamp}\"))\n                .unwrap()\n                .applies_to(&t1m_ago)\n        );\n        assert!(\n            TimeFilter::after(&format!(\"@{ref_timestamp}\"))\n                .unwrap()\n                .applies_to(&t1s_later)\n        );\n    }\n}\n"
  },
  {
    "path": "src/fmt/input.rs",
    "content": "use std::ffi::{OsStr, OsString};\nuse std::path::{Path, PathBuf};\n\nuse crate::filesystem::strip_current_dir;\n\n/// Removes the parent component of the path\npub fn basename(path: &Path) -> &OsStr {\n    path.file_name().unwrap_or(path.as_os_str())\n}\n\n/// Removes the extension from the path\npub fn remove_extension(path: &Path) -> OsString {\n    let dirname = dirname(path);\n    let stem = path.file_stem().unwrap_or(path.as_os_str());\n\n    let path = PathBuf::from(dirname).join(stem);\n\n    strip_current_dir(&path).to_owned().into_os_string()\n}\n\n/// Removes the basename from the path.\npub fn dirname(path: &Path) -> OsString {\n    path.parent()\n        .map(|p| {\n            if p == OsStr::new(\"\") {\n                OsString::from(\".\")\n            } else {\n                p.as_os_str().to_owned()\n            }\n        })\n        .unwrap_or_else(|| path.as_os_str().to_owned())\n}\n\n#[cfg(test)]\nmod path_tests {\n    use super::*;\n    use std::path::MAIN_SEPARATOR_STR;\n\n    fn correct(input: &str) -> String {\n        input.replace('/', MAIN_SEPARATOR_STR)\n    }\n\n    macro_rules! func_tests {\n        ($($name:ident: $func:ident for $input:expr => $output:expr)+) => {\n            $(\n                #[test]\n                fn $name() {\n                    let input_path = PathBuf::from(&correct($input));\n                    let output_string = OsString::from(correct($output));\n                    assert_eq!($func(&input_path), output_string);\n                }\n            )+\n        }\n    }\n\n    func_tests! 
{\n        remove_ext_simple:  remove_extension  for  \"foo.txt\"      =>  \"foo\"\n        remove_ext_dir:     remove_extension  for  \"dir/foo.txt\"  =>  \"dir/foo\"\n        hidden:             remove_extension  for  \".foo\"         =>  \".foo\"\n        remove_ext_utf8:    remove_extension  for  \"💖.txt\"       =>  \"💖\"\n        remove_ext_empty:   remove_extension  for  \"\"             =>  \"\"\n\n        basename_simple:  basename  for  \"foo.txt\"      =>  \"foo.txt\"\n        basename_dir:     basename  for  \"dir/foo.txt\"  =>  \"foo.txt\"\n        basename_empty:   basename  for  \"\"             =>  \"\"\n        basename_utf8_0:  basename  for  \"💖/foo.txt\"   =>  \"foo.txt\"\n        basename_utf8_1:  basename  for  \"dir/💖.txt\"   =>  \"💖.txt\"\n\n        dirname_simple:  dirname  for  \"foo.txt\"      =>  \".\"\n        dirname_dir:     dirname  for  \"dir/foo.txt\"  =>  \"dir\"\n        dirname_utf8_0:  dirname  for  \"💖/foo.txt\"   =>  \"💖\"\n        dirname_utf8_1:  dirname  for  \"dir/💖.txt\"   =>  \"dir\"\n    }\n\n    #[test]\n    #[cfg(windows)]\n    fn dirname_root() {\n        assert_eq!(dirname(&PathBuf::from(\"C:\")), OsString::from(\"C:\"));\n        assert_eq!(dirname(&PathBuf::from(\"\\\\\")), OsString::from(\"\\\\\"));\n    }\n\n    #[test]\n    #[cfg(not(windows))]\n    fn dirname_root() {\n        assert_eq!(dirname(&PathBuf::from(\"/\")), OsString::from(\"/\"));\n    }\n}\n"
  },
  {
    "path": "src/fmt/mod.rs",
    "content": "mod input;\n\nuse std::borrow::Cow;\nuse std::ffi::{OsStr, OsString};\nuse std::fmt::{self, Display, Formatter};\nuse std::path::{Component, Path, Prefix};\nuse std::sync::OnceLock;\n\nuse aho_corasick::AhoCorasick;\n\nuse self::input::{basename, dirname, remove_extension};\n\n/// Designates what should be written to a buffer\n///\n/// Each `Token` contains either text, or a placeholder variant, which will be used to generate\n/// commands after all tokens for a given command template have been collected.\n#[derive(Clone, Debug, PartialEq, Eq)]\npub enum Token {\n    Placeholder,\n    Basename,\n    Parent,\n    NoExt,\n    BasenameNoExt,\n    Text(String),\n}\n\nimpl Display for Token {\n    fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n        match *self {\n            Token::Placeholder => f.write_str(\"{}\")?,\n            Token::Basename => f.write_str(\"{/}\")?,\n            Token::Parent => f.write_str(\"{//}\")?,\n            Token::NoExt => f.write_str(\"{.}\")?,\n            Token::BasenameNoExt => f.write_str(\"{/.}\")?,\n            Token::Text(ref string) => f.write_str(string)?,\n        }\n        Ok(())\n    }\n}\n\n/// A parsed format string\n///\n/// This is either a collection of `Token`s including at least one placeholder variant,\n/// or a fixed text.\n#[derive(Clone, Debug, PartialEq)]\npub enum FormatTemplate {\n    Tokens(Vec<Token>),\n    Text(String),\n}\n\nstatic PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();\n\nimpl FormatTemplate {\n    pub fn has_tokens(&self) -> bool {\n        matches!(self, FormatTemplate::Tokens(_))\n    }\n\n    pub fn parse(fmt: &str) -> Self {\n        // NOTE: we assume that { and } have the same length\n        const BRACE_LEN: usize = '{'.len_utf8();\n        let mut tokens = Vec::new();\n        let mut remaining = fmt;\n        let mut buf = String::new();\n        let placeholders = PLACEHOLDERS.get_or_init(|| {\n            AhoCorasick::new([\"{{\", \"}}\", \"{}\", \"{/}\", 
\"{//}\", \"{.}\", \"{/.}\"]).unwrap()\n        });\n        while let Some(m) = placeholders.find(remaining) {\n            match m.pattern().as_u32() {\n                0 | 1 => {\n                    // we found an escaped {{ or }}, so add\n                    // everything up to the first char to the buffer\n                    // then skip the second one.\n                    buf += &remaining[..m.start() + BRACE_LEN];\n                    remaining = &remaining[m.end()..];\n                }\n                id if !remaining[m.end()..].starts_with('}') => {\n                    buf += &remaining[..m.start()];\n                    if !buf.is_empty() {\n                        tokens.push(Token::Text(std::mem::take(&mut buf)));\n                    }\n                    tokens.push(token_from_pattern_id(id));\n                    remaining = &remaining[m.end()..];\n                }\n                _ => {\n                    // We got a normal pattern, but the final \"}\"\n                    // is escaped, so add up to that to the buffer, then\n                    // skip the final }\n                    buf += &remaining[..m.end()];\n                    remaining = &remaining[m.end() + BRACE_LEN..];\n                }\n            }\n        }\n        // Add the rest of the string to the buffer, and add the final buffer to the tokens\n        if !remaining.is_empty() {\n            buf += remaining;\n        }\n        if tokens.is_empty() {\n            // No placeholders were found, so just return the text\n            return FormatTemplate::Text(buf);\n        }\n        // Add final text segment\n        if !buf.is_empty() {\n            tokens.push(Token::Text(buf));\n        }\n        debug_assert!(!tokens.is_empty());\n        FormatTemplate::Tokens(tokens)\n    }\n\n    /// Generate a result string from this template. If path_separator is Some, then it will replace\n    /// the path separator in all placeholder tokens. 
Fixed text and tokens are not affected by\n    /// path separator substitution.\n    pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {\n        use Token::*;\n        let path = path.as_ref();\n\n        match *self {\n            Self::Tokens(ref tokens) => {\n                let mut s = OsString::new();\n                for token in tokens {\n                    match token {\n                        Basename => s.push(Self::replace_separator(basename(path), path_separator)),\n                        BasenameNoExt => s.push(Self::replace_separator(\n                            &remove_extension(basename(path).as_ref()),\n                            path_separator,\n                        )),\n                        NoExt => s.push(Self::replace_separator(\n                            &remove_extension(path),\n                            path_separator,\n                        )),\n                        Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),\n                        Placeholder => {\n                            s.push(Self::replace_separator(path.as_ref(), path_separator))\n                        }\n                        Text(string) => s.push(string),\n                    }\n                }\n                s\n            }\n            Self::Text(ref text) => OsString::from(text),\n        }\n    }\n\n    /// Replace the path separator in the input with the custom separator string. If path_separator\n    /// is None, simply return a borrowed Cow<OsStr> of the input. 
Otherwise, the input is\n    /// interpreted as a Path and its components are iterated through and re-joined into a new\n    /// OsString.\n    fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {\n        // fast-path - no replacement necessary\n        if path_separator.is_none() {\n            return Cow::Borrowed(path);\n        }\n\n        let path_separator = path_separator.unwrap();\n        let mut out = OsString::with_capacity(path.len());\n        let mut components = Path::new(path).components().peekable();\n\n        while let Some(comp) = components.next() {\n            match comp {\n                // Absolute paths on Windows are tricky.  A Prefix component is usually a drive\n                // letter or UNC path, and is usually followed by RootDir. There are also\n                // \"verbatim\" prefixes beginning with \"\\\\?\\\" that skip normalization. We choose to\n                // ignore verbatim path prefixes here because they're very rare, might be\n                // impossible to reach here, and there's no good way to deal with them. If users\n                // are doing something advanced involving verbatim windows paths, they can do their\n                // own output filtering with a tool like sed.\n                Component::Prefix(prefix) => {\n                    if let Prefix::UNC(server, share) = prefix.kind() {\n                        // Prefix::UNC is a parsed version of '\\\\server\\share'\n                        out.push(path_separator);\n                        out.push(path_separator);\n                        out.push(server);\n                        out.push(path_separator);\n                        out.push(share);\n                    } else {\n                        // All other Windows prefix types are rendered as-is. This results in e.g. \"C:\" for\n                        // drive letters. 
DeviceNS and Verbatim* prefixes won't have backslashes converted,\n                        // but they're not returned by directories fd can search anyway so we don't worry\n                        // about them.\n                        out.push(comp.as_os_str());\n                    }\n                }\n\n                // Root directory is always replaced with the custom separator.\n                Component::RootDir => out.push(path_separator),\n\n                // Everything else is joined normally, with a trailing separator if we're not last\n                _ => {\n                    out.push(comp.as_os_str());\n                    if components.peek().is_some() {\n                        out.push(path_separator);\n                    }\n                }\n            }\n        }\n        Cow::Owned(out)\n    }\n}\n\n// Convert the id from an aho-corasick match to the\n// appropriate token\nfn token_from_pattern_id(id: u32) -> Token {\n    use Token::*;\n    match id {\n        2 => Placeholder,\n        3 => Basename,\n        4 => Parent,\n        5 => NoExt,\n        6 => BasenameNoExt,\n        _ => unreachable!(),\n    }\n}\n\n#[cfg(test)]\nmod fmt_tests {\n    use super::*;\n    use std::path::PathBuf;\n\n    #[test]\n    fn parse_no_placeholders() {\n        let templ = FormatTemplate::parse(\"This string has no placeholders\");\n        assert_eq!(\n            templ,\n            FormatTemplate::Text(\"This string has no placeholders\".into())\n        );\n    }\n\n    #[test]\n    fn parse_only_brace_escapes() {\n        let templ = FormatTemplate::parse(\"This string only has escapes like {{ and }}\");\n        assert_eq!(\n            templ,\n            FormatTemplate::Text(\"This string only has escapes like { and }\".into())\n        );\n    }\n\n    #[test]\n    fn all_placeholders() {\n        use Token::*;\n\n        let templ = FormatTemplate::parse(\n            \"{{path={} \\\n            basename={/} \\\n            parent={//} 
\\\n            noExt={.} \\\n            basenameNoExt={/.} \\\n            }}\",\n        );\n        assert_eq!(\n            templ,\n            FormatTemplate::Tokens(vec![\n                Text(\"{path=\".into()),\n                Placeholder,\n                Text(\" basename=\".into()),\n                Basename,\n                Text(\" parent=\".into()),\n                Parent,\n                Text(\" noExt=\".into()),\n                NoExt,\n                Text(\" basenameNoExt=\".into()),\n                BasenameNoExt,\n                Text(\" }\".into()),\n            ])\n        );\n\n        let mut path = PathBuf::new();\n        path.push(\"a\");\n        path.push(\"folder\");\n        path.push(\"file.txt\");\n\n        let expanded = templ.generate(&path, Some(\"/\")).into_string().unwrap();\n\n        assert_eq!(\n            expanded,\n            \"{path=a/folder/file.txt \\\n            basename=file.txt \\\n            parent=a/folder \\\n            noExt=a/folder/file \\\n            basenameNoExt=file }\"\n        );\n    }\n}\n"
  },
  {
    "path": "src/hyperlink.rs",
    "content": "use crate::filesystem::absolute_path;\nuse std::fmt::{self, Formatter, Write};\nuse std::path::{Path, PathBuf};\n\npub(crate) struct PathUrl(PathBuf);\n\nimpl PathUrl {\n    pub(crate) fn new(path: &Path) -> Option<PathUrl> {\n        Some(PathUrl(absolute_path(path).ok()?))\n    }\n}\n\nimpl fmt::Display for PathUrl {\n    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n        write!(f, \"file://{}\", host())?;\n        let bytes = self.0.as_os_str().as_encoded_bytes();\n        for &byte in bytes.iter() {\n            encode(f, byte)?;\n        }\n        Ok(())\n    }\n}\n\nfn encode(f: &mut Formatter, byte: u8) -> fmt::Result {\n    // NOTE:\n    // Most terminals can handle non-ascii unicode characters in a file url fine. But on some OSes (notably\n    // windows), the encoded bytes of the path may not be valid UTF-8. Since we don't know if a\n    // byte >= 128 is part of a valid UTF-8 encoding or not, we just percent encode any non-ascii\n    // byte.\n    // Percent encoding these bytes is probably safer anyway.\n    match byte {\n        b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z' | b'/' | b':' | b'-' | b'.' 
| b'_' | b'~' => {\n            f.write_char(byte.into())\n        }\n        #[cfg(windows)]\n        b'\\\\' => f.write_char('/'),\n        _ => {\n            write!(f, \"%{byte:02X}\")\n        }\n    }\n}\n\n#[cfg(unix)]\nfn host() -> &'static str {\n    use std::sync::OnceLock;\n\n    static HOSTNAME: OnceLock<String> = OnceLock::new();\n\n    HOSTNAME\n        .get_or_init(|| {\n            nix::unistd::gethostname()\n                .ok()\n                .and_then(|h| h.into_string().ok())\n                .unwrap_or_default()\n        })\n        .as_ref()\n}\n\n#[cfg(not(unix))]\nconst fn host() -> &'static str {\n    \"/\"\n}\n\n#[cfg(test)]\nmod test {\n    use super::*;\n\n    // This allows us to test the encoding without having to worry about the host, or absolute path\n    struct Encoded(&'static str);\n\n    impl fmt::Display for Encoded {\n        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n            for byte in self.0.bytes() {\n                encode(f, byte)?;\n            }\n            Ok(())\n        }\n    }\n\n    #[test]\n    fn test_unicode_encoding() {\n        assert_eq!(\n            Encoded(\"$*\\x1bßé/∫😃\\x07\").to_string(),\n            \"%24%2A%1B%C3%9F%C3%A9/%E2%88%AB%F0%9F%98%83%07\",\n        );\n    }\n}\n"
  },
  {
    "path": "src/main.rs",
    "content": "mod cli;\nmod config;\nmod dir_entry;\nmod error;\nmod exec;\nmod exit_codes;\nmod filesystem;\nmod filetypes;\nmod filter;\nmod fmt;\nmod hyperlink;\nmod output;\nmod regex_helper;\nmod walk;\n\nuse std::env;\nuse std::io::IsTerminal;\nuse std::path::Path;\nuse std::sync::Arc;\n\nuse anyhow::{Context, Result, anyhow, bail};\nuse clap::{CommandFactory, Parser};\nuse globset::GlobBuilder;\nuse lscolors::LsColors;\nuse regex::bytes::{Regex, RegexBuilder, RegexSetBuilder};\n\nuse crate::cli::{ColorWhen, HyperlinkWhen, Opts};\nuse crate::config::Config;\nuse crate::exec::CommandSet;\nuse crate::exit_codes::ExitCode;\nuse crate::filetypes::FileTypes;\n#[cfg(unix)]\nuse crate::filter::OwnerFilter;\nuse crate::filter::TimeFilter;\nuse crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot};\n\n// We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481\n// FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos\n// This has to be kept in sync with the Cargo.toml file section that declares a\n// dependency on tikv-jemallocator.\n#[cfg(all(\n    not(windows),\n    not(target_os = \"android\"),\n    not(target_os = \"macos\"),\n    not(target_os = \"freebsd\"),\n    not(target_os = \"openbsd\"),\n    not(target_os = \"illumos\"),\n    not(all(target_env = \"musl\", target_pointer_width = \"32\")),\n    not(target_arch = \"riscv64\"),\n    feature = \"use-jemalloc\"\n))]\n#[global_allocator]\nstatic ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;\n\n// vivid --color-mode 8-bit generate molokai\nconst DEFAULT_LS_COLORS: &str = 
\"\now=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;
38;5;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.mp4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0;\n38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=
0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243\n\";\n\nfn main() {\n    let result = run();\n    match result {\n        Ok(exit_code) => {\n            exit_code.exit();\n        }\n        Err(err) => {\n            eprintln!(\"[fd error]: {err:#}\");\n            
ExitCode::GeneralError.exit();\n        }\n    }\n}\n\nfn run() -> Result<ExitCode> {\n    let opts = Opts::parse();\n\n    #[cfg(feature = \"completions\")]\n    if let Some(shell) = opts.gen_completions()? {\n        return print_completions(shell);\n    }\n\n    set_working_dir(&opts)?;\n    let search_paths = opts.search_paths()?;\n    if search_paths.is_empty() {\n        bail!(\"No valid search paths given.\");\n    }\n\n    ensure_search_pattern_is_not_a_path(&opts)?;\n    let pattern = &opts.pattern;\n    let exprs = &opts.exprs;\n    let empty = Vec::new();\n\n    let pattern_regexps = exprs\n        .as_ref()\n        .unwrap_or(&empty)\n        .iter()\n        .chain([pattern])\n        .map(|pat| build_pattern_regex(pat, &opts))\n        .collect::<Result<Vec<String>>>()?;\n\n    let config = construct_config(opts, &pattern_regexps)?;\n\n    ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regexps)?;\n\n    let regexps = pattern_regexps\n        .into_iter()\n        .map(|pat| build_regex(pat, &config))\n        .collect::<Result<Vec<Regex>>>()?;\n\n    walk::scan(&search_paths, regexps, config)\n}\n\n#[cfg(feature = \"completions\")]\n#[cold]\nfn print_completions(shell: clap_complete::Shell) -> Result<ExitCode> {\n    // The program name is the first argument.\n    let first_arg = env::args().next();\n    let program_name = first_arg\n        .as_ref()\n        .map(Path::new)\n        .and_then(|path| path.file_stem())\n        .and_then(|file| file.to_str())\n        .unwrap_or(\"fd\");\n    let mut cmd = Opts::command();\n    cmd.build();\n    clap_complete::generate(shell, &mut cmd, program_name, &mut std::io::stdout());\n    Ok(ExitCode::Success)\n}\n\nfn set_working_dir(opts: &Opts) -> Result<()> {\n    if let Some(ref base_directory) = opts.base_directory {\n        if !filesystem::is_existing_directory(base_directory) {\n            return Err(anyhow!(\n                \"The '--base-directory' path '{}' is not a 
directory.\",\n                base_directory.to_string_lossy()\n            ));\n        }\n        env::set_current_dir(base_directory).with_context(|| {\n            format!(\n                \"Could not set '{}' as the current working directory\",\n                base_directory.to_string_lossy()\n            )\n        })?;\n    }\n    Ok(())\n}\n\n/// Detect if the user accidentally supplied a path instead of a search pattern\nfn ensure_search_pattern_is_not_a_path(opts: &Opts) -> Result<()> {\n    if !opts.full_path\n        && opts.pattern.contains(std::path::MAIN_SEPARATOR)\n        && Path::new(&opts.pattern).is_dir()\n    {\n        Err(anyhow!(\n            \"The search pattern '{pattern}' contains a path-separation character ('{sep}') \\\n             and will not lead to any search results.\\n\\n\\\n             If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\\n\\n  \\\n             fd . '{pattern}'\\n\\n\\\n             Instead, if you want your pattern to match the full file path, use:\\n\\n  \\\n             fd --full-path '{pattern}'\",\n            pattern = &opts.pattern,\n            sep = std::path::MAIN_SEPARATOR,\n        ))\n    } else {\n        Ok(())\n    }\n}\n\nfn build_pattern_regex(pattern: &str, opts: &Opts) -> Result<String> {\n    Ok(if opts.glob && !pattern.is_empty() {\n        let glob = GlobBuilder::new(pattern).literal_separator(true).build()?;\n        glob.regex().to_owned()\n    } else if opts.fixed_strings {\n        // Treat pattern as literal string if '--fixed-strings' is used\n        regex::escape(pattern)\n    } else {\n        String::from(pattern)\n    })\n}\n\nfn check_path_separator_length(path_separator: Option<&str>) -> Result<()> {\n    match (cfg!(windows), path_separator) {\n        (true, Some(sep)) if sep.len() > 1 => Err(anyhow!(\n            \"A path separator must be exactly one byte, but \\\n                 the given separator is {} bytes: '{}'.\\n\\\n   
              In some shells on Windows, '/' is automatically \\\n                 expanded. Try to use '//' instead.\",\n            sep.len(),\n            sep\n        )),\n        _ => Ok(()),\n    }\n}\n\nfn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config> {\n    // The search will be case-sensitive if the command line flag is set or\n    // if any of the patterns has an uppercase character (smart case).\n    let case_sensitive = !opts.ignore_case\n        && (opts.case_sensitive\n            || pattern_regexps\n                .iter()\n                .any(|pat| pattern_has_uppercase_char(pat)));\n\n    let path_separator = opts\n        .path_separator\n        .take()\n        .or_else(filesystem::default_path_separator);\n    let actual_path_separator = path_separator\n        .clone()\n        .unwrap_or_else(|| std::path::MAIN_SEPARATOR.to_string());\n    check_path_separator_length(path_separator.as_deref())?;\n\n    let size_limits = std::mem::take(&mut opts.size);\n    let time_constraints = extract_time_constraints(&opts)?;\n    #[cfg(unix)]\n    let owner_constraint: Option<OwnerFilter> = opts.owner.and_then(OwnerFilter::filter_ignore);\n\n    #[cfg(windows)]\n    let ansi_colors_support =\n        nu_ansi_term::enable_ansi_support().is_ok() || std::env::var_os(\"TERM\").is_some();\n    #[cfg(not(windows))]\n    let ansi_colors_support = true;\n\n    let interactive_terminal = std::io::stdout().is_terminal();\n\n    let colored_output = match opts.color {\n        ColorWhen::Always => true,\n        ColorWhen::Never => false,\n        ColorWhen::Auto => {\n            let no_color = env::var_os(\"NO_COLOR\").is_some_and(|x| !x.is_empty());\n            ansi_colors_support && !no_color && interactive_terminal\n        }\n    };\n\n    let ls_colors = if colored_output {\n        Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS)))\n    } else {\n        None\n    };\n    let hyperlink = 
match opts.hyperlink {\n        HyperlinkWhen::Always => true,\n        HyperlinkWhen::Never => false,\n        HyperlinkWhen::Auto => colored_output,\n    };\n    let command = extract_command(&mut opts, colored_output)?;\n    let has_command = command.is_some();\n\n    let cwd = if opts.full_path {\n        Some(env::current_dir().context(\n            \"Could not determine current directory. \\\n             This is required for --full-path.\",\n        )?)\n    } else {\n        None\n    };\n\n    Ok(Config {\n        case_sensitive,\n        cwd,\n        ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),\n        read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),\n        read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),\n        require_git_to_read_vcsignore: !opts.no_require_git,\n        read_parent_ignore: !opts.no_ignore_parent,\n        read_global_ignore: !(opts.no_ignore\n            || opts.rg_alias_ignore()\n            || opts.no_global_ignore_file),\n        follow_links: opts.follow,\n        one_file_system: opts.one_file_system,\n        null_separator: opts.null_separator,\n        quiet: opts.quiet,\n        max_depth: opts.max_depth(),\n        min_depth: opts.min_depth(),\n        prune: opts.prune,\n        threads: opts.threads().get(),\n        max_buffer_time: opts.max_buffer_time,\n        ls_colors,\n        hyperlink,\n        interactive_terminal,\n        file_types: opts.filetype.as_ref().map(|values| {\n            use crate::cli::FileType::*;\n            let mut file_types = FileTypes::default();\n            for value in values {\n                match value {\n                    File => file_types.files = true,\n                    Directory => file_types.directories = true,\n                    Symlink => file_types.symlinks = true,\n                    Executable => {\n                        file_types.executables_only = true;\n                        file_types.files = 
true;\n                    }\n                    Empty => file_types.empty_only = true,\n                    BlockDevice => file_types.block_devices = true,\n                    CharDevice => file_types.char_devices = true,\n                    Socket => file_types.sockets = true,\n                    Pipe => file_types.pipes = true,\n                }\n            }\n\n            // If only 'empty' was specified, search for both files and directories:\n            if file_types.empty_only && !(file_types.files || file_types.directories) {\n                file_types.files = true;\n                file_types.directories = true;\n            }\n\n            file_types\n        }),\n        extensions: opts\n            .extensions\n            .as_ref()\n            .map(|exts| {\n                let patterns = exts\n                    .iter()\n                    .map(|e| e.trim_start_matches('.'))\n                    .map(|e| format!(r\".\\.{}$\", regex::escape(e)));\n                RegexSetBuilder::new(patterns)\n                    .case_insensitive(true)\n                    .build()\n            })\n            .transpose()?,\n        format: opts\n            .format\n            .as_deref()\n            .map(crate::fmt::FormatTemplate::parse),\n        command: command.map(Arc::new),\n        batch_size: opts.batch_size,\n        exclude_patterns: opts.exclude.iter().map(|p| String::from(\"!\") + p).collect(),\n        ignore_files: std::mem::take(&mut opts.ignore_file),\n        size_constraints: size_limits,\n        time_constraints,\n        #[cfg(unix)]\n        owner_constraint,\n        show_filesystem_errors: opts.show_errors,\n        path_separator,\n        actual_path_separator,\n        max_results: opts.max_results(),\n        strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),\n        ignore_contain: opts.ignore_contain,\n    })\n}\n\nfn extract_command(opts: &mut Opts, colored_output: bool) -> 
Result<Option<CommandSet>> {\n    opts.exec\n        .command\n        .take()\n        .map(Ok)\n        .or_else(|| {\n            if !opts.list_details {\n                return None;\n            }\n\n            let res = determine_ls_command(colored_output)\n                .map(|cmd| CommandSet::new_batch([cmd]).unwrap());\n            Some(res)\n        })\n        .transpose()\n}\n\nfn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {\n    #[allow(unused)]\n    let gnu_ls = |command_name| {\n        let color_arg = if colored_output {\n            \"--color=always\"\n        } else {\n            \"--color=never\"\n        };\n        // Note: we use short options here (instead of --long-options) to support more\n        // platforms (like BusyBox).\n        vec![\n            command_name,\n            \"-l\", // long listing format\n            \"-h\", // human readable file sizes\n            \"-d\", // list directories themselves, not their contents\n            color_arg,\n        ]\n    };\n    let cmd: Vec<&str> = if cfg!(unix) {\n        if !cfg!(any(\n            target_os = \"macos\",\n            target_os = \"dragonfly\",\n            target_os = \"freebsd\",\n            target_os = \"netbsd\",\n            target_os = \"openbsd\"\n        )) {\n            // Assume ls is GNU ls\n            gnu_ls(\"ls\")\n        } else {\n            // MacOS, DragonFlyBSD, FreeBSD\n            use std::process::{Command, Stdio};\n\n            // Use GNU ls, if available (support for --color=auto, better LS_COLORS support)\n            let gnu_ls_exists = Command::new(\"gls\")\n                .arg(\"--version\")\n                .stdout(Stdio::null())\n                .stderr(Stdio::null())\n                .status()\n                .is_ok();\n\n            if gnu_ls_exists {\n                gnu_ls(\"gls\")\n            } else {\n                let mut cmd = vec![\n                    \"ls\", // BSD version of ls\n              
      \"-l\", // long listing format\n                    \"-h\", // '--human-readable' is not available, '-h' is\n                    \"-d\", // '--directory' is not available, but '-d' is\n                ];\n\n                if !cfg!(any(target_os = \"netbsd\", target_os = \"openbsd\")) && colored_output {\n                    // -G is not available in NetBSD's and OpenBSD's ls\n                    cmd.push(\"-G\");\n                }\n\n                cmd\n            }\n        }\n    } else if cfg!(windows) {\n        use std::process::{Command, Stdio};\n\n        // Use GNU ls, if available\n        let gnu_ls_exists = Command::new(\"ls\")\n            .arg(\"--version\")\n            .stdout(Stdio::null())\n            .stderr(Stdio::null())\n            .status()\n            .is_ok();\n\n        if gnu_ls_exists {\n            gnu_ls(\"ls\")\n        } else {\n            return Err(anyhow!(\n                \"'fd --list-details' is not supported on Windows unless GNU 'ls' is installed.\"\n            ));\n        }\n    } else {\n        return Err(anyhow!(\n            \"'fd --list-details' is not supported on this platform.\"\n        ));\n    };\n    Ok(cmd)\n}\n\nfn extract_time_constraints(opts: &Opts) -> Result<Vec<TimeFilter>> {\n    let mut time_constraints: Vec<TimeFilter> = Vec::new();\n    if let Some(ref t) = opts.changed_within {\n        if let Some(f) = TimeFilter::after(t) {\n            time_constraints.push(f);\n        } else {\n            return Err(anyhow!(\n                \"'{}' is not a valid date or duration. See 'fd --help'.\",\n                t\n            ));\n        }\n    }\n    if let Some(ref t) = opts.changed_before {\n        if let Some(f) = TimeFilter::before(t) {\n            time_constraints.push(f);\n        } else {\n            return Err(anyhow!(\n                \"'{}' is not a valid date or duration. 
See 'fd --help'.\",\n                t\n            ));\n        }\n    }\n    Ok(time_constraints)\n}\n\nfn ensure_use_hidden_option_for_leading_dot_pattern(\n    config: &Config,\n    pattern_regexps: &[String],\n) -> Result<()> {\n    if cfg!(unix)\n        && config.ignore_hidden\n        && pattern_regexps\n            .iter()\n            .any(|pat| pattern_matches_strings_with_leading_dot(pat))\n    {\n        Err(anyhow!(\n            \"The pattern(s) seems to only match files with a leading dot, but hidden files are \\\n            filtered by default. Consider adding -H/--hidden to search hidden files as well \\\n            or adjust your search pattern(s).\"\n        ))\n    } else {\n        Ok(())\n    }\n}\n\nfn build_regex(pattern_regex: String, config: &Config) -> Result<regex::bytes::Regex> {\n    RegexBuilder::new(&pattern_regex)\n        .case_insensitive(!config.case_sensitive)\n        .dot_matches_new_line(true)\n        .build()\n        .map_err(|e| {\n            anyhow!(\n                \"{}\\n\\nNote: You can use the '--fixed-strings' option to search for a \\\n                 literal string instead of a regular expression. Alternatively, you can \\\n                 also use the '--glob' option to match on a glob pattern.\",\n                e\n            )\n        })\n}\n"
  },
  {
    "path": "src/output.rs",
    "content": "use std::borrow::Cow;\nuse std::io::{self, Write};\n\nuse lscolors::{Indicator, LsColors, Style};\n\nuse crate::config::Config;\nuse crate::dir_entry::DirEntry;\nuse crate::fmt::FormatTemplate;\nuse crate::hyperlink::PathUrl;\n\nfn replace_path_separator(path: &str, new_path_separator: &str) -> String {\n    path.replace(std::path::MAIN_SEPARATOR, new_path_separator)\n}\n\n// TODO: this function is performance critical and can probably be optimized\npub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) -> io::Result<()> {\n    let mut has_hyperlink = false;\n    if config.hyperlink\n        && let Some(url) = PathUrl::new(entry.path())\n    {\n        write!(stdout, \"\\x1B]8;;{url}\\x1B\\\\\")?;\n        has_hyperlink = true;\n    }\n\n    if let Some(ref format) = config.format {\n        print_entry_format(stdout, entry, config, format)?;\n    } else if let Some(ref ls_colors) = config.ls_colors {\n        print_entry_colorized(stdout, entry, config, ls_colors)?;\n    } else {\n        print_entry_uncolorized(stdout, entry, config)?;\n    };\n\n    if has_hyperlink {\n        write!(stdout, \"\\x1B]8;;\\x1B\\\\\")?;\n    }\n\n    if config.null_separator {\n        write!(stdout, \"\\0\")\n    } else {\n        writeln!(stdout)\n    }\n}\n\n// Display a trailing slash if the path is a directory and the config option is enabled.\n// If the path_separator option is set, display that instead.\n// The trailing slash will not be colored.\n#[inline]\nfn print_trailing_slash<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n    style: Option<&Style>,\n) -> io::Result<()> {\n    if entry.file_type().is_some_and(|ft| ft.is_dir()) {\n        write!(\n            stdout,\n            \"{}\",\n            style\n                .map(Style::to_nu_ansi_term_style)\n                .unwrap_or_default()\n                .paint(&config.actual_path_separator)\n        )?;\n    }\n    Ok(())\n}\n\n// TODO: this 
function is performance critical and can probably be optimized\nfn print_entry_format<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n    format: &FormatTemplate,\n) -> io::Result<()> {\n    let output = format.generate(\n        entry.stripped_path(config),\n        config.path_separator.as_deref(),\n    );\n    // TODO: support writing raw bytes on unix?\n    write!(stdout, \"{}\", output.to_string_lossy())\n}\n\n// TODO: this function is performance critical and can probably be optimized\nfn print_entry_colorized<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n    ls_colors: &LsColors,\n) -> io::Result<()> {\n    // Split the path between the parent and the last component\n    let mut offset = 0;\n    let path = entry.stripped_path(config);\n    let path_str = path.to_string_lossy();\n\n    if let Some(parent) = path.parent() {\n        offset = parent.to_string_lossy().len();\n        for c in path_str[offset..].chars() {\n            if std::path::is_separator(c) {\n                offset += c.len_utf8();\n            } else {\n                break;\n            }\n        }\n    }\n\n    if offset > 0 {\n        let mut parent_str = Cow::from(&path_str[..offset]);\n        if let Some(ref separator) = config.path_separator {\n            *parent_str.to_mut() = replace_path_separator(&parent_str, separator);\n        }\n\n        let style = ls_colors\n            .style_for_indicator(Indicator::Directory)\n            .map(Style::to_nu_ansi_term_style)\n            .unwrap_or_default();\n        write!(stdout, \"{}\", style.paint(parent_str))?;\n    }\n\n    let style = entry\n        .style(ls_colors)\n        .map(Style::to_nu_ansi_term_style)\n        .unwrap_or_default();\n    write!(stdout, \"{}\", style.paint(&path_str[offset..]))?;\n\n    print_trailing_slash(\n        stdout,\n        entry,\n        config,\n        ls_colors.style_for_indicator(Indicator::Directory),\n    )?;\n\n    
Ok(())\n}\n\n// TODO: this function is performance critical and can probably be optimized\nfn print_entry_uncolorized_base<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n) -> io::Result<()> {\n    let path = entry.stripped_path(config);\n\n    let mut path_string = path.to_string_lossy();\n    if let Some(ref separator) = config.path_separator {\n        *path_string.to_mut() = replace_path_separator(&path_string, separator);\n    }\n    write!(stdout, \"{path_string}\")?;\n    print_trailing_slash(stdout, entry, config, None)\n}\n\n#[cfg(not(unix))]\nfn print_entry_uncolorized<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n) -> io::Result<()> {\n    print_entry_uncolorized_base(stdout, entry, config)\n}\n\n#[cfg(unix)]\nfn print_entry_uncolorized<W: Write>(\n    stdout: &mut W,\n    entry: &DirEntry,\n    config: &Config,\n) -> io::Result<()> {\n    use std::os::unix::ffi::OsStrExt;\n\n    if config.interactive_terminal || config.path_separator.is_some() {\n        // Fall back to the base implementation\n        print_entry_uncolorized_base(stdout, entry, config)\n    } else {\n        // Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes\n        stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?;\n        print_trailing_slash(stdout, entry, config, None)\n    }\n}\n"
  },
  {
    "path": "src/regex_helper.rs",
    "content": "use regex_syntax::ParserBuilder;\nuse regex_syntax::hir::Hir;\n\n/// Determine if a regex pattern contains a literal uppercase character.\npub fn pattern_has_uppercase_char(pattern: &str) -> bool {\n    let mut parser = ParserBuilder::new().utf8(false).build();\n\n    parser\n        .parse(pattern)\n        .map(|hir| hir_has_uppercase_char(&hir))\n        .unwrap_or(false)\n}\n\n/// Determine if a regex expression contains a literal uppercase character.\nfn hir_has_uppercase_char(hir: &Hir) -> bool {\n    use regex_syntax::hir::*;\n\n    match hir.kind() {\n        HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {\n            Ok(s) => s.chars().any(|c| c.is_uppercase()),\n            Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),\n        },\n        HirKind::Class(Class::Unicode(ranges)) => ranges\n            .iter()\n            .any(|r| r.start().is_uppercase() || r.end().is_uppercase()),\n        HirKind::Class(Class::Bytes(ranges)) => ranges\n            .iter()\n            .any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),\n        HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. 
}) => {\n            hir_has_uppercase_char(sub)\n        }\n        HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {\n            hirs.iter().any(hir_has_uppercase_char)\n        }\n        _ => false,\n    }\n}\n\n/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)\npub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {\n    let mut parser = ParserBuilder::new().utf8(false).build();\n\n    parser\n        .parse(pattern)\n        .map(|hir| hir_matches_strings_with_leading_dot(&hir))\n        .unwrap_or(false)\n}\n\n/// See above.\nfn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {\n    use regex_syntax::hir::*;\n\n    // Note: this only really detects the simplest case where a regex starts with\n    // \"^\\\\.\", i.e. a start text anchor and a literal dot character. There are a lot\n    // of other patterns that ONLY match hidden files, e.g. ^(\\\\.foo|\\\\.bar) which are\n    // not (yet) detected by this algorithm.\n    match hir.kind() {\n        HirKind::Concat(hirs) => {\n            let mut hirs = hirs.iter();\n            if let Some(hir) = hirs.next() {\n                if hir.kind() != &HirKind::Look(Look::Start) {\n                    return false;\n                }\n            } else {\n                return false;\n            }\n\n            if let Some(hir) = hirs.next() {\n                match hir.kind() {\n                    HirKind::Literal(Literal(bytes)) => bytes.starts_with(b\".\"),\n                    _ => false,\n                }\n            } else {\n                false\n            }\n        }\n        _ => false,\n    }\n}\n\n#[test]\nfn pattern_has_uppercase_char_simple() {\n    assert!(pattern_has_uppercase_char(\"A\"));\n    assert!(pattern_has_uppercase_char(\"foo.EXE\"));\n\n    assert!(!pattern_has_uppercase_char(\"a\"));\n    assert!(!pattern_has_uppercase_char(\"foo.exe123\"));\n}\n\n#[test]\nfn pattern_has_uppercase_char_advanced() {\n   
 assert!(pattern_has_uppercase_char(\"foo.[a-zA-Z]\"));\n\n    assert!(!pattern_has_uppercase_char(r\"\\Acargo\"));\n    assert!(!pattern_has_uppercase_char(r\"carg\\x6F\"));\n}\n\n#[test]\nfn matches_strings_with_leading_dot_simple() {\n    assert!(pattern_matches_strings_with_leading_dot(\"^\\\\.gitignore\"));\n\n    assert!(!pattern_matches_strings_with_leading_dot(\"^.gitignore\"));\n    assert!(!pattern_matches_strings_with_leading_dot(\"\\\\.gitignore\"));\n    assert!(!pattern_matches_strings_with_leading_dot(\"^gitignore\"));\n}\n"
  },
  {
    "path": "src/walk.rs",
    "content": "use std::borrow::Cow;\nuse std::ffi::OsStr;\nuse std::io::{self, Write};\nuse std::mem;\nuse std::path::PathBuf;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::{Arc, Mutex, MutexGuard};\nuse std::thread;\nuse std::time::{Duration, Instant};\n\nuse anyhow::{Result, anyhow};\nuse crossbeam_channel::{Receiver, RecvTimeoutError, SendError, Sender, bounded};\nuse etcetera::BaseStrategy;\nuse ignore::overrides::{Override, OverrideBuilder};\nuse ignore::{WalkBuilder, WalkParallel, WalkState};\nuse regex::bytes::Regex;\n\nuse crate::config::Config;\nuse crate::dir_entry::DirEntry;\nuse crate::error::print_error;\nuse crate::exec;\nuse crate::exit_codes::{ExitCode, merge_exitcodes};\nuse crate::filesystem;\nuse crate::output;\n\n/// The receiver thread can either be buffering results or directly streaming to the console.\n#[derive(PartialEq)]\nenum ReceiverMode {\n    /// Receiver is still buffering in order to sort the results, if the search finishes fast\n    /// enough.\n    Buffering,\n\n    /// Receiver is directly printing results to the output.\n    Streaming,\n}\n\n/// The Worker threads can result in a valid entry having PathBuf or an error.\n#[allow(clippy::large_enum_variant)]\n#[derive(Debug)]\npub enum WorkerResult {\n    // Errors should be rare, so it's probably better to allow large_enum_variant than\n    // to box the Entry variant\n    Entry(DirEntry),\n    Error(ignore::Error),\n}\n\n/// A batch of WorkerResults to send over a channel.\n#[derive(Clone)]\nstruct Batch {\n    items: Arc<Mutex<Option<Vec<WorkerResult>>>>,\n}\n\nimpl Batch {\n    fn new() -> Self {\n        Self {\n            items: Arc::new(Mutex::new(Some(vec![]))),\n        }\n    }\n\n    fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {\n        self.items.lock().unwrap()\n    }\n}\n\nimpl IntoIterator for Batch {\n    type Item = WorkerResult;\n    type IntoIter = std::vec::IntoIter<WorkerResult>;\n\n    fn into_iter(self) -> Self::IntoIter 
{\n        self.lock().take().unwrap().into_iter()\n    }\n}\n\n/// Wrapper that sends batches of items at once over a channel.\nstruct BatchSender {\n    batch: Batch,\n    tx: Sender<Batch>,\n    limit: usize,\n}\n\nimpl BatchSender {\n    fn new(tx: Sender<Batch>, limit: usize) -> Self {\n        Self {\n            batch: Batch::new(),\n            tx,\n            limit,\n        }\n    }\n\n    /// Check if we need to flush a batch.\n    fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {\n        match batch {\n            // Limit the batch size to provide some backpressure\n            Some(vec) => vec.len() >= self.limit,\n            // Batch was already taken by the receiver, so make a new one\n            None => true,\n        }\n    }\n\n    /// Add an item to a batch.\n    fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {\n        let mut batch = self.batch.lock();\n\n        if self.needs_flush(batch.as_ref()) {\n            drop(batch);\n            self.batch = Batch::new();\n            batch = self.batch.lock();\n        }\n\n        let items = batch.as_mut().unwrap();\n        items.push(item);\n\n        if items.len() == 1 {\n            // New batch, send it over the channel\n            self.tx\n                .send(self.batch.clone())\n                .map_err(|_| SendError(()))?;\n        }\n\n        Ok(())\n    }\n}\n\n/// Maximum size of the output buffer before flushing results to the console\nconst MAX_BUFFER_LENGTH: usize = 1000;\n/// Default duration until output buffering switches to streaming.\nconst DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);\n\n/// Wrapper for the receiver thread's buffering behavior.\nstruct ReceiverBuffer<'a, W> {\n    /// The configuration.\n    config: &'a Config,\n    /// For shutting down the senders.\n    quit_flag: &'a AtomicBool,\n    /// The ^C notifier.\n    interrupt_flag: &'a AtomicBool,\n    /// Receiver for worker results.\n    rx: 
Receiver<Batch>,\n    /// Standard output.\n    stdout: W,\n    /// The current buffer mode.\n    mode: ReceiverMode,\n    /// The deadline to switch to streaming mode.\n    deadline: Instant,\n    /// The buffer of quickly received paths.\n    buffer: Vec<DirEntry>,\n    /// Result count.\n    num_results: usize,\n}\n\nimpl<'a, W: Write> ReceiverBuffer<'a, W> {\n    /// Create a new receiver buffer.\n    fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {\n        let config = &state.config;\n        let quit_flag = state.quit_flag.as_ref();\n        let interrupt_flag = state.interrupt_flag.as_ref();\n        let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);\n        let deadline = Instant::now() + max_buffer_time;\n\n        Self {\n            config,\n            quit_flag,\n            interrupt_flag,\n            rx,\n            stdout,\n            mode: ReceiverMode::Buffering,\n            deadline,\n            buffer: Vec::with_capacity(MAX_BUFFER_LENGTH),\n            num_results: 0,\n        }\n    }\n\n    /// Process results until finished.\n    fn process(&mut self) -> ExitCode {\n        loop {\n            if let Err(ec) = self.poll() {\n                self.quit_flag.store(true, Ordering::Relaxed);\n                return ec;\n            }\n        }\n    }\n\n    /// Receive the next worker result.\n    fn recv(&self) -> Result<Batch, RecvTimeoutError> {\n        match self.mode {\n            ReceiverMode::Buffering => {\n                // Wait at most until we should switch to streaming\n                self.rx.recv_deadline(self.deadline)\n            }\n            ReceiverMode::Streaming => {\n                // Wait however long it takes for a result\n                Ok(self.rx.recv()?)\n            }\n        }\n    }\n\n    /// Wait for a result or state change.\n    fn poll(&mut self) -> Result<(), ExitCode> {\n        match self.recv() {\n            Ok(batch) => {\n               
 for result in batch {\n                    match result {\n                        WorkerResult::Entry(dir_entry) => {\n                            if self.config.quiet {\n                                return Err(ExitCode::HasResults(true));\n                            }\n\n                            match self.mode {\n                                ReceiverMode::Buffering => {\n                                    self.buffer.push(dir_entry);\n                                    if self.buffer.len() > MAX_BUFFER_LENGTH {\n                                        self.stream()?;\n                                    }\n                                }\n                                ReceiverMode::Streaming => {\n                                    self.print(&dir_entry)?;\n                                }\n                            }\n\n                            self.num_results += 1;\n                            if let Some(max_results) = self.config.max_results\n                                && self.num_results >= max_results\n                            {\n                                return self.stop();\n                            }\n                        }\n                        WorkerResult::Error(err) => {\n                            if self.config.show_filesystem_errors {\n                                print_error(err.to_string());\n                            }\n                        }\n                    }\n                }\n\n                // If we don't have another batch ready, flush before waiting\n                if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {\n                    self.flush()?;\n                }\n            }\n            Err(RecvTimeoutError::Timeout) => {\n                self.stream()?;\n            }\n            Err(RecvTimeoutError::Disconnected) => {\n                return self.stop();\n            }\n        }\n\n        Ok(())\n    }\n\n    /// Output a path.\n    fn 
print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {\n        if let Err(e) = output::print_entry(&mut self.stdout, entry, self.config)\n            && e.kind() != ::std::io::ErrorKind::BrokenPipe\n        {\n            print_error(format!(\"Could not write to output: {e}\"));\n            return Err(ExitCode::GeneralError);\n        }\n\n        if self.interrupt_flag.load(Ordering::Relaxed) {\n            // Ignore any errors on flush, because we're about to exit anyway\n            let _ = self.flush();\n            return Err(ExitCode::KilledBySigint);\n        }\n\n        Ok(())\n    }\n\n    /// Switch ourselves into streaming mode.\n    fn stream(&mut self) -> Result<(), ExitCode> {\n        self.mode = ReceiverMode::Streaming;\n\n        let buffer = mem::take(&mut self.buffer);\n        for path in buffer {\n            self.print(&path)?;\n        }\n\n        self.flush()\n    }\n\n    /// Stop looping.\n    fn stop(&mut self) -> Result<(), ExitCode> {\n        if self.mode == ReceiverMode::Buffering {\n            self.buffer.sort();\n            self.stream()?;\n        }\n\n        if self.config.quiet {\n            Err(ExitCode::HasResults(self.num_results > 0))\n        } else {\n            Err(ExitCode::Success)\n        }\n    }\n\n    /// Flush stdout if necessary.\n    fn flush(&mut self) -> Result<(), ExitCode> {\n        if self.stdout.flush().is_err() {\n            // Probably a broken pipe. 
Exit gracefully.\n            return Err(ExitCode::GeneralError);\n        }\n        Ok(())\n    }\n}\n\n/// State shared by the sender and receiver threads.\nstruct WorkerState {\n    /// The search patterns.\n    patterns: Vec<Regex>,\n    /// The command line configuration.\n    config: Config,\n    /// Flag for cleanly shutting down the parallel walk\n    quit_flag: Arc<AtomicBool>,\n    /// Flag specifically for quitting due to ^C\n    interrupt_flag: Arc<AtomicBool>,\n}\n\nimpl WorkerState {\n    fn new(patterns: Vec<Regex>, config: Config) -> Self {\n        let quit_flag = Arc::new(AtomicBool::new(false));\n        let interrupt_flag = Arc::new(AtomicBool::new(false));\n\n        Self {\n            patterns,\n            config,\n            quit_flag,\n            interrupt_flag,\n        }\n    }\n\n    fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {\n        let first_path = &paths[0];\n        let config = &self.config;\n\n        let mut builder = OverrideBuilder::new(first_path);\n\n        for pattern in &config.exclude_patterns {\n            builder\n                .add(pattern)\n                .map_err(|e| anyhow!(\"Malformed exclude pattern: {}\", e))?;\n        }\n\n        builder\n            .build()\n            .map_err(|_| anyhow!(\"Mismatch in exclude patterns\"))\n    }\n\n    fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {\n        let first_path = &paths[0];\n        let config = &self.config;\n        let overrides = self.build_overrides(paths)?;\n\n        let mut builder = WalkBuilder::new(first_path);\n        builder\n            .hidden(config.ignore_hidden)\n            .ignore(config.read_fdignore)\n            .parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))\n            .git_ignore(config.read_vcsignore)\n            .git_global(config.read_vcsignore)\n            .git_exclude(config.read_vcsignore)\n            
.require_git(config.require_git_to_read_vcsignore)\n            .overrides(overrides)\n            .follow_links(config.follow_links)\n            // No need to check for supported platforms, option is unavailable on unsupported ones\n            .same_file_system(config.one_file_system)\n            .max_depth(config.max_depth);\n\n        if config.read_fdignore {\n            builder.add_custom_ignore_filename(\".fdignore\");\n        }\n\n        if config.read_global_ignore\n            && let Ok(basedirs) = etcetera::choose_base_strategy()\n        {\n            let global_ignore_file = basedirs.config_dir().join(\"fd\").join(\"ignore\");\n            if global_ignore_file.is_file() {\n                let result = builder.add_ignore(global_ignore_file);\n                match result {\n                    Some(ignore::Error::Partial(_)) => (),\n                    Some(err) => {\n                        print_error(format!(\"Malformed pattern in global ignore file. {err}.\"));\n                    }\n                    None => (),\n                }\n            }\n        }\n\n        for ignore_file in &config.ignore_files {\n            let result = builder.add_ignore(ignore_file);\n            match result {\n                Some(ignore::Error::Partial(_)) => (),\n                Some(err) => {\n                    print_error(format!(\"Malformed pattern in custom ignore file. {err}.\"));\n                }\n                None => (),\n            }\n        }\n\n        for path in &paths[1..] 
{\n            builder.add(path);\n        }\n\n        let walker = builder.threads(config.threads).build_parallel();\n        Ok(walker)\n    }\n\n    /// Run the receiver work, either on this thread or a pool of background\n    /// threads (for --exec).\n    fn receive(&self, rx: Receiver<Batch>) -> ExitCode {\n        let config = &self.config;\n\n        // This will be set to `Some` if the `--exec` argument was supplied.\n        if let Some(ref cmd) = config.command {\n            if cmd.in_batch_mode() {\n                exec::batch(rx.into_iter().flatten(), cmd, config)\n            } else {\n                thread::scope(|scope| {\n                    // Each spawned job will store its thread handle in here.\n                    let threads = config.threads;\n                    let mut handles = Vec::with_capacity(threads);\n                    for _ in 0..threads {\n                        let rx = rx.clone();\n\n                        // Spawn a job thread that will listen for and execute inputs.\n                        let handle =\n                            scope.spawn(|| exec::job(rx.into_iter().flatten(), cmd, config));\n\n                        // Push the handle of the spawned thread into the vector for later joining.\n                        handles.push(handle);\n                    }\n                    let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());\n                    merge_exitcodes(exit_codes)\n                })\n            }\n        } else {\n            let stdout = io::stdout().lock();\n            let stdout = io::BufWriter::new(stdout);\n\n            ReceiverBuffer::new(self, rx, stdout).process()\n        }\n    }\n\n    /// Spawn the sender threads.\n    fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {\n        walker.run(|| {\n            let patterns = &self.patterns;\n            let config = &self.config;\n            let quit_flag = self.quit_flag.as_ref();\n\n            
let mut limit = 0x100;\n            if let Some(cmd) = &config.command\n                && !cmd.in_batch_mode()\n                && config.threads > 1\n            {\n                // Evenly distribute work between multiple receivers\n                limit = 1;\n            }\n            let mut tx = BatchSender::new(tx.clone(), limit);\n\n            Box::new(move |entry| {\n                if quit_flag.load(Ordering::Relaxed) {\n                    return WalkState::Quit;\n                }\n\n                if let Ok(e) = &entry {\n                    // If the entry is a directory that contains a\n                    // \"ignore contain\" file\", we want to skip this\n                    // directory.\n                    // Check the filetype first to avoid unnecessary\n                    // syscalls.\n                    if e.file_type().is_some_and(|t| t.is_dir()) {\n                        let entry_path = e.path();\n                        if config\n                            .ignore_contain\n                            .iter()\n                            .any(|ic| entry_path.join(ic).exists())\n                        {\n                            return WalkState::Skip;\n                        }\n                    }\n                    if e.depth() == 0 {\n                        // Skip the root directory entry.\n                        return WalkState::Continue;\n                    }\n                }\n                let entry = match entry {\n                    Ok(e) => DirEntry::normal(e),\n                    Err(ignore::Error::WithPath {\n                        path,\n                        err: inner_err,\n                    }) => match inner_err.as_ref() {\n                        ignore::Error::Io(io_error)\n                            if io_error.kind() == io::ErrorKind::NotFound\n                                && path\n                                    .symlink_metadata()\n                                    .ok()\n     
                               .is_some_and(|m| m.file_type().is_symlink()) =>\n                        {\n                            DirEntry::broken_symlink(path)\n                        }\n                        _ => {\n                            return match tx.send(WorkerResult::Error(ignore::Error::WithPath {\n                                path,\n                                err: inner_err,\n                            })) {\n                                Ok(_) => WalkState::Continue,\n                                Err(_) => WalkState::Quit,\n                            };\n                        }\n                    },\n                    Err(err) => {\n                        return match tx.send(WorkerResult::Error(err)) {\n                            Ok(_) => WalkState::Continue,\n                            Err(_) => WalkState::Quit,\n                        };\n                    }\n                };\n\n                if let Some(min_depth) = config.min_depth\n                    && entry.depth().is_none_or(|d| d < min_depth)\n                {\n                    return WalkState::Continue;\n                }\n\n                // Check the name first, since it doesn't require metadata\n                let entry_path = entry.path();\n\n                let search_str = search_str_for_entry(entry_path, config.cwd.as_deref());\n\n                if !patterns\n                    .iter()\n                    .all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))\n                {\n                    return WalkState::Continue;\n                }\n\n                // Filter out unwanted extensions.\n                if let Some(ref exts_regex) = config.extensions {\n                    if let Some(path_str) = entry_path.file_name() {\n                        if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {\n                            return WalkState::Continue;\n                        }\n          
          } else {\n                        return WalkState::Continue;\n                    }\n                }\n\n                // Filter out unwanted file types.\n                if let Some(ref file_types) = config.file_types\n                    && file_types.should_ignore(&entry)\n                {\n                    return WalkState::Continue;\n                }\n\n                #[cfg(unix)]\n                {\n                    if let Some(ref owner_constraint) = config.owner_constraint {\n                        if let Some(metadata) = entry.metadata() {\n                            if !owner_constraint.matches(metadata) {\n                                return WalkState::Continue;\n                            }\n                        } else {\n                            return WalkState::Continue;\n                        }\n                    }\n                }\n\n                // Filter out unwanted sizes if it is a file and we have been given size constraints.\n                if !config.size_constraints.is_empty() {\n                    if entry_path.is_file() {\n                        if let Some(metadata) = entry.metadata() {\n                            let file_size = metadata.len();\n                            if config\n                                .size_constraints\n                                .iter()\n                                .any(|sc| !sc.is_within(file_size))\n                            {\n                                return WalkState::Continue;\n                            }\n                        } else {\n                            return WalkState::Continue;\n                        }\n                    } else {\n                        return WalkState::Continue;\n                    }\n                }\n\n                // Filter out unwanted modification times\n                if !config.time_constraints.is_empty() {\n                    let mut matched = false;\n                    if let 
Some(metadata) = entry.metadata()\n                        && let Ok(modified) = metadata.modified()\n                    {\n                        matched = config\n                            .time_constraints\n                            .iter()\n                            .all(|tf| tf.applies_to(&modified));\n                    }\n                    if !matched {\n                        return WalkState::Continue;\n                    }\n                }\n\n                if config.is_printing()\n                    && let Some(ls_colors) = &config.ls_colors\n                {\n                    // Compute colors in parallel\n                    entry.style(ls_colors);\n                }\n\n                let send_result = tx.send(WorkerResult::Entry(entry));\n\n                if send_result.is_err() {\n                    return WalkState::Quit;\n                }\n\n                // Apply pruning.\n                if config.prune {\n                    return WalkState::Skip;\n                }\n\n                WalkState::Continue\n            })\n        });\n    }\n\n    /// Perform the recursive scan.\n    fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {\n        let config = &self.config;\n        let walker = self.build_walker(paths)?;\n\n        if config.ls_colors.is_some() && config.is_printing() {\n            let quit_flag = Arc::clone(&self.quit_flag);\n            let interrupt_flag = Arc::clone(&self.interrupt_flag);\n\n            ctrlc::set_handler(move || {\n                quit_flag.store(true, Ordering::Relaxed);\n\n                if interrupt_flag.fetch_or(true, Ordering::Relaxed) {\n                    // Ctrl-C has been pressed twice, exit NOW\n                    ExitCode::KilledBySigint.exit();\n                }\n            })\n            .unwrap();\n        }\n\n        let (tx, rx) = bounded(2 * config.threads);\n\n        let exit_code = thread::scope(|scope| {\n            // Spawn the receiver thread(s)\n  
          let receiver = scope.spawn(|| self.receive(rx));\n\n            // Spawn the sender threads.\n            self.spawn_senders(walker, tx);\n\n            receiver.join().unwrap()\n        });\n\n        if self.interrupt_flag.load(Ordering::Relaxed) {\n            Ok(ExitCode::KilledBySigint)\n        } else {\n            Ok(exit_code)\n        }\n    }\n}\n\nfn search_str_for_entry<'a>(\n    entry_path: &'a std::path::Path,\n    cwd: Option<&std::path::Path>,\n) -> Cow<'a, OsStr> {\n    if let Some(cwd) = cwd {\n        let abs_path = filesystem::make_absolute(entry_path, cwd);\n        Cow::Owned(abs_path.into_os_string())\n    } else {\n        match entry_path.file_name() {\n            Some(filename) => Cow::Borrowed(filename),\n            None => unreachable!(\n                \"Encountered file system entry without a file name. This should only \\\n                 happen for paths like 'foo/bar/..' or '/' which are not supposed to \\\n                 appear in a file system traversal.\"\n            ),\n        }\n    }\n}\n\n/// Recursively scan the given search path for files / pathnames matching the patterns.\n///\n/// If the `--exec` argument was supplied, this will create a thread pool for executing\n/// jobs in parallel from a given command line and the discovered paths. Otherwise, each\n/// path will simply be written to standard output.\npub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {\n    WorkerState::new(patterns, config).scan(paths)\n}\n"
  },
  {
    "path": "tests/testenv/mod.rs",
    "content": "use std::env;\nuse std::fs;\nuse std::io::{self, Write};\n#[cfg(unix)]\nuse std::os::unix;\n#[cfg(windows)]\nuse std::os::windows;\nuse std::path::{Path, PathBuf};\nuse std::process;\n\nuse tempfile::TempDir;\n\n/// Environment for the integration tests.\npub struct TestEnv {\n    /// Temporary working directory.\n    temp_dir: TempDir,\n\n    /// Path to the *fd* executable.\n    fd_exe: PathBuf,\n\n    /// Normalize each line by sorting the whitespace-separated words\n    normalize_line: bool,\n\n    /// Temporary directory for storing test config (global ignore file)\n    config_dir: Option<TempDir>,\n}\n\n/// Create the working directory and the test files.\nfn create_working_directory(\n    directories: &[&'static str],\n    files: &[&'static str],\n) -> Result<TempDir, io::Error> {\n    let temp_dir = tempfile::Builder::new().prefix(\"fd-tests\").tempdir()?;\n\n    {\n        let root = temp_dir.path();\n\n        // Pretend that this is a Git repository in order for `.gitignore` files to be respected\n        fs::create_dir_all(root.join(\".git\"))?;\n\n        for directory in directories {\n            fs::create_dir_all(root.join(directory))?;\n        }\n\n        for file in files {\n            fs::File::create(root.join(file))?;\n        }\n\n        #[cfg(unix)]\n        unix::fs::symlink(root.join(\"one/two\"), root.join(\"symlink\"))?;\n\n        // Note: creating symlinks on Windows requires the `SeCreateSymbolicLinkPrivilege` which\n        // is by default only granted for administrators.\n        #[cfg(windows)]\n        windows::fs::symlink_dir(root.join(\"one/two\"), root.join(\"symlink\"))?;\n\n        fs::File::create(root.join(\".fdignore\"))?.write_all(b\"fdignored.foo\")?;\n\n        fs::File::create(root.join(\".gitignore\"))?.write_all(b\"gitignored.foo\")?;\n    }\n\n    Ok(temp_dir)\n}\n\nfn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {\n    let config_dir = 
tempfile::Builder::new().prefix(\"fd-config\").tempdir()?;\n    let fd_dir = config_dir.path().join(\"fd\");\n    fs::create_dir(&fd_dir)?;\n    let mut ignore_file = fs::File::create(fd_dir.join(\"ignore\"))?;\n    ignore_file.write_all(ignore_file_content.as_bytes())?;\n\n    Ok(config_dir)\n}\n\n/// Find the *fd* executable.\nfn find_fd_exe() -> PathBuf {\n    // Read the location of the fd executable from the environment\n    PathBuf::from(env::var(\"CARGO_BIN_EXE_fd\").unwrap_or(env!(\"CARGO_BIN_EXE_fd\").to_string()))\n}\n\n/// Format an error message for when *fd* did not exit successfully.\nfn format_exit_error(args: &[&str], output: &process::Output) -> String {\n    format!(\n        \"`fd {}` did not exit successfully.\\nstdout:\\n---\\n{}---\\nstderr:\\n---\\n{}---\",\n        args.join(\" \"),\n        String::from_utf8_lossy(&output.stdout),\n        String::from_utf8_lossy(&output.stderr)\n    )\n}\n\n/// Format an error message for when the output of *fd* did not match the expected output.\nfn format_output_error(args: &[&str], expected: &str, actual: &str) -> String {\n    // Generate diff text.\n    let diff_text = diff::lines(expected, actual)\n        .into_iter()\n        .map(|diff| match diff {\n            diff::Result::Left(l) => format!(\"-{l}\"),\n            diff::Result::Both(l, _) => format!(\" {l}\"),\n            diff::Result::Right(r) => format!(\"+{r}\"),\n        })\n        .collect::<Vec<_>>()\n        .join(\"\\n\");\n\n    format!(\n        concat!(\n            \"`fd {}` did not produce the expected output.\\n\",\n            \"Showing diff between expected and actual:\\n{}\\n\"\n        ),\n        args.join(\" \"),\n        diff_text\n    )\n}\n\n/// Normalize the output for comparison.\nfn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {\n    // Split into lines and normalize separators.\n    let mut lines = s\n        .replace('\\0', \"NULL\\n\")\n        .lines()\n        .map(|line| {\n      
      let line = if trim_start { line.trim_start() } else { line };\n            let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);\n            if normalize_line {\n                let mut words: Vec<_> = line.split_whitespace().collect();\n                words.sort_unstable();\n                return words.join(\" \");\n            }\n            line\n        })\n        .collect::<Vec<_>>();\n\n    lines.sort();\n    lines.join(\"\\n\")\n}\n\n/// Trim whitespace from the beginning of each line.\nfn trim_lines(s: &str) -> String {\n    s.lines()\n        .map(|line| line.trim_start())\n        .fold(String::new(), |mut str, line| {\n            str.push_str(line);\n            str.push('\\n');\n            str\n        })\n}\n\nimpl TestEnv {\n    pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv {\n        let temp_dir = create_working_directory(directories, files).expect(\"working directory\");\n        let fd_exe = find_fd_exe();\n\n        TestEnv {\n            temp_dir,\n            fd_exe,\n            normalize_line: false,\n            config_dir: None,\n        }\n    }\n\n    pub fn normalize_line(self, normalize: bool) -> TestEnv {\n        TestEnv {\n            temp_dir: self.temp_dir,\n            fd_exe: self.fd_exe,\n            normalize_line: normalize,\n            config_dir: self.config_dir,\n        }\n    }\n\n    pub fn global_ignore_file(self, content: &str) -> TestEnv {\n        let config_dir =\n            create_config_directory_with_global_ignore(content).expect(\"config directory\");\n        TestEnv {\n            config_dir: Some(config_dir),\n            ..self\n        }\n    }\n\n    /// Create a broken symlink at the given path in the temp_dir.\n    pub fn create_broken_symlink<P: AsRef<Path>>(\n        &mut self,\n        link_path: P,\n    ) -> Result<PathBuf, io::Error> {\n        let root = self.test_root();\n        let broken_symlink_link = root.join(link_path);\n        {\n         
   let temp_target_dir = tempfile::Builder::new()\n                .prefix(\"fd-tests-broken-symlink\")\n                .tempdir()?;\n            let broken_symlink_target = temp_target_dir.path().join(\"broken_symlink_target\");\n            fs::File::create(&broken_symlink_target)?;\n            #[cfg(unix)]\n            unix::fs::symlink(&broken_symlink_target, &broken_symlink_link)?;\n            #[cfg(windows)]\n            windows::fs::symlink_file(&broken_symlink_target, &broken_symlink_link)?;\n        }\n        Ok(broken_symlink_link)\n    }\n\n    /// Get the root directory for the tests.\n    pub fn test_root(&self) -> PathBuf {\n        self.temp_dir.path().to_path_buf()\n    }\n\n    /// Get the path of the fd executable.\n    #[cfg_attr(windows, allow(unused))]\n    pub fn test_exe(&self) -> &PathBuf {\n        &self.fd_exe\n    }\n\n    /// Get the root directory of the file system.\n    pub fn system_root(&self) -> PathBuf {\n        let mut components = self.temp_dir.path().components();\n        PathBuf::from(components.next().expect(\"root directory\").as_os_str())\n    }\n\n    /// Assert that calling *fd* in the specified path under the root working directory,\n    /// and with the specified arguments produces the expected output.\n    pub fn assert_success_and_get_output<P: AsRef<Path>>(\n        &self,\n        path: P,\n        args: &[&str],\n    ) -> process::Output {\n        // Run *fd*.\n        let output = self.run_command(path.as_ref(), args);\n\n        // Check for exit status.\n        if !output.status.success() {\n            panic!(\"{}\", format_exit_error(args, &output));\n        }\n\n        output\n    }\n\n    pub fn assert_success_and_get_normalized_output<P: AsRef<Path>>(\n        &self,\n        path: P,\n        args: &[&str],\n    ) -> String {\n        let output = self.assert_success_and_get_output(path, args);\n        normalize_output(\n            &String::from_utf8_lossy(&output.stdout),\n            false,\n 
           self.normalize_line,\n        )\n    }\n\n    /// Assert that calling *fd* with the specified arguments produces the expected output.\n    pub fn assert_output(&self, args: &[&str], expected: &str) {\n        self.assert_output_subdirectory(\".\", args, expected)\n    }\n\n    /// Similar to assert_output, but able to handle non-utf8 output\n    #[cfg(all(unix, not(target_os = \"macos\")))]\n    pub fn assert_output_raw(&self, args: &[&str], expected: &[u8]) {\n        let output = self.assert_success_and_get_output(\".\", args);\n\n        assert_eq!(expected, &output.stdout[..]);\n    }\n\n    /// Assert that calling *fd* in the specified path under the root working directory,\n    /// and with the specified arguments produces the expected output.\n    pub fn assert_output_subdirectory<P: AsRef<Path>>(\n        &self,\n        path: P,\n        args: &[&str],\n        expected: &str,\n    ) {\n        // Normalize both expected and actual output.\n        let expected = normalize_output(expected, true, self.normalize_line);\n        let actual = self.assert_success_and_get_normalized_output(path, args);\n\n        // Compare actual output to expected output.\n        if expected != actual {\n            panic!(\"{}\", format_output_error(args, &expected, &actual));\n        }\n    }\n\n    /// Assert that calling *fd* with the specified arguments produces the expected error,\n    /// and does not succeed.\n    pub fn assert_failure_with_error(&self, args: &[&str], expected: &str) {\n        let status = self.assert_error_subdirectory(\".\", args, Some(expected));\n        if status.success() {\n            panic!(\"error '{expected}' did not occur.\");\n        }\n    }\n\n    /// Assert that calling *fd* with the specified arguments does not succeed.\n    pub fn assert_failure(&self, args: &[&str]) {\n        let status = self.assert_error_subdirectory(\".\", args, None);\n        if status.success() {\n            panic!(\"Failure did not occur as 
expected.\");\n        }\n    }\n\n    /// Assert that calling *fd* with the specified arguments produces the expected error.\n    pub fn assert_error(&self, args: &[&str], expected: &str) -> process::ExitStatus {\n        self.assert_error_subdirectory(\".\", args, Some(expected))\n    }\n\n    fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {\n        // Setup *fd* command.\n        let mut cmd = process::Command::new(&self.fd_exe);\n        cmd.current_dir(self.temp_dir.path().join(path));\n        if let Some(config_dir) = &self.config_dir {\n            cmd.env(\"XDG_CONFIG_HOME\", config_dir.path());\n        } else {\n            cmd.arg(\"--no-global-ignore-file\");\n        }\n        // Make sure LS_COLORS is unset to ensure consistent\n        // color output\n        cmd.env(\"LS_COLORS\", \"\");\n        cmd.args(args);\n\n        // Run *fd*.\n        cmd.output().expect(\"fd output\")\n    }\n\n    /// Assert that calling *fd* in the specified path under the root working directory,\n    /// and with the specified arguments produces an error with the expected message.\n    fn assert_error_subdirectory<P: AsRef<Path>>(\n        &self,\n        path: P,\n        args: &[&str],\n        expected: Option<&str>,\n    ) -> process::ExitStatus {\n        let output = self.run_command(path.as_ref(), args);\n\n        if let Some(expected) = expected {\n            // Normalize both expected and actual output.\n            let expected_error = trim_lines(expected);\n            let actual_err = trim_lines(&String::from_utf8_lossy(&output.stderr));\n\n            // Compare actual output to expected output.\n            if !actual_err.trim_start().starts_with(&expected_error) {\n                panic!(\n                    \"{}\",\n                    format_output_error(args, &expected_error, &actual_err)\n                );\n            }\n        }\n\n        output.status\n    }\n}\n"
  },
  {
    "path": "tests/tests.rs",
    "content": "mod testenv;\n\n#[cfg(unix)]\nuse nix::unistd::{Gid, Group, Uid, User};\nuse std::fs;\nuse std::io::Write;\nuse std::path::Path;\nuse std::time::{Duration, SystemTime};\nuse test_case::test_case;\n\nuse jiff::Timestamp;\nuse normpath::PathExt;\nuse regex::escape;\n\nuse crate::testenv::TestEnv;\n\nstatic DEFAULT_DIRS: &[&str] = &[\"one/two/three\", \"one/two/three/directory_foo\"];\n\nstatic DEFAULT_FILES: &[&str] = &[\n    \"a.foo\",\n    \"one/b.foo\",\n    \"one/two/c.foo\",\n    \"one/two/C.Foo2\",\n    \"one/two/three/d.foo\",\n    \"fdignored.foo\",\n    \"gitignored.foo\",\n    \".hidden.foo\",\n    \"e1 e2\",\n];\n\n#[allow(clippy::let_and_return)]\nfn get_absolute_root_path(env: &TestEnv) -> String {\n    let path = env\n        .test_root()\n        .normalize()\n        .expect(\"absolute path\")\n        .as_path()\n        .to_str()\n        .expect(\"string\")\n        .to_string();\n\n    #[cfg(windows)]\n    let path = path.trim_start_matches(r\"\\\\?\\\").to_string();\n\n    path\n}\n\n#[cfg(test)]\nfn get_test_env_with_abs_path(dirs: &[&'static str], files: &[&'static str]) -> (TestEnv, String) {\n    let env = TestEnv::new(dirs, files);\n    let root_path = get_absolute_root_path(&env);\n    (env, root_path)\n}\n\n#[cfg(test)]\nfn create_file_with_size<P: AsRef<Path>>(path: P, size_in_bytes: usize) {\n    let content = \"#\".repeat(size_in_bytes);\n    let mut f = fs::File::create::<P>(path).unwrap();\n    f.write_all(content.as_bytes()).unwrap();\n}\n\n/// Simple test\n#[test]\nfn test_simple() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(&[\"a.foo\"], \"a.foo\");\n    te.assert_output(&[\"b.foo\"], \"one/b.foo\");\n    te.assert_output(&[\"d.foo\"], \"one/two/three/d.foo\");\n\n    te.assert_output(\n        &[\"foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\nstatic 
AND_EXTRA_FILES: &[&str] = &[\n    \"a.foo\",\n    \"one/b.foo\",\n    \"one/two/c.foo\",\n    \"one/two/C.Foo2\",\n    \"one/two/three/baz-quux\",\n    \"one/two/three/Baz-Quux2\",\n    \"one/two/three/d.foo\",\n    \"fdignored.foo\",\n    \"gitignored.foo\",\n    \".hidden.foo\",\n    \"A-B.jpg\",\n    \"A-C.png\",\n    \"B-A.png\",\n    \"B-C.png\",\n    \"C-A.jpg\",\n    \"C-B.png\",\n    \"e1 e2\",\n];\n\n/// AND test\n#[test]\nfn test_and_basic() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"foo\", \"--and\", \"c\"],\n        \"one/two/C.Foo2\n        one/two/c.foo\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"f\", \"--and\", \"[ad]\", \"--and\", \"[_]\"],\n        \"one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"f\", \"--and\", \"[ad]\", \"--and\", \"[.]\"],\n        \"a.foo\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(&[\"Foo\", \"--and\", \"C\"], \"one/two/C.Foo2\");\n\n    te.assert_output(&[\"foo\", \"--and\", \"asdasdasdsadasd\"], \"\");\n}\n\n#[test]\nfn test_and_empty_pattern() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n    te.assert_output(&[\"Foo\", \"--and\", \"2\", \"--and\", \"\"], \"one/two/C.Foo2\");\n}\n\n#[test]\nfn test_and_bad_pattern() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_failure(&[\"Foo\", \"--and\", \"2\", \"--and\", \"[\", \"--and\", \"C\"]);\n    te.assert_failure(&[\"Foo\", \"--and\", \"[\", \"--and\", \"2\", \"--and\", \"C\"]);\n    te.assert_failure(&[\"Foo\", \"--and\", \"2\", \"--and\", \"C\", \"--and\", \"[\"]);\n    te.assert_failure(&[\"[\", \"--and\", \"2\", \"--and\", \"C\", \"--and\", \"Foo\"]);\n}\n\n#[test]\nfn test_and_pattern_starts_with_dash() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"baz\", \"--and\", \"quux\"],\n        \"one/two/three/Baz-Quux2\n        
one/two/three/baz-quux\",\n    );\n    te.assert_output(\n        &[\"baz\", \"--and\", \"-\"],\n        \"one/two/three/Baz-Quux2\n        one/two/three/baz-quux\",\n    );\n    te.assert_output(\n        &[\"Quu\", \"--and\", \"x\", \"--and\", \"-\"],\n        \"one/two/three/Baz-Quux2\",\n    );\n}\n\n#[test]\nfn test_and_plus_extension() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\n            \"A\",\n            \"--and\",\n            \"B\",\n            \"--extension\",\n            \"jpg\",\n            \"--extension\",\n            \"png\",\n        ],\n        \"A-B.jpg\n        B-A.png\",\n    );\n\n    te.assert_output(\n        &[\n            \"A\",\n            \"--extension\",\n            \"jpg\",\n            \"--and\",\n            \"B\",\n            \"--extension\",\n            \"png\",\n        ],\n        \"A-B.jpg\n        B-A.png\",\n    );\n}\n\n#[test]\nfn test_and_plus_type() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"c\", \"--type\", \"d\", \"--and\", \"foo\"],\n        \"one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"c\", \"--type\", \"f\", \"--and\", \"foo\"],\n        \"one/two/C.Foo2\n        one/two/c.foo\",\n    );\n}\n\n#[test]\nfn test_and_plus_glob() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(&[\"*foo\", \"--glob\", \"--and\", \"c*\"], \"one/two/c.foo\");\n}\n\n#[test]\nfn test_and_plus_fixed_strings() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"foo\", \"--fixed-strings\", \"--and\", \"c\", \"--and\", \".\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--fixed-strings\", \"--and\", \"[c]\", \"--and\", \".\"],\n        \"\",\n    );\n\n    te.assert_output(\n        &[\"Foo\", \"--fixed-strings\", \"--and\", \"C\", \"--and\", 
\".\"],\n        \"one/two/C.Foo2\",\n    );\n}\n\n#[test]\nfn test_and_plus_ignore_case() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"Foo\", \"--ignore-case\", \"--and\", \"C\", \"--and\", \"[.]\"],\n        \"one/two/C.Foo2\n        one/two/c.foo\",\n    );\n}\n\n#[test]\nfn test_and_plus_case_sensitive() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\"foo\", \"--case-sensitive\", \"--and\", \"c\", \"--and\", \"[.]\"],\n        \"one/two/c.foo\",\n    );\n}\n\n#[test]\nfn test_and_plus_full_path() {\n    let te = TestEnv::new(DEFAULT_DIRS, AND_EXTRA_FILES);\n\n    te.assert_output(\n        &[\n            \"three\",\n            \"--full-path\",\n            \"--and\",\n            \"_foo\",\n            \"--and\",\n            r\"[/\\\\]dir\",\n        ],\n        \"one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\n            \"three\",\n            \"--full-path\",\n            \"--and\",\n            r\"[/\\\\]two\",\n            \"--and\",\n            r\"[/\\\\]dir\",\n        ],\n        \"one/two/three/directory_foo/\",\n    );\n}\n\n/// Test each pattern type with an empty pattern.\n#[test]\nfn test_empty_pattern() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    let expected = \"a.foo\n    e1 e2\n    one/\n    one/b.foo\n    one/two/\n    one/two/c.foo\n    one/two/C.Foo2\n    one/two/three/\n    one/two/three/d.foo\n    one/two/three/directory_foo/\n    symlink\";\n\n    te.assert_output(&[\"--regex\"], expected);\n    te.assert_output(&[\"--fixed-strings\"], expected);\n    te.assert_output(&[\"--glob\"], expected);\n}\n\n/// Test multiple directory searches\n#[test]\nfn test_multi_file() {\n    let dirs = &[\"test1\", \"test2\"];\n    let files = &[\"test1/a.foo\", \"test1/b.foo\", \"test2/a.foo\"];\n    let te = TestEnv::new(dirs, files);\n    te.assert_output(\n        &[\"a.foo\", \"test1\", 
\"test2\"],\n        \"test1/a.foo\n        test2/a.foo\",\n    );\n\n    te.assert_output(\n        &[\"\", \"test1\", \"test2\"],\n        \"test1/a.foo\n        test2/a.foo\n        test1/b.foo\",\n    );\n\n    te.assert_output(&[\"a.foo\", \"test1\"], \"test1/a.foo\");\n\n    te.assert_output(&[\"b.foo\", \"test1\", \"test2\"], \"test1/b.foo\");\n}\n\n/// Test search over multiple directory with missing\n#[test]\nfn test_multi_file_with_missing() {\n    let dirs = &[\"real\"];\n    let files = &[\"real/a.foo\", \"real/b.foo\"];\n    let te = TestEnv::new(dirs, files);\n    te.assert_output(&[\"a.foo\", \"real\", \"fake\"], \"real/a.foo\");\n\n    te.assert_error(\n        &[\"a.foo\", \"real\", \"fake\"],\n        \"[fd error]: Search path 'fake' is not a directory.\",\n    );\n\n    te.assert_output(\n        &[\"\", \"real\", \"fake\"],\n        \"real/a.foo\n        real/b.foo\",\n    );\n\n    te.assert_output(\n        &[\"\", \"real\", \"fake1\", \"fake2\"],\n        \"real/a.foo\n        real/b.foo\",\n    );\n\n    te.assert_error(\n        &[\"\", \"real\", \"fake1\", \"fake2\"],\n        \"[fd error]: Search path 'fake1' is not a directory.\n        [fd error]: Search path 'fake2' is not a directory.\",\n    );\n\n    te.assert_failure_with_error(\n        &[\"\", \"fake1\", \"fake2\"],\n        \"[fd error]: Search path 'fake1' is not a directory.\n        [fd error]: Search path 'fake2' is not a directory.\n        [fd error]: No valid search paths given.\",\n    );\n}\n\n/// Explicit root path\n#[test]\nfn test_explicit_root_path() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"foo\", \"one\"],\n        \"one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"one/two/three\"],\n        \"one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    
te.assert_output_subdirectory(\n        \"one/two/\",\n        &[\"foo\", \"../../\"],\n        \"../../a.foo\n        ../../one/b.foo\n        ../../one/two/c.foo\n        ../../one/two/C.Foo2\n        ../../one/two/three/d.foo\n        ../../one/two/three/directory_foo/\",\n    );\n\n    te.assert_output_subdirectory(\n        \"one/two/three\",\n        &[\"\", \"..\"],\n        \"../c.foo\n        ../C.Foo2\n        ../three/\n        ../three/d.foo\n        ../three/directory_foo/\",\n    );\n}\n\n/// Regex searches\n#[test]\nfn test_regex_searches() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"[a-c].foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\",\n    );\n\n    te.assert_output(\n        &[\"--case-sensitive\", \"[a-c].foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\",\n    );\n}\n\n/// Smart case\n#[test]\nfn test_smart_case() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"c.foo\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\",\n    );\n\n    te.assert_output(&[\"C.Foo\"], \"one/two/C.Foo2\");\n\n    te.assert_output(&[\"Foo\"], \"one/two/C.Foo2\");\n\n    // Only literal uppercase chars should trigger case sensitivity.\n    te.assert_output(\n        &[\"\\\\Ac\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\",\n    );\n    te.assert_output(&[\"\\\\AC\"], \"one/two/C.Foo2\");\n}\n\n/// Case sensitivity (--case-sensitive)\n#[test]\nfn test_case_sensitive() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(&[\"--case-sensitive\", \"c.foo\"], \"one/two/c.foo\");\n\n    te.assert_output(&[\"--case-sensitive\", \"C.Foo\"], \"one/two/C.Foo2\");\n\n    te.assert_output(\n        &[\"--ignore-case\", \"--case-sensitive\", \"C.Foo\"],\n        \"one/two/C.Foo2\",\n    );\n}\n\n/// Case insensitivity (--ignore-case)\n#[test]\nfn test_case_insensitive() {\n    
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--ignore-case\", \"C.Foo\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\",\n    );\n\n    te.assert_output(\n        &[\"--case-sensitive\", \"--ignore-case\", \"C.Foo\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\",\n    );\n}\n\n/// Glob-based searches (--glob)\n#[test]\nfn test_glob_searches() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--glob\", \"*.foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(\n        &[\"--glob\", \"[a-c].foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\",\n    );\n\n    te.assert_output(\n        &[\"--glob\", \"[a-c].foo*\"],\n        \"a.foo\n        one/b.foo\n        one/two/C.Foo2\n        one/two/c.foo\",\n    );\n}\n\n/// Glob-based searches (--glob) in combination with full path searches (--full-path)\n#[cfg(not(windows))] // TODO: make this work on Windows\n#[test]\nfn test_full_path_glob_searches() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--glob\", \"--full-path\", \"**/one/**/*.foo\"],\n        \"one/b.foo\n        one/two/c.foo\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(\n        &[\"--glob\", \"--full-path\", \"**/one/*/*.foo\"],\n        \" one/two/c.foo\",\n    );\n\n    te.assert_output(\n        &[\"--glob\", \"--full-path\", \"**/one/*/*/*.foo\"],\n        \" one/two/three/d.foo\",\n    );\n}\n\n#[test]\nfn test_smart_case_glob_searches() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--glob\", \"c.foo*\"],\n        \"one/two/C.Foo2\n        one/two/c.foo\",\n    );\n\n    te.assert_output(&[\"--glob\", \"C.Foo*\"], \"one/two/C.Foo2\");\n}\n\n/// Glob-based searches (--glob) in combination with --case-sensitive\n#[test]\nfn 
test_case_sensitive_glob_searches() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(&[\"--glob\", \"--case-sensitive\", \"c.foo*\"], \"one/two/c.foo\");\n}\n\n/// Glob-based searches (--glob) in combination with --extension\n#[test]\nfn test_glob_searches_with_extension() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--glob\", \"--extension\", \"foo2\", \"[a-z].*\"],\n        \"one/two/C.Foo2\",\n    );\n}\n\n/// Make sure that --regex overrides --glob\n#[test]\nfn test_regex_overrides_glob() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(&[\"--glob\", \"--regex\", \"Foo2$\"], \"one/two/C.Foo2\");\n}\n\n/// Full path search (--full-path)\n#[test]\nfn test_full_path() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    let root = te.system_root();\n    let prefix = escape(&root.to_string_lossy());\n\n    te.assert_output(\n        &[\"--full-path\", &format!(\"^{prefix}.*three.*foo$\")],\n        \"one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// Hidden files (--hidden)\n#[test]\nfn test_hidden() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--hidden\", \"foo\"],\n        \".hidden.foo\n        a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// Hidden file attribute on Windows\n#[cfg(windows)]\n#[test]\nfn test_hidden_file_attribute() {\n    use std::os::windows::fs::OpenOptionsExt;\n\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    // https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-setfileattributesa\n    const FILE_ATTRIBUTE_HIDDEN: u32 = 2;\n\n    fs::OpenOptions::new()\n        .create(true)\n        .write(true)\n        .attributes(FILE_ATTRIBUTE_HIDDEN)\n        
.open(te.test_root().join(\"hidden-file.txt\"))\n        .unwrap();\n\n    te.assert_output(&[\"--hidden\", \"hidden-file.txt\"], \"hidden-file.txt\");\n    te.assert_output(&[\"hidden-file.txt\"], \"\");\n}\n\n/// Ignored files (--no-ignore)\n#[test]\nfn test_no_ignore() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--no-ignore\", \"foo\"],\n        \"a.foo\n        fdignored.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"--hidden\", \"--no-ignore\", \"foo\"],\n        \".hidden.foo\n        a.foo\n        fdignored.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// .gitignore and .fdignore\n#[test]\nfn test_gitignore_and_fdignore() {\n    let files = &[\n        \"ignored-by-nothing\",\n        \"ignored-by-fdignore\",\n        \"ignored-by-gitignore\",\n        \"ignored-by-both\",\n    ];\n    let te = TestEnv::new(&[], files);\n\n    fs::File::create(te.test_root().join(\".fdignore\"))\n        .unwrap()\n        .write_all(b\"ignored-by-fdignore\\nignored-by-both\")\n        .unwrap();\n\n    fs::File::create(te.test_root().join(\".gitignore\"))\n        .unwrap()\n        .write_all(b\"ignored-by-gitignore\\nignored-by-both\")\n        .unwrap();\n\n    te.assert_output(&[\"ignored\"], \"ignored-by-nothing\");\n\n    te.assert_output(\n        &[\"--no-ignore-vcs\", \"ignored\"],\n        \"ignored-by-nothing\n        ignored-by-gitignore\",\n    );\n\n    te.assert_output(\n        &[\"--no-ignore\", \"ignored\"],\n        \"ignored-by-nothing\n        ignored-by-fdignore\n        ignored-by-gitignore\n        ignored-by-both\",\n    );\n}\n\n/// Ignore parent ignore files (--no-ignore-parent)\n#[test]\nfn 
test_no_ignore_parent() {\n    let dirs = &[\"inner\"];\n    let files = &[\n        \"inner/parent-ignored\",\n        \"inner/child-ignored\",\n        \"inner/not-ignored\",\n    ];\n    let te = TestEnv::new(dirs, files);\n\n    // Ignore 'parent-ignored' in root\n    fs::File::create(te.test_root().join(\".gitignore\"))\n        .unwrap()\n        .write_all(b\"parent-ignored\")\n        .unwrap();\n    // Ignore 'child-ignored' in inner\n    fs::File::create(te.test_root().join(\"inner/.gitignore\"))\n        .unwrap()\n        .write_all(b\"child-ignored\")\n        .unwrap();\n\n    te.assert_output_subdirectory(\"inner\", &[], \"not-ignored\");\n\n    te.assert_output_subdirectory(\n        \"inner\",\n        &[\"--no-ignore-parent\"],\n        \"parent-ignored\n        not-ignored\",\n    );\n}\n\n/// Ignore parent ignore files (--no-ignore-parent) with an inner git repo\n#[test]\nfn test_no_ignore_parent_inner_git() {\n    let dirs = &[\"inner\"];\n    let files = &[\n        \"inner/parent-ignored\",\n        \"inner/child-ignored\",\n        \"inner/not-ignored\",\n    ];\n    let te = TestEnv::new(dirs, files);\n\n    // Make the inner folder also appear as a git repo\n    fs::create_dir_all(te.test_root().join(\"inner/.git\")).unwrap();\n\n    // Ignore 'parent-ignored' in root\n    fs::File::create(te.test_root().join(\".gitignore\"))\n        .unwrap()\n        .write_all(b\"parent-ignored\")\n        .unwrap();\n    // Ignore 'child-ignored' in inner\n    fs::File::create(te.test_root().join(\"inner/.gitignore\"))\n        .unwrap()\n        .write_all(b\"child-ignored\")\n        .unwrap();\n\n    te.assert_output_subdirectory(\n        \"inner\",\n        &[],\n        \"not-ignored\n        parent-ignored\",\n    );\n\n    te.assert_output_subdirectory(\n        \"inner\",\n        &[\"--no-ignore-parent\"],\n        \"not-ignored\n        parent-ignored\",\n    );\n}\n\n/// Precedence of .fdignore files\n#[test]\nfn 
test_custom_ignore_precedence() {\n    let dirs = &[\"inner\"];\n    let files = &[\"inner/foo\"];\n    let te = TestEnv::new(dirs, files);\n\n    // Ignore 'foo' via .gitignore\n    fs::File::create(te.test_root().join(\"inner/.gitignore\"))\n        .unwrap()\n        .write_all(b\"foo\")\n        .unwrap();\n\n    // Whitelist 'foo' via .fdignore\n    fs::File::create(te.test_root().join(\".fdignore\"))\n        .unwrap()\n        .write_all(b\"!foo\")\n        .unwrap();\n\n    te.assert_output(&[\"foo\"], \"inner/foo\");\n\n    te.assert_output(&[\"--no-ignore-vcs\", \"foo\"], \"inner/foo\");\n\n    te.assert_output(&[\"--no-ignore\", \"foo\"], \"inner/foo\");\n}\n\n/// Don't require git to respect gitignore (--no-require-git)\n#[test]\nfn test_respect_ignore_files() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    // Not in a git repo anymore\n    fs::remove_dir(te.test_root().join(\".git\")).unwrap();\n\n    // don't respect gitignore because we're not in a git repo\n    te.assert_output(\n        &[\"foo\"],\n        \"a.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    // respect gitignore because we set `--no-require-git`\n    te.assert_output(\n        &[\"--no-require-git\", \"foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    // make sure overriding works\n    te.assert_output(\n        &[\"--no-require-git\", \"--require-git\", \"foo\"],\n        \"a.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"--no-require-git\", \"--no-ignore\", \"foo\"],\n        \"a.foo\n        gitignored.foo\n        fdignored.foo\n        one/b.foo\n 
       one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// VCS ignored files (--no-ignore-vcs)\n#[test]\nfn test_no_ignore_vcs() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--no-ignore-vcs\", \"foo\"],\n        \"a.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// Test that --no-ignore-vcs still respects .fdignored in parent directory\n#[test]\nfn test_no_ignore_vcs_child_dir() {\n    let te = TestEnv::new(\n        &[\"inner\"],\n        &[\"inner/fdignored.foo\", \"inner/foo\", \"inner/gitignored.foo\"],\n    );\n\n    te.assert_output_subdirectory(\n        \"inner\",\n        &[\"--no-ignore-vcs\", \"foo\"],\n        \"foo\n        gitignored.foo\",\n    );\n}\n\n/// Custom ignore files (--ignore-file)\n#[test]\nfn test_custom_ignore_files() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    // Ignore 'C.Foo2' and everything in 'three'.\n    fs::File::create(te.test_root().join(\"custom.ignore\"))\n        .unwrap()\n        .write_all(b\"C.Foo2\\nthree\")\n        .unwrap();\n\n    te.assert_output(\n        &[\"--ignore-file\", \"custom.ignore\", \"foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\",\n    );\n}\n\n/// Ignored files with ripgrep aliases (-u / -uu)\n#[test]\nfn test_no_ignore_aliases() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"-u\", \"foo\"],\n        \".hidden.foo\n        a.foo\n        fdignored.foo\n        gitignored.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n#[cfg(not(windows))]\n#[test]\nfn test_global_ignore() {\n    let te = TestEnv::new(DEFAULT_DIRS, 
DEFAULT_FILES).global_ignore_file(\"one\");\n    te.assert_output(\n        &[],\n        \"a.foo\n    e1 e2\n    symlink\",\n    );\n}\n\n#[cfg(not(windows))]\n#[test_case(\"--unrestricted\", \".hidden.foo\na.foo\nfdignored.foo\ngitignored.foo\none/b.foo\none/two/c.foo\none/two/C.Foo2\none/two/three/d.foo\none/two/three/directory_foo/\"; \"unrestricted\")]\n#[test_case(\"--no-ignore\", \"a.foo\nfdignored.foo\ngitignored.foo\none/b.foo\none/two/c.foo\none/two/C.Foo2\none/two/three/d.foo\none/two/three/directory_foo/\"; \"no-ignore\")]\n#[test_case(\"--no-global-ignore-file\", \"a.foo\none/b.foo\none/two/c.foo\none/two/C.Foo2\none/two/three/d.foo\none/two/three/directory_foo/\"; \"no-global-ignore-file\")]\nfn test_no_global_ignore(flag: &str, expected_output: &str) {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file(\"one\");\n    te.assert_output(&[flag, \"foo\"], expected_output);\n}\n\n/// Symlinks (--follow)\n#[test]\nfn test_follow() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--follow\", \"c.foo\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\n        symlink/c.foo\n        symlink/C.Foo2\",\n    );\n}\n\n// File system boundaries (--one-file-system)\n// Limited to Unix because, to the best of my knowledge, there is no easy way to test a use case\n// file systems mounted into the tree on Windows.\n// Not limiting depth causes massive delay under Darwin, see BurntSushi/ripgrep#1429\n#[test]\n#[cfg(unix)]\nfn test_file_system_boundaries() {\n    // Helper function to get the device ID for a given path\n    // Inspired by https://github.com/BurntSushi/ripgrep/blob/8892bf648cfec111e6e7ddd9f30e932b0371db68/ignore/src/walk.rs#L1693\n    fn device_num(path: impl AsRef<Path>) -> u64 {\n        use std::os::unix::fs::MetadataExt;\n\n        path.as_ref().metadata().map(|md| md.dev()).unwrap()\n    }\n\n    // Can't simulate file system boundaries\n    let te = TestEnv::new(&[], 
&[]);\n\n    let dev_null = Path::new(\"/dev/null\");\n\n    // /dev/null should exist in all sane Unixes. Skip if it doesn't exist for some reason.\n    // Also skip should it be on the same device as the root partition for some reason.\n    if !dev_null.is_file() || device_num(dev_null) == device_num(\"/\") {\n        return;\n    }\n\n    te.assert_output(\n        &[\"--full-path\", \"--max-depth\", \"2\", \"^/dev/null$\", \"/\"],\n        \"/dev/null\",\n    );\n    te.assert_output(\n        &[\n            \"--one-file-system\",\n            \"--full-path\",\n            \"--max-depth\",\n            \"2\",\n            \"^/dev/null$\",\n            \"/\",\n        ],\n        \"\",\n    );\n}\n\n#[test]\nfn test_follow_broken_symlink() {\n    let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    te.create_broken_symlink(\"broken_symlink\")\n        .expect(\"Failed to create broken symlink.\");\n\n    te.assert_output(\n        &[\"symlink\"],\n        \"broken_symlink\n        symlink\",\n    );\n    te.assert_output(\n        &[\"--type\", \"symlink\", \"symlink\"],\n        \"broken_symlink\n        symlink\",\n    );\n\n    te.assert_output(&[\"--type\", \"file\", \"symlink\"], \"\");\n\n    te.assert_output(\n        &[\"--follow\", \"--type\", \"symlink\", \"symlink\"],\n        \"broken_symlink\",\n    );\n    te.assert_output(&[\"--follow\", \"--type\", \"file\", \"symlink\"], \"\");\n}\n\n/// Null separator (--print0)\n#[test]\nfn test_print0() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--print0\", \"foo\"],\n        \"./a.fooNULL\n        ./one/b.fooNULL\n        ./one/two/C.Foo2NULL\n        ./one/two/c.fooNULL\n        ./one/two/three/d.fooNULL\n        ./one/two/three/directory_foo/NULL\",\n    );\n}\n\n/// Maximum depth (--max-depth)\n#[test]\nfn test_max_depth() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--max-depth\", \"3\"],\n  
      \"a.foo\n        e1 e2\n        one/\n        one/b.foo\n        one/two/\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/\n        symlink\",\n    );\n\n    te.assert_output(\n        &[\"--max-depth\", \"2\"],\n        \"a.foo\n        e1 e2\n        one/\n        one/b.foo\n        one/two/\n        symlink\",\n    );\n\n    te.assert_output(\n        &[\"--max-depth\", \"1\"],\n        \"a.foo\n        e1 e2\n        one/\n        symlink\",\n    );\n}\n\n/// Minimum depth (--min-depth)\n#[test]\nfn test_min_depth() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--min-depth\", \"3\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/\n        one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"--min-depth\", \"4\"],\n        \"one/two/three/d.foo\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// Exact depth (--exact-depth)\n#[test]\nfn test_exact_depth() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--exact-depth\", \"3\"],\n        \"one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/\",\n    );\n}\n\n/// Pruning (--prune)\n#[test]\nfn test_prune() {\n    let dirs = &[\"foo/bar\", \"bar/foo\", \"baz\"];\n    let files = &[\n        \"foo/foo.file\",\n        \"foo/bar/foo.file\",\n        \"bar/foo.file\",\n        \"bar/foo/foo.file\",\n        \"baz/foo.file\",\n    ];\n\n    let te = TestEnv::new(dirs, files);\n\n    te.assert_output(\n        &[\"foo\"],\n        \"foo/\n        foo/foo.file\n        foo/bar/foo.file\n        bar/foo.file\n        bar/foo/\n        bar/foo/foo.file\n        baz/foo.file\",\n    );\n\n    te.assert_output(\n        &[\"--prune\", \"foo\"],\n        \"foo/\n        bar/foo/\n        bar/foo.file\n        baz/foo.file\",\n    );\n}\n\n/// Absolute paths (--absolute-path)\n#[test]\nfn test_absolute_path() 
{\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--absolute-path\"],\n        &format!(\n            \"{abs_path}/a.foo\n            {abs_path}/e1 e2\n            {abs_path}/one/\n            {abs_path}/one/b.foo\n            {abs_path}/one/two/\n            {abs_path}/one/two/c.foo\n            {abs_path}/one/two/C.Foo2\n            {abs_path}/one/two/three/\n            {abs_path}/one/two/three/d.foo\n            {abs_path}/one/two/three/directory_foo/\n            {abs_path}/symlink\",\n            abs_path = &abs_path\n        ),\n    );\n\n    te.assert_output(\n        &[\"--absolute-path\", \"foo\"],\n        &format!(\n            \"{abs_path}/a.foo\n            {abs_path}/one/b.foo\n            {abs_path}/one/two/c.foo\n            {abs_path}/one/two/C.Foo2\n            {abs_path}/one/two/three/d.foo\n            {abs_path}/one/two/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n\n/// Show absolute paths if the path argument is absolute\n#[test]\nfn test_implicit_absolute_path() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"foo\", &abs_path],\n        &format!(\n            \"{abs_path}/a.foo\n            {abs_path}/one/b.foo\n            {abs_path}/one/two/c.foo\n            {abs_path}/one/two/C.Foo2\n            {abs_path}/one/two/three/d.foo\n            {abs_path}/one/two/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n\n/// Absolute paths should be normalized\n#[test]\nfn test_normalized_absolute_path() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output_subdirectory(\n        \"one\",\n        &[\"--absolute-path\", \"foo\", \"..\"],\n        &format!(\n            \"{abs_path}/a.foo\n            {abs_path}/one/b.foo\n            {abs_path}/one/two/c.foo\n            
{abs_path}/one/two/C.Foo2\n            {abs_path}/one/two/three/d.foo\n            {abs_path}/one/two/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n\n/// File type filter (--type)\n#[test]\nfn test_type() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--type\", \"f\"],\n        \"a.foo\n        e1 e2\n        one/b.foo\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(&[\"--type\", \"f\", \"e1\"], \"e1 e2\");\n\n    te.assert_output(\n        &[\"--type\", \"d\"],\n        \"one/\n        one/two/\n        one/two/three/\n        one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"--type\", \"d\", \"--type\", \"l\"],\n        \"one/\n        one/two/\n        one/two/three/\n        one/two/three/directory_foo/\n        symlink\",\n    );\n\n    te.assert_output(&[\"--type\", \"l\"], \"symlink\");\n}\n\n/// Test `--type executable`\n#[cfg(unix)]\n#[test]\nfn test_type_executable() {\n    use std::os::unix::fs::OpenOptionsExt;\n\n    // This test assumes the current user isn't root\n    // (otherwise if the executable bit is set for any level, it is executable for the current\n    // user)\n    if Uid::current().is_root() {\n        return;\n    }\n\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    fs::OpenOptions::new()\n        .create_new(true)\n        .truncate(true)\n        .write(true)\n        .mode(0o777)\n        .open(te.test_root().join(\"executable-file.sh\"))\n        .unwrap();\n\n    fs::OpenOptions::new()\n        .create(true)\n        .truncate(true)\n        .write(true)\n        .mode(0o645)\n        .open(te.test_root().join(\"not-user-executable-file.sh\"))\n        .unwrap();\n\n    te.assert_output(&[\"--type\", \"executable\"], \"executable-file.sh\");\n\n    te.assert_output(\n        &[\"--type\", \"executable\", \"--type\", \"directory\"],\n        
\"executable-file.sh\n        one/\n        one/two/\n        one/two/three/\n        one/two/three/directory_foo/\",\n    );\n}\n\n/// Test `--type empty`\n#[test]\nfn test_type_empty() {\n    let te = TestEnv::new(&[\"dir_empty\", \"dir_nonempty\"], &[]);\n\n    create_file_with_size(te.test_root().join(\"0_bytes.foo\"), 0);\n    create_file_with_size(te.test_root().join(\"5_bytes.foo\"), 5);\n\n    create_file_with_size(te.test_root().join(\"dir_nonempty\").join(\"2_bytes.foo\"), 2);\n\n    te.assert_output(\n        &[\"--type\", \"empty\"],\n        \"0_bytes.foo\n        dir_empty/\",\n    );\n\n    te.assert_output(\n        &[\"--type\", \"empty\", \"--type\", \"file\", \"--type\", \"directory\"],\n        \"0_bytes.foo\n        dir_empty/\",\n    );\n\n    te.assert_output(&[\"--type\", \"empty\", \"--type\", \"file\"], \"0_bytes.foo\");\n\n    te.assert_output(&[\"--type\", \"empty\", \"--type\", \"directory\"], \"dir_empty/\");\n}\n\n/// File extension (--extension)\n#[test]\nfn test_extension() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--extension\", \"foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(\n        &[\"--extension\", \".foo\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/three/d.foo\",\n    );\n\n    te.assert_output(\n        &[\"--extension\", \".foo\", \"--extension\", \"foo2\"],\n        \"a.foo\n        one/b.foo\n        one/two/c.foo\n        one/two/three/d.foo\n        one/two/C.Foo2\",\n    );\n\n    te.assert_output(&[\"--extension\", \".foo\", \"a\"], \"a.foo\");\n\n    te.assert_output(&[\"--extension\", \"foo2\"], \"one/two/C.Foo2\");\n\n    let te2 = TestEnv::new(&[], &[\"spam.bar.baz\", \"egg.bar.baz\", \"yolk.bar.baz.sig\"]);\n\n    te2.assert_output(\n        &[\"--extension\", \".bar.baz\"],\n        \"spam.bar.baz\n        egg.bar.baz\",\n    );\n\n    
te2.assert_output(&[\"--extension\", \"sig\"], \"yolk.bar.baz.sig\");\n\n    te2.assert_output(&[\"--extension\", \"bar.baz.sig\"], \"yolk.bar.baz.sig\");\n\n    let te3 = TestEnv::new(&[], &[\"latin1.e\\u{301}xt\", \"smiley.☻\"]);\n\n    te3.assert_output(&[\"--extension\", \"☻\"], \"smiley.☻\");\n\n    te3.assert_output(&[\"--extension\", \".e\\u{301}xt\"], \"latin1.e\\u{301}xt\");\n\n    let te4 = TestEnv::new(&[], &[\".hidden\", \"test.hidden\"]);\n\n    te4.assert_output(&[\"--hidden\", \"--extension\", \".hidden\"], \"test.hidden\");\n}\n\n/// No file extension (test for the pattern provided in the --help text)\n#[test]\nfn test_no_extension() {\n    let te = TestEnv::new(\n        DEFAULT_DIRS,\n        &[\"a.foo\", \"aa\", \"one/b.foo\", \"one/bb\", \"one/two/three/d\"],\n    );\n\n    te.assert_output(\n        &[\"^[^.]+$\"],\n        \"aa\n        one/\n        one/bb\n        one/two/\n        one/two/three/\n        one/two/three/d\n        one/two/three/directory_foo/\n        symlink\",\n    );\n\n    te.assert_output(\n        &[\"^[^.]+$\", \"--type\", \"file\"],\n        \"aa\n        one/bb\n        one/two/three/d\",\n    );\n}\n\n/// Symlink as search directory\n#[test]\nfn test_symlink_as_root() {\n    let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    te.create_broken_symlink(\"broken_symlink\")\n        .expect(\"Failed to create broken symlink.\");\n\n    // From: http://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html\n    // The getcwd() function shall place an absolute pathname of the current working directory in\n    // the array pointed to by buf, and return buf. The pathname shall contain no components that\n    // are dot or dot-dot, or are symbolic links.\n    //\n    // Key points:\n    // 1. The path of the current working directory of a Unix process cannot contain symlinks.\n    // 2. The path of the current working directory of a Windows process can contain symlinks.\n    //\n    // More:\n    // 1. 
On Windows, symlinks are resolved after the \"..\" component.\n    // 2. On Unix, symlinks are resolved immediately as encountered.\n\n    let parent_parent = if cfg!(windows) { \"..\" } else { \"../..\" };\n    te.assert_output_subdirectory(\n        \"symlink\",\n        &[\"\", parent_parent],\n        &format!(\n            \"{dir}/a.foo\n            {dir}/broken_symlink\n            {dir}/e1 e2\n            {dir}/one/\n            {dir}/one/b.foo\n            {dir}/one/two/\n            {dir}/one/two/c.foo\n            {dir}/one/two/C.Foo2\n            {dir}/one/two/three/\n            {dir}/one/two/three/d.foo\n            {dir}/one/two/three/directory_foo/\n            {dir}/symlink\",\n            dir = &parent_parent\n        ),\n    );\n}\n\n#[test]\nfn test_symlink_and_absolute_path() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    let expected_path = if cfg!(windows) { \"symlink\" } else { \"one/two\" };\n\n    te.assert_output_subdirectory(\n        \"symlink\",\n        &[\"--absolute-path\"],\n        &format!(\n            \"{abs_path}/{expected_path}/c.foo\n            {abs_path}/{expected_path}/C.Foo2\n            {abs_path}/{expected_path}/three/\n            {abs_path}/{expected_path}/three/d.foo\n            {abs_path}/{expected_path}/three/directory_foo/\",\n            abs_path = &abs_path,\n            expected_path = expected_path\n        ),\n    );\n}\n\n#[test]\nfn test_symlink_as_absolute_root() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"\", &format!(\"{abs_path}/symlink\")],\n        &format!(\n            \"{abs_path}/symlink/c.foo\n            {abs_path}/symlink/C.Foo2\n            {abs_path}/symlink/three/\n            {abs_path}/symlink/three/d.foo\n            {abs_path}/symlink/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n\n#[test]\nfn test_symlink_and_full_path() {\n    
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    let root = te.system_root();\n    let prefix = escape(&root.to_string_lossy());\n\n    let expected_path = if cfg!(windows) { \"symlink\" } else { \"one/two\" };\n\n    te.assert_output_subdirectory(\n        \"symlink\",\n        &[\n            \"--absolute-path\",\n            \"--full-path\",\n            &format!(\"^{prefix}.*three\"),\n        ],\n        &format!(\n            \"{abs_path}/{expected_path}/three/\n            {abs_path}/{expected_path}/three/d.foo\n            {abs_path}/{expected_path}/three/directory_foo/\",\n            abs_path = &abs_path,\n            expected_path = expected_path\n        ),\n    );\n}\n\n#[test]\nfn test_symlink_and_full_path_abs_path() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    let root = te.system_root();\n    let prefix = escape(&root.to_string_lossy());\n    te.assert_output(\n        &[\n            \"--full-path\",\n            &format!(\"^{prefix}.*symlink.*three\"),\n            &format!(\"{abs_path}/symlink\"),\n        ],\n        &format!(\n            \"{abs_path}/symlink/three/\n            {abs_path}/symlink/three/d.foo\n            {abs_path}/symlink/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n/// Exclude patterns (--exclude)\n#[test]\nfn test_excludes() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--exclude\", \"*.foo\"],\n        \"one/\n        one/two/\n        one/two/C.Foo2\n        one/two/three/\n        one/two/three/directory_foo/\n        e1 e2\n        symlink\",\n    );\n\n    te.assert_output(\n        &[\"--exclude\", \"*.foo\", \"--exclude\", \"*.Foo2\"],\n        \"one/\n        one/two/\n        one/two/three/\n        one/two/three/directory_foo/\n        e1 e2\n        symlink\",\n    );\n\n    te.assert_output(\n        &[\"--exclude\", \"*.foo\", \"--exclude\", 
\"*.Foo2\", \"foo\"],\n        \"one/two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        &[\"--exclude\", \"one/two/\", \"foo\"],\n        \"a.foo\n        one/b.foo\",\n    );\n\n    te.assert_output(\n        &[\"--exclude\", \"one/**/*.foo\"],\n        \"a.foo\n        e1 e2\n        one/\n        one/two/\n        one/two/C.Foo2\n        one/two/three/\n        one/two/three/directory_foo/\n        symlink\",\n    );\n}\n\n#[test]\nfn format() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--format\", \"path={}\", \"--path-separator=/\"],\n        \"path=a.foo\n        path=e1 e2\n        path=one\n        path=one/b.foo\n        path=one/two\n        path=one/two/C.Foo2\n        path=one/two/c.foo\n        path=one/two/three\n        path=one/two/three/d.foo\n        path=one/two/three/directory_foo\n        path=symlink\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--format\", \"noExt={.}\", \"--path-separator=/\"],\n        \"noExt=a\n        noExt=one/b\n        noExt=one/two/C\n        noExt=one/two/c\n        noExt=one/two/three/d\n        noExt=one/two/three/directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--format\", \"basename={/}\", \"--path-separator=/\"],\n        \"basename=a.foo\n        basename=b.foo\n        basename=C.Foo2\n        basename=c.foo\n        basename=d.foo\n        basename=directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--format\", \"name={/.}\", \"--path-separator=/\"],\n        \"name=a\n        name=b\n        name=C\n        name=c\n        name=d\n        name=directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--format\", \"parent={//}\", \"--path-separator=/\"],\n        \"parent=.\n        parent=one\n        parent=one/two\n        parent=one/two\n        parent=one/two/three\n        parent=one/two/three\",\n    );\n}\n\n/// Shell script execution (--exec)\n#[test]\nfn 
test_exec() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    // TODO Windows tests: D:file.txt \\file.txt \\\\server\\share\\file.txt ...\n    if !cfg!(windows) {\n        te.assert_output(\n            &[\"--absolute-path\", \"foo\", \"--exec\", \"echo\"],\n            &format!(\n                \"{abs_path}/a.foo\n                {abs_path}/one/b.foo\n                {abs_path}/one/two/C.Foo2\n                {abs_path}/one/two/c.foo\n                {abs_path}/one/two/three/d.foo\n                {abs_path}/one/two/three/directory_foo\",\n                abs_path = &abs_path\n            ),\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec\", \"echo\", \"{}\"],\n            \"./a.foo\n            ./one/b.foo\n            ./one/two/C.Foo2\n            ./one/two/c.foo\n            ./one/two/three/d.foo\n            ./one/two/three/directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--strip-cwd-prefix\", \"--exec\", \"echo\", \"{}\"],\n            \"a.foo\n            one/b.foo\n            one/two/C.Foo2\n            one/two/c.foo\n            one/two/three/d.foo\n            one/two/three/directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec\", \"echo\", \"{.}\"],\n            \"a\n            one/b\n            one/two/C\n            one/two/c\n            one/two/three/d\n            one/two/three/directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec\", \"echo\", \"{/}\"],\n            \"a.foo\n            b.foo\n            C.Foo2\n            c.foo\n            d.foo\n            directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec\", \"echo\", \"{/.}\"],\n            \"a\n            b\n            C\n            c\n            d\n            directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec\", \"echo\", 
\"{//}\"],\n            \".\n            ./one\n            ./one/two\n            ./one/two\n            ./one/two/three\n            ./one/two/three\",\n        );\n\n        te.assert_output(&[\"e1\", \"--exec\", \"printf\", \"%s.%s\\n\"], \"./e1 e2.\");\n    }\n}\n\n// TODO test for windows\n#[cfg(not(windows))]\n#[test]\nfn test_exec_multi() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\n            \"--absolute-path\",\n            \"foo\",\n            \"--exec\",\n            \"echo\",\n            \";\",\n            \"--exec\",\n            \"echo\",\n            \"test\",\n            \"{/}\",\n        ],\n        &format!(\n            \"{abs_path}/a.foo\n                {abs_path}/one/b.foo\n                {abs_path}/one/two/C.Foo2\n                {abs_path}/one/two/c.foo\n                {abs_path}/one/two/three/d.foo\n                {abs_path}/one/two/three/directory_foo\n                test a.foo\n                test b.foo\n                test C.Foo2\n                test c.foo\n                test d.foo\n                test directory_foo\",\n            abs_path = &abs_path\n        ),\n    );\n\n    te.assert_output(\n        &[\n            \"e1\", \"--exec\", \"echo\", \"{.}\", \";\", \"--exec\", \"echo\", \"{/}\", \";\", \"--exec\", \"echo\",\n            \"{//}\", \";\", \"--exec\", \"echo\", \"{/.}\",\n        ],\n        \"e1 e2\n        e1 e2\n        .\n        e1 e2\",\n    );\n\n    // We use printf here because we need to suppress a newline and\n    // echo -n is not POSIX-compliant.\n    te.assert_output(\n        &[\n            \"foo\", \"--exec\", \"printf\", \"%s\", \"{/}: \", \";\", \"--exec\", \"printf\", \"%s\\\\n\", \"{//}\",\n        ],\n        \"a.foo: .\n        b.foo: ./one\n        C.Foo2: ./one/two\n        c.foo: ./one/two\n        d.foo: ./one/two/three\n        directory_foo: ./one/two/three\",\n    
);\n}\n\n#[cfg(not(windows))]\n#[test]\nfn test_exec_nulls() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    te.assert_output(\n        &[\"foo\", \"--print0\", \"--exec\", \"printf\", \"p=%s\"],\n        \"p=./a.fooNULL\n        p=./one/b.fooNULL\n        p=./one/two/C.Foo2NULL\n        p=./one/two/c.fooNULL\n        p=./one/two/three/d.fooNULL\n        p=./one/two/three/directory_fooNULL\",\n    );\n}\n\n#[test]\nfn test_exec_batch() {\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    let te = te.normalize_line(true);\n\n    // TODO Test for windows\n    if !cfg!(windows) {\n        te.assert_output(\n            &[\"--absolute-path\", \"foo\", \"--exec-batch\", \"echo\"],\n            &format!(\n                \"{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/c.foo {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo\",\n                abs_path = &abs_path\n            ),\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec-batch\", \"echo\", \"{}\"],\n            \"./a.foo ./one/b.foo ./one/two/C.Foo2 ./one/two/c.foo ./one/two/three/d.foo ./one/two/three/directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--strip-cwd-prefix\", \"--exec-batch\", \"echo\", \"{}\"],\n            \"a.foo one/b.foo one/two/C.Foo2 one/two/c.foo one/two/three/d.foo one/two/three/directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"foo\", \"--exec-batch\", \"echo\", \"{/}\"],\n            \"a.foo b.foo C.Foo2 c.foo d.foo directory_foo\",\n        );\n\n        te.assert_output(\n            &[\"no_match\", \"--exec-batch\", \"echo\", \"Matched: \", \"{/}\"],\n            \"\",\n        );\n\n        te.assert_failure_with_error(\n            &[\"foo\", \"--exec-batch\", \"echo\", \"{}\", \"{}\"],\n            \"error: Only one placeholder allowed for batch commands\\n\\\n            \\n\\\n            
Usage: fd [OPTIONS] [pattern] [path]...\\n\\\n            \\n\\\n            For more information, try '--help'.\\n\\\n            \",\n        );\n\n        te.assert_failure_with_error(\n            &[\"foo\", \"--exec-batch\", \"echo\", \"{/}\", \";\", \"-x\", \"echo\"],\n            \"error: the argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'\\n\\\n            \\n\\\n            Usage: fd --exec-batch <cmd>... <pattern> [path]...\\n\\\n            \\n\\\n            For more information, try '--help'.\\n\\\n            \",\n        );\n\n        te.assert_failure_with_error(\n            &[\"foo\", \"--exec-batch\"],\n            \"error: a value is required for '--exec-batch <cmd>...' but none was supplied\\n\\\n            \\n\\\n            For more information, try '--help'.\\n\\\n            \",\n        );\n\n        te.assert_failure_with_error(\n            &[\"foo\", \"--exec-batch\", \"echo {}\"],\n            \"error: First argument of exec-batch is expected to be a fixed executable\\n\\\n            \\n\\\n            Usage: fd [OPTIONS] [pattern] [path]...\\n\\\n            \\n\\\n            For more information, try '--help'.\\n\\\n            \",\n        );\n\n        te.assert_failure_with_error(&[\"a.foo\", \"--exec-batch\", \"bash\", \"-c\", \"exit 1\"], \"\");\n    }\n}\n\n#[test]\nfn test_exec_batch_multi() {\n    // TODO test for windows\n    if cfg!(windows) {\n        return;\n    }\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    let output = te.assert_success_and_get_output(\n        \".\",\n        &[\n            \"foo\",\n            \"--exec-batch\",\n            \"echo\",\n            \"{}\",\n            \";\",\n            \"--exec-batch\",\n            \"echo\",\n            \"{/}\",\n        ],\n    );\n    let stdout = std::str::from_utf8(&output.stdout).unwrap();\n    let lines: Vec<_> = stdout\n        .lines()\n        .map(|l| {\n            let mut words: Vec<_> = 
l.split_whitespace().collect();\n            words.sort_unstable();\n            words\n        })\n        .collect();\n\n    assert_eq!(\n        lines,\n        &[\n            [\n                \"./a.foo\",\n                \"./one/b.foo\",\n                \"./one/two/C.Foo2\",\n                \"./one/two/c.foo\",\n                \"./one/two/three/d.foo\",\n                \"./one/two/three/directory_foo\"\n            ],\n            [\n                \"C.Foo2\",\n                \"a.foo\",\n                \"b.foo\",\n                \"c.foo\",\n                \"d.foo\",\n                \"directory_foo\"\n            ],\n        ]\n    );\n\n    te.assert_failure_with_error(\n        &[\n            \"a.foo\",\n            \"--exec-batch\",\n            \"echo\",\n            \";\",\n            \"--exec-batch\",\n            \"bash\",\n            \"-c\",\n            \"exit 1\",\n        ],\n        \"\",\n    );\n}\n\n#[test]\nfn test_exec_batch_with_limit() {\n    // TODO Test for windows\n    if cfg!(windows) {\n        return;\n    }\n\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    let output = te.assert_success_and_get_output(\n        \".\",\n        &[\"foo\", \"--batch-size=2\", \"--exec-batch\", \"echo\", \"{}\"],\n    );\n    let stdout = String::from_utf8_lossy(&output.stdout);\n\n    for line in stdout.lines() {\n        assert_eq!(2, line.split_whitespace().count());\n    }\n\n    let mut paths: Vec<_> = stdout\n        .lines()\n        .flat_map(|line| line.split_whitespace())\n        .collect();\n    paths.sort_unstable();\n    assert_eq!(\n        &paths,\n        &[\n            \"./a.foo\",\n            \"./one/b.foo\",\n            \"./one/two/C.Foo2\",\n            \"./one/two/c.foo\",\n            \"./one/two/three/d.foo\",\n            \"./one/two/three/directory_foo\"\n        ],\n    );\n}\n\n/// Shell script execution (--exec) with a custom --path-separator\n#[test]\nfn test_exec_with_separator() {\n    let 
(te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    te.assert_output(\n        &[\n            \"--path-separator=#\",\n            \"--absolute-path\",\n            \"foo\",\n            \"--exec\",\n            \"echo\",\n        ],\n        &format!(\n            \"{abs_path}#a.foo\n                {abs_path}#one#b.foo\n                {abs_path}#one#two#C.Foo2\n                {abs_path}#one#two#c.foo\n                {abs_path}#one#two#three#d.foo\n                {abs_path}#one#two#three#directory_foo\",\n            abs_path = abs_path.replace(std::path::MAIN_SEPARATOR, \"#\"),\n        ),\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"foo\", \"--exec\", \"echo\", \"{}\"],\n        \".#a.foo\n            .#one#b.foo\n            .#one#two#C.Foo2\n            .#one#two#c.foo\n            .#one#two#three#d.foo\n            .#one#two#three#directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"foo\", \"--exec\", \"echo\", \"{.}\"],\n        \"a\n            one#b\n            one#two#C\n            one#two#c\n            one#two#three#d\n            one#two#three#directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"foo\", \"--exec\", \"echo\", \"{/}\"],\n        \"a.foo\n            b.foo\n            C.Foo2\n            c.foo\n            d.foo\n            directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"foo\", \"--exec\", \"echo\", \"{/.}\"],\n        \"a\n            b\n            C\n            c\n            d\n            directory_foo\",\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"foo\", \"--exec\", \"echo\", \"{//}\"],\n        \".\n            .#one\n            .#one#two\n            .#one#two\n            .#one#two#three\n            .#one#two#three\",\n    );\n\n    te.assert_output(\n        &[\"--path-separator=#\", \"e1\", \"--exec\", \"printf\", \"%s.%s\\n\"],\n      
  \".#e1 e2.\",\n    );\n}\n\n/// Non-zero exit code (--quiet)\n#[test]\nfn test_quiet() {\n    let dirs = &[];\n    let files = &[\"a.foo\", \"b.foo\"];\n    let te = TestEnv::new(dirs, files);\n\n    te.assert_output(&[\"-q\"], \"\");\n    te.assert_output(&[\"--quiet\"], \"\");\n    te.assert_output(&[\"--has-results\"], \"\");\n    te.assert_failure_with_error(&[\"--quiet\", \"c.foo\"], \"\")\n}\n\n/// Literal search (--fixed-strings)\n#[test]\nfn test_fixed_strings() {\n    let dirs = &[\"test1\", \"test2\"];\n    let files = &[\"test1/a.foo\", \"test1/a_foo\", \"test2/Download (1).tar.gz\"];\n    let te = TestEnv::new(dirs, files);\n\n    // Regex search, dot is treated as \"any character\"\n    te.assert_output(\n        &[\"a.foo\"],\n        \"test1/a.foo\n         test1/a_foo\",\n    );\n\n    // Literal search, dot is treated as character\n    te.assert_output(&[\"--fixed-strings\", \"a.foo\"], \"test1/a.foo\");\n\n    // Regex search, parens are treated as group\n    te.assert_output(&[\"download (1)\"], \"\");\n\n    // Literal search, parens are treated as characters\n    te.assert_output(\n        &[\"--fixed-strings\", \"download (1)\"],\n        \"test2/Download (1).tar.gz\",\n    );\n\n    // Combine with --case-sensitive\n    te.assert_output(&[\"--fixed-strings\", \"--case-sensitive\", \"download (1)\"], \"\");\n}\n\n/// Filenames with invalid UTF-8 sequences\n#[cfg(target_os = \"linux\")]\n#[test]\nfn test_invalid_utf8() {\n    use std::ffi::OsStr;\n    use std::os::unix::ffi::OsStrExt;\n\n    let dirs = &[\"test1\"];\n    let files = &[];\n    let te = TestEnv::new(dirs, files);\n\n    fs::File::create(\n        te.test_root()\n            .join(OsStr::from_bytes(b\"test1/test_\\xFEinvalid.txt\")),\n    )\n    .unwrap();\n\n    te.assert_output(&[\"\", \"test1/\"], \"test1/test_�invalid.txt\");\n\n    te.assert_output(&[\"invalid\", \"test1/\"], \"test1/test_�invalid.txt\");\n\n    // Should not be found under a different extension\n    
te.assert_output(&[\"-e\", \"zip\", \"\", \"test1/\"], \"\");\n}\n\n/// Filtering for file size (--size)\n#[test]\nfn test_size() {\n    let te = TestEnv::new(&[], &[]);\n\n    create_file_with_size(te.test_root().join(\"0_bytes.foo\"), 0);\n    create_file_with_size(te.test_root().join(\"11_bytes.foo\"), 11);\n    create_file_with_size(te.test_root().join(\"30_bytes.foo\"), 30);\n    create_file_with_size(te.test_root().join(\"3_kilobytes.foo\"), 3 * 1000);\n    create_file_with_size(te.test_root().join(\"4_kibibytes.foo\"), 4 * 1024);\n\n    // Zero and non-zero sized files.\n    te.assert_output(\n        &[\"\", \"--size\", \"+0B\"],\n        \"0_bytes.foo\n        11_bytes.foo\n        30_bytes.foo\n        3_kilobytes.foo\n        4_kibibytes.foo\",\n    );\n\n    // Zero sized files.\n    te.assert_output(&[\"\", \"--size\", \"-0B\"], \"0_bytes.foo\");\n    te.assert_output(&[\"\", \"--size\", \"0B\"], \"0_bytes.foo\");\n    te.assert_output(&[\"\", \"--size=0B\"], \"0_bytes.foo\");\n    te.assert_output(&[\"\", \"-S\", \"0B\"], \"0_bytes.foo\");\n\n    // Files with 2 bytes or more.\n    te.assert_output(\n        &[\"\", \"--size\", \"+2B\"],\n        \"11_bytes.foo\n        30_bytes.foo\n        3_kilobytes.foo\n        4_kibibytes.foo\",\n    );\n\n    // Files with 2 bytes or less.\n    te.assert_output(&[\"\", \"--size\", \"-2B\"], \"0_bytes.foo\");\n\n    // Files with size between 1 byte and 11 bytes.\n    te.assert_output(&[\"\", \"--size\", \"+1B\", \"--size\", \"-11B\"], \"11_bytes.foo\");\n\n    // Files with size equal 11 bytes.\n    te.assert_output(&[\"\", \"--size\", \"11B\"], \"11_bytes.foo\");\n\n    // Files with size between 1 byte and 30 bytes.\n    te.assert_output(\n        &[\"\", \"--size\", \"+1B\", \"--size\", \"-30B\"],\n        \"11_bytes.foo\n        30_bytes.foo\",\n    );\n\n    // Combine with a search pattern\n    te.assert_output(&[\"^11_\", \"--size\", \"+1B\", \"--size\", \"-30B\"], \"11_bytes.foo\");\n\n    // Files with 
size between 12 and 30 bytes.\n    te.assert_output(&[\"\", \"--size\", \"+12B\", \"--size\", \"-30B\"], \"30_bytes.foo\");\n\n    // Files with size between 31 and 100 bytes.\n    te.assert_output(&[\"\", \"--size\", \"+31B\", \"--size\", \"-100B\"], \"\");\n\n    // Files with size between 3 kibibytes and 5 kibibytes.\n    te.assert_output(&[\"\", \"--size\", \"+3ki\", \"--size\", \"-5ki\"], \"4_kibibytes.foo\");\n\n    // Files with size between 3 kilobytes and 5 kilobytes.\n    te.assert_output(\n        &[\"\", \"--size\", \"+3k\", \"--size\", \"-5k\"],\n        \"3_kilobytes.foo\n        4_kibibytes.foo\",\n    );\n\n    // Files with size greater than 3 kilobytes and less than 3 kibibytes.\n    te.assert_output(&[\"\", \"--size\", \"+3k\", \"--size\", \"-3ki\"], \"3_kilobytes.foo\");\n\n    // Files with size equal 4 kibibytes.\n    te.assert_output(&[\"\", \"--size\", \"+4ki\", \"--size\", \"-4ki\"], \"4_kibibytes.foo\");\n    te.assert_output(&[\"\", \"--size\", \"4ki\"], \"4_kibibytes.foo\");\n}\n\n#[cfg(test)]\nfn create_file_with_modified<P: AsRef<Path>>(path: P, duration_in_secs: u64) {\n    let st = SystemTime::now() - Duration::from_secs(duration_in_secs);\n    let ft = filetime::FileTime::from_system_time(st);\n    fs::File::create(&path).expect(\"creation failed\");\n    filetime::set_file_times(&path, ft, ft).expect(\"time modification failed\");\n}\n\n#[cfg(test)]\nfn remove_symlink<P: AsRef<Path>>(path: P) {\n    #[cfg(unix)]\n    fs::remove_file(path).expect(\"remove symlink\");\n\n    // On Windows, symlinks remember whether they point to files or directories, so try both\n    #[cfg(windows)]\n    fs::remove_file(path.as_ref())\n        .or_else(|_| fs::remove_dir(path.as_ref()))\n        .expect(\"remove symlink\");\n}\n\n#[test]\nfn test_modified_relative() {\n    let te = TestEnv::new(&[], &[]);\n    remove_symlink(te.test_root().join(\"symlink\"));\n    create_file_with_modified(te.test_root().join(\"foo_0_now\"), 0);\n    
create_file_with_modified(te.test_root().join(\"bar_1_min\"), 60);\n    create_file_with_modified(te.test_root().join(\"foo_10_min\"), 600);\n    create_file_with_modified(te.test_root().join(\"bar_1_h\"), 60 * 60);\n    create_file_with_modified(te.test_root().join(\"foo_2_h\"), 2 * 60 * 60);\n    create_file_with_modified(te.test_root().join(\"bar_1_day\"), 24 * 60 * 60);\n\n    te.assert_output(\n        &[\"\", \"--changed-within\", \"15min\"],\n        \"foo_0_now\n        bar_1_min\n        foo_10_min\",\n    );\n\n    te.assert_output(\n        &[\"\", \"--change-older-than\", \"15min\"],\n        \"bar_1_h\n        foo_2_h\n        bar_1_day\",\n    );\n\n    te.assert_output(\n        &[\"foo\", \"--changed-within\", \"12h\"],\n        \"foo_0_now\n        foo_10_min\n        foo_2_h\",\n    );\n}\n\n#[cfg(test)]\nfn change_file_modified<P: AsRef<Path>>(path: P, iso_date: &str) {\n    let st = iso_date\n        .parse::<Timestamp>()\n        .map(SystemTime::from)\n        .expect(\"invalid date\");\n    let ft = filetime::FileTime::from_system_time(st);\n    filetime::set_file_times(path, ft, ft).expect(\"time modification failed\");\n}\n\n#[test]\nfn test_modified_absolute() {\n    let te = TestEnv::new(&[], &[\"15mar2018\", \"30dec2017\"]);\n    remove_symlink(te.test_root().join(\"symlink\"));\n    change_file_modified(te.test_root().join(\"15mar2018\"), \"2018-03-15T12:00:00Z\");\n    change_file_modified(te.test_root().join(\"30dec2017\"), \"2017-12-30T23:59:00Z\");\n\n    te.assert_output(\n        &[\"\", \"--change-newer-than\", \"2018-01-01 00:00:00\"],\n        \"15mar2018\",\n    );\n    te.assert_output(\n        &[\"\", \"--changed-before\", \"2018-01-01 00:00:00\"],\n        \"30dec2017\",\n    );\n}\n\n#[cfg(unix)]\n#[test]\nfn test_owner_ignore_all() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    te.assert_output(&[\"--owner\", \":\", \"a.foo\"], \"a.foo\");\n    te.assert_output(&[\"--owner\", \"\", \"a.foo\"], 
\"a.foo\");\n}\n\n#[cfg(unix)]\n#[test]\nfn test_owner_current_user() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    let uid = Uid::current();\n    te.assert_output(&[\"--owner\", &uid.to_string(), \"a.foo\"], \"a.foo\");\n    if let Ok(Some(user)) = User::from_uid(uid) {\n        te.assert_output(&[\"--owner\", &user.name, \"a.foo\"], \"a.foo\");\n    }\n}\n\n#[cfg(unix)]\n#[test]\nfn test_owner_current_group() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    let gid = Gid::current();\n    te.assert_output(&[\"--owner\", &format!(\":{gid}\"), \"a.foo\"], \"a.foo\");\n    if let Ok(Some(group)) = Group::from_gid(gid) {\n        te.assert_output(&[\"--owner\", &format!(\":{}\", group.name), \"a.foo\"], \"a.foo\");\n    }\n}\n\n#[cfg(target_os = \"linux\")]\n#[test]\nfn test_owner_root() {\n    // This test assumes the current user isn't root\n    if Uid::current().is_root() || Gid::current() == Gid::from_raw(0) {\n        return;\n    }\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n    te.assert_output(&[\"--owner\", \"root\", \"a.foo\"], \"\");\n    te.assert_output(&[\"--owner\", \"0\", \"a.foo\"], \"\");\n    te.assert_output(&[\"--owner\", \":root\", \"a.foo\"], \"\");\n    te.assert_output(&[\"--owner\", \":0\", \"a.foo\"], \"\");\n}\n\n#[test]\nfn test_custom_path_separator() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"foo\", \"one\", \"--path-separator\", \"=\"],\n        \"one=b.foo\n        one=two=c.foo\n        one=two=C.Foo2\n        one=two=three=d.foo\n        one=two=three=directory_foo=\",\n    );\n}\n\n#[test]\nfn test_base_directory() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--base-directory\", \"one\"],\n        \"b.foo\n        two/\n        two/c.foo\n        two/C.Foo2\n        two/three/\n        two/three/d.foo\n        two/three/directory_foo/\",\n    );\n\n    te.assert_output(\n        
&[\"--base-directory\", \"one/two/\", \"foo\"],\n        \"c.foo\n        C.Foo2\n        three/d.foo\n        three/directory_foo/\",\n    );\n\n    // Explicit root path\n    te.assert_output(\n        &[\"--base-directory\", \"one\", \"foo\", \"two\"],\n        \"two/c.foo\n        two/C.Foo2\n        two/three/d.foo\n        two/three/directory_foo/\",\n    );\n\n    // Ignore base directory when absolute path is used\n    let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);\n    let abs_base_dir = &format!(\"{abs_path}/one/two/\", abs_path = &abs_path);\n    te.assert_output(\n        &[\"--base-directory\", abs_base_dir, \"foo\", &abs_path],\n        &format!(\n            \"{abs_path}/a.foo\n            {abs_path}/one/b.foo\n            {abs_path}/one/two/c.foo\n            {abs_path}/one/two/C.Foo2\n            {abs_path}/one/two/three/d.foo\n            {abs_path}/one/two/three/directory_foo/\",\n            abs_path = &abs_path\n        ),\n    );\n}\n\n#[test]\nfn test_max_results() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    // Unrestricted\n    te.assert_output(\n        &[\"--max-results=0\", \"c.foo\"],\n        \"one/two/C.Foo2\n         one/two/c.foo\",\n    );\n\n    // Limited to two results\n    te.assert_output(\n        &[\"--max-results=2\", \"c.foo\"],\n        \"one/two/C.Foo2\n         one/two/c.foo\",\n    );\n\n    // Limited to one result. 
We could find either C.Foo2 or c.foo\n    let assert_just_one_result_with_option = |option| {\n        let output = te.assert_success_and_get_output(\".\", &[option, \"c.foo\"]);\n        let stdout = String::from_utf8_lossy(&output.stdout)\n            .trim()\n            .replace(&std::path::MAIN_SEPARATOR.to_string(), \"/\");\n        assert!(stdout == \"one/two/C.Foo2\" || stdout == \"one/two/c.foo\");\n    };\n    assert_just_one_result_with_option(\"--max-results=1\");\n    assert_just_one_result_with_option(\"-1\");\n\n    // check that --max-results & -1 conflict with --exec\n    te.assert_failure(&[\"thing\", \"--max-results=0\", \"--exec=cat\"]);\n    te.assert_failure(&[\"thing\", \"-1\", \"--exec=cat\"]);\n    te.assert_failure(&[\"thing\", \"--max-results=1\", \"-1\", \"--exec=cat\"]);\n}\n\n/// Filenames with non-utf8 paths are passed to the executed program unchanged\n///\n/// Note:\n/// - the test is disabled on Darwin/OSX, since it coerces file names to UTF-8,\n///   even when the requested file name is not valid UTF-8.\n/// - the test is currently disabled on Windows because I'm not sure how to create\n///   invalid UTF-8 files on Windows\n#[cfg(all(unix, not(target_os = \"macos\")))]\n#[test]\nfn test_exec_invalid_utf8() {\n    use std::ffi::OsStr;\n    use std::os::unix::ffi::OsStrExt;\n\n    let dirs = &[\"test1\"];\n    let files = &[];\n    let te = TestEnv::new(dirs, files);\n\n    fs::File::create(\n        te.test_root()\n            .join(OsStr::from_bytes(b\"test1/test_\\xFEinvalid.txt\")),\n    )\n    .unwrap();\n\n    te.assert_output_raw(\n        &[\"\", \"test1/\", \"--exec\", \"echo\", \"{}\"],\n        b\"test1/test_\\xFEinvalid.txt\\n\",\n    );\n\n    te.assert_output_raw(\n        &[\"\", \"test1/\", \"--exec\", \"echo\", \"{/}\"],\n        b\"test_\\xFEinvalid.txt\\n\",\n    );\n\n    te.assert_output_raw(&[\"\", \"test1/\", \"--exec\", \"echo\", \"{//}\"], b\"test1\\n\");\n\n    te.assert_output_raw(\n        &[\"\", 
\"test1/\", \"--exec\", \"echo\", \"{.}\"],\n        b\"test1/test_\\xFEinvalid\\n\",\n    );\n\n    te.assert_output_raw(\n        &[\"\", \"test1/\", \"--exec\", \"echo\", \"{/.}\"],\n        b\"test_\\xFEinvalid\\n\",\n    );\n}\n\n#[test]\nfn test_list_details() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    // Make sure we can execute 'fd --list-details' without any errors.\n    te.assert_success_and_get_output(\".\", &[\"--list-details\"]);\n}\n\n#[test]\nfn test_single_and_multithreaded_execution() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(&[\"--threads=1\", \"a.foo\"], \"a.foo\");\n    te.assert_output(&[\"--threads=16\", \"a.foo\"], \"a.foo\");\n}\n\n/// Make sure that fd fails if numeric arguments can not be parsed\n#[test]\nfn test_number_parsing_errors() {\n    let te = TestEnv::new(&[], &[]);\n\n    te.assert_failure(&[\"--threads=a\"]);\n    te.assert_failure(&[\"-j\", \"\"]);\n    te.assert_failure(&[\"--threads=0\"]);\n\n    te.assert_failure(&[\"--min-depth=a\"]);\n    te.assert_failure(&[\"--mindepth=a\"]);\n    te.assert_failure(&[\"--max-depth=a\"]);\n    te.assert_failure(&[\"--maxdepth=a\"]);\n    te.assert_failure(&[\"--exact-depth=a\"]);\n\n    te.assert_failure(&[\"--max-buffer-time=a\"]);\n\n    te.assert_failure(&[\"--max-results=a\"]);\n}\n\n#[test_case(\"--hidden\", &[\"--no-hidden\"] ; \"hidden\")]\n#[test_case(\"--no-ignore\", &[\"--ignore\"] ; \"no-ignore\")]\n#[test_case(\"--no-ignore-vcs\", &[\"--ignore-vcs\"] ; \"no-ignore-vcs\")]\n#[test_case(\"--no-require-git\", &[\"--require-git\"] ; \"no-require-git\")]\n#[test_case(\"--follow\", &[\"--no-follow\"] ; \"follow\")]\n#[test_case(\"--absolute-path\", &[\"--relative-path\"] ; \"absolute-path\")]\n#[test_case(\"-u\", &[\"--ignore\", \"--no-hidden\"] ; \"u\")]\n#[test_case(\"-uu\", &[\"--ignore\", \"--no-hidden\"] ; \"uu\")]\nfn test_opposing(flag: &str, opposing_flags: &[&str]) {\n    let te = TestEnv::new(DEFAULT_DIRS, 
DEFAULT_FILES);\n\n    let mut flags = vec![flag];\n    flags.extend_from_slice(opposing_flags);\n    let out_no_flags = te.assert_success_and_get_normalized_output(\".\", &[]);\n    let out_opposing_flags = te.assert_success_and_get_normalized_output(\".\", &flags);\n\n    assert_eq!(\n        out_no_flags,\n        out_opposing_flags,\n        \"{} should override {}\",\n        opposing_flags.join(\" \"),\n        flag\n    );\n}\n\n/// Print error if search pattern starts with a dot and --hidden is not set\n/// (Unix only, hidden files on Windows work differently)\n#[test]\n#[cfg(unix)]\nfn test_error_if_hidden_not_set_and_pattern_starts_with_dot() {\n    let te = TestEnv::new(&[], &[\".gitignore\", \".whatever\", \"non-hidden\"]);\n\n    te.assert_failure(&[\"^\\\\.gitignore\"]);\n    te.assert_failure(&[\"--glob\", \".gitignore\"]);\n\n    te.assert_output(&[\"--hidden\", \"^\\\\.gitignore\"], \".gitignore\");\n    te.assert_output(&[\"--hidden\", \"--glob\", \".gitignore\"], \".gitignore\");\n    te.assert_output(&[\".gitignore\"], \"\");\n}\n\n#[test]\nfn test_strip_cwd_prefix() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    te.assert_output(\n        &[\"--strip-cwd-prefix\", \".\"],\n        \"a.foo\n        e1 e2\n        one/\n        one/b.foo\n        one/two/\n        one/two/c.foo\n        one/two/C.Foo2\n        one/two/three/\n        one/two/three/d.foo\n        one/two/three/directory_foo/\n        symlink\",\n    );\n}\n\n/// When fd is run from a non-existent working directory, but an existent\n/// directory is passed in the arguments, it should still run fine\n#[test]\n#[cfg(all(not(windows), not(target_os = \"illumos\")))]\nfn test_invalid_cwd() {\n    let te = TestEnv::new(&[], &[]);\n\n    let root = te.test_root().join(\"foo\");\n    fs::create_dir(&root).unwrap();\n    std::env::set_current_dir(&root).unwrap();\n    fs::remove_dir(&root).unwrap();\n\n    let output = std::process::Command::new(te.test_exe())\n        
.arg(\"query\")\n        .arg(te.test_root())\n        .output()\n        .unwrap();\n\n    if !output.status.success() {\n        panic!(\"{output:?}\");\n    }\n}\n\n/// Test behavior of .git directory with various flags\n#[test]\nfn test_git_dir() {\n    let te = TestEnv::new(\n        &[\".git/one\", \"other_dir/.git\", \"nested/dir/.git\"],\n        &[\n            \".git/one/foo.a\",\n            \".git/.foo\",\n            \".git/a.foo\",\n            \"other_dir/.git/foo1\",\n            \"nested/dir/.git/foo2\",\n        ],\n    );\n\n    te.assert_output(\n        &[\"--hidden\", \"foo\"],\n        \".git/one/foo.a\n        .git/.foo\n        .git/a.foo\n        other_dir/.git/foo1\n        nested/dir/.git/foo2\",\n    );\n    te.assert_output(&[\"--no-ignore\", \"foo\"], \"\");\n    te.assert_output(\n        &[\"--hidden\", \"--no-ignore\", \"foo\"],\n        \".git/one/foo.a\n         .git/.foo\n         .git/a.foo\n         other_dir/.git/foo1\n         nested/dir/.git/foo2\",\n    );\n    te.assert_output(\n        &[\"--hidden\", \"--no-ignore-vcs\", \"foo\"],\n        \".git/one/foo.a\n         .git/.foo\n         .git/a.foo\n         other_dir/.git/foo1\n         nested/dir/.git/foo2\",\n    );\n}\n\n#[test]\nfn test_gitignore_parent() {\n    let te = TestEnv::new(&[\"sub\"], &[\".abc\", \"sub/.abc\"]);\n\n    fs::File::create(te.test_root().join(\".gitignore\"))\n        .unwrap()\n        .write_all(b\".abc\\n\")\n        .unwrap();\n\n    te.assert_output_subdirectory(\"sub\", &[\"--hidden\"], \"\");\n    te.assert_output_subdirectory(\"sub\", &[\"--hidden\", \"--search-path\", \".\"], \"\");\n}\n\n#[test]\nfn test_hyperlink() {\n    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);\n\n    #[cfg(unix)]\n    let hostname = nix::unistd::gethostname().unwrap().into_string().unwrap();\n    #[cfg(not(unix))]\n    let hostname = \"/\";\n\n    let expected = format!(\n        \"\\x1b]8;;file://{}{}/a.foo\\x1b\\\\a.foo\\x1b]8;;\\x1b\\\\\",\n        
hostname,\n        get_absolute_root_path(&te),\n    );\n\n    te.assert_output(&[\"--hyperlink=always\", \"a.foo\"], &expected);\n}\n\n#[test]\nfn test_ignore_contain() {\n    let te = TestEnv::new(\n        &[\"include\", \"exclude\", \"exclude/sub\", \"other\"],\n        &[\n            \"top\",\n            \"include/foo\",\n            \"exclude/CACHEDIR.TAG\",\n            \"exclude/sub/nope\",\n            \"other/ignoremyparent\",\n        ],\n    );\n    let expected = \"include/\n    include/foo\n    symlink\n    top\";\n    te.assert_output(\n        &[\n            \"--ignore-contain=CACHEDIR.TAG\",\n            \"--ignore-contain=ignoremyparent\",\n            \".\",\n        ],\n        expected,\n    );\n}\n\n#[test]\nfn test_ignore_contain_precedence_over_depth_check() {\n    let te = TestEnv::new(\n        &[\"include\", \"exclude\", \"exclude/sub\"],\n        &[\n            \"top\",\n            \"include/foo\",\n            \"exclude/CACHEDIR.TAG\",\n            \"exclude/sub/nope\",\n        ],\n    );\n    let expected = \"include/foo\";\n    te.assert_output(\n        &[\"--ignore-contain=CACHEDIR.TAG\", \"--min-depth=2\", \".\"],\n        expected,\n    );\n}\n\n#[test]\nfn test_ignore_contain_precedence_over_root_check() {\n    let te = TestEnv::new(&[\"include\"], &[\"CACHEDIR.TAG\", \"top\", \"include/foo\"]);\n    let expected = \"\";\n    te.assert_output(&[\"--ignore-contain=CACHEDIR.TAG\", \".\"], expected);\n}\n"
  }
]