Compare commits

..

No commits in common. "master" and "v8.6.0" have entirely different histories.

37 changed files with 1461 additions and 2327 deletions

View File

@ -4,7 +4,3 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "monthly" interval: "monthly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

View File

@ -1,8 +1,8 @@
name: CICD name: CICD
env: env:
MIN_SUPPORTED_RUST_VERSION: "1.60.0"
CICD_INTERMEDIATES_DIR: "_cicd-intermediates" CICD_INTERMEDIATES_DIR: "_cicd-intermediates"
MSRV_FEATURES: "--all-features"
on: on:
workflow_dispatch: workflow_dispatch:
@ -14,90 +14,68 @@ on:
- '*' - '*'
jobs: jobs:
crate_metadata: code_quality:
name: Extract crate metadata name: Code quality
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Extract crate information
id: crate_metadata
run: |
echo "name=fd" | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT
outputs:
name: ${{ steps.crate_metadata.outputs.name }}
version: ${{ steps.crate_metadata.outputs.version }}
maintainer: ${{ steps.crate_metadata.outputs.maintainer }}
homepage: ${{ steps.crate_metadata.outputs.homepage }}
msrv: ${{ steps.crate_metadata.outputs.msrv }}
ensure_cargo_fmt:
name: Ensure 'cargo fmt' has been run
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
steps: steps:
- uses: dtolnay/rust-toolchain@stable - name: Checkout source code
with: uses: actions/checkout@v3
components: rustfmt - name: Install rust toolchain
- uses: actions/checkout@v4 run: |
- run: cargo fmt -- --check rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
rustup set profile minimal
lint_check: rustup toolchain install stable -c "clippy,rustfmt"
name: Ensure 'cargo clippy' has no warnings rustup default stable
runs-on: ubuntu-latest - name: Rust cache
steps: uses: Swatinem/rust-cache@v2
- uses: dtolnay/rust-toolchain@stable - name: Ensure `cargo fmt` has been run
with: run: cargo fmt --check
components: clippy - name: Ensure MSRV is set in `clippy.toml`
- uses: actions/checkout@v4 run: grep "^msrv = \"${{ env.MIN_SUPPORTED_RUST_VERSION }}\"\$" clippy.toml
- run: cargo clippy --all-targets --all-features -- -Dwarnings - name: Run clippy
run: cargo clippy --locked --all-targets --all-features
min_version: min_version:
name: Minimum supported rust version name: Minimum supported rust version
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: crate_metadata
steps: steps:
- name: Checkout source code - name: Checkout source code
uses: actions/checkout@v4 uses: actions/checkout@v3
- name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }}) - name: Install rust toolchain (v${{ env.MIN_SUPPORTED_RUST_VERSION }})
uses: dtolnay/rust-toolchain@master run: |
with: rustup set profile minimal
toolchain: ${{ needs.crate_metadata.outputs.msrv }} rustup toolchain install ${{ env.MIN_SUPPORTED_RUST_VERSION }} -c clippy
components: clippy rustup default ${{ env.MIN_SUPPORTED_RUST_VERSION }}
- name: Rust cache
uses: Swatinem/rust-cache@v2
- name: Run clippy (on minimum supported rust version to prevent warnings we can't fix) - name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)
run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }} run: cargo clippy --locked --all-targets --all-features
- name: Run tests - name: Run tests
run: cargo test --locked ${{ env.MSRV_FEATURES }} run: cargo test --locked
build: build:
name: ${{ matrix.job.target }} (${{ matrix.job.os }}) name: ${{ matrix.job.os }} (${{ matrix.job.target }})
runs-on: ${{ matrix.job.os }} runs-on: ${{ matrix.job.os }}
needs: crate_metadata
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
- { target: aarch64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: arm-unknown-linux-gnueabihf , use-cross: true }
- { target: aarch64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: arm-unknown-linux-musleabihf, use-cross: true }
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: aarch64-unknown-linux-gnu , use-cross: true }
- { target: arm-unknown-linux-musleabihf, os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: i686-unknown-linux-gnu , use-cross: true }
- { target: i686-pc-windows-msvc , os: windows-2022 } - { os: ubuntu-20.04, target: i686-unknown-linux-musl , use-cross: true }
- { target: i686-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: x86_64-unknown-linux-gnu , use-cross: true }
- { target: i686-unknown-linux-musl , os: ubuntu-22.04, use-cross: true } - { os: ubuntu-20.04, target: x86_64-unknown-linux-musl , use-cross: true }
- { target: x86_64-apple-darwin , os: macos-12 } - { os: macos-12 , target: x86_64-apple-darwin }
- { target: aarch64-apple-darwin , os: macos-14 } # - { os: windows-2019, target: i686-pc-windows-gnu } ## disabled; error: linker `i686-w64-mingw32-gcc` not found
- { target: x86_64-pc-windows-gnu , os: windows-2022 } - { os: windows-2019, target: i686-pc-windows-msvc }
- { target: x86_64-pc-windows-msvc , os: windows-2022 } - { os: windows-2019, target: x86_64-pc-windows-gnu }
- { target: x86_64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true } - { os: windows-2019, target: x86_64-pc-windows-msvc }
- { target: x86_64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
env:
BUILD_CMD: cargo
steps: steps:
- name: Checkout source code - name: Checkout source code
uses: actions/checkout@v4 uses: actions/checkout@v3
- name: Install prerequisites - name: Install prerequisites
shell: bash shell: bash
@ -107,24 +85,20 @@ jobs:
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac esac
- name: Install Rust toolchain - name: Extract crate information
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.job.target }}
# On windows, for now build with 1.77.2, so that it works on windows 7.
# When we update the MSRV again, we'll need to revisit this, and probably drop support for Win7
toolchain: "${{ contains(matrix.job.target, 'windows-') && '1.77.2' || 'stable' }}"
- name: Install cross
if: matrix.job.use-cross
uses: taiki-e/install-action@v2
with:
tool: cross
- name: Overwrite build command env variable
if: matrix.job.use-cross
shell: bash shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV run: |
echo "PROJECT_NAME=fd" >> $GITHUB_ENV
echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV
echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV
echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV
- name: Install Rust toolchain
run: |
rustup set profile minimal
rustup toolchain install stable
rustup override set stable
rustup target add ${{ matrix.job.target }}
- name: Show version information (Rust, cargo, GCC) - name: Show version information (Rust, cargo, GCC)
shell: bash shell: bash
@ -136,12 +110,29 @@ jobs:
cargo -V cargo -V
rustc -V rustc -V
- name: Build - name: Set cargo cmd
shell: bash shell: bash
run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }} run: echo "CARGO_CMD=cargo" >> $GITHUB_ENV
- name: Set binary name & path - name: Set cargo cmd to cross
id: bin shell: bash
if: ${{ matrix.job.use-cross == true }}
run: echo "CARGO_CMD=cross" >> $GITHUB_ENV
- name: Rust cache
uses: Swatinem/rust-cache@v2
with:
key: ${{ matrix.job.os }}-${{ matrix.job.target }}
- name: Install cross
if: ${{ matrix.job.use-cross == true }}
run: cargo install cross
- name: Build
run: ${{ env.CARGO_CMD }} build --locked --release --target=${{ matrix.job.target }}
- name: Strip debug information from executable
id: strip
shell: bash shell: bash
run: | run: |
# Figure out suffix of binary # Figure out suffix of binary
@ -150,11 +141,29 @@ jobs:
*-pc-windows-*) EXE_suffix=".exe" ;; *-pc-windows-*) EXE_suffix=".exe" ;;
esac; esac;
# Setup paths # Figure out what strip tool to use if any
BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}" STRIP="strip"
BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}" case ${{ matrix.job.target }} in
arm-unknown-linux-*) STRIP="arm-linux-gnueabihf-strip" ;;
aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;;
*-pc-windows-msvc) STRIP="" ;;
esac;
# Let subsequent steps know where to find the binary # Setup paths
BIN_DIR="${{ env.CICD_INTERMEDIATES_DIR }}/stripped-release-bin/"
mkdir -p "${BIN_DIR}"
BIN_NAME="${{ env.PROJECT_NAME }}${EXE_suffix}"
BIN_PATH="${BIN_DIR}/${BIN_NAME}"
# Copy the release build binary to the result location
cp "target/${{ matrix.job.target }}/release/${BIN_NAME}" "${BIN_DIR}"
# Also strip if possible
if [ -n "${STRIP}" ]; then
"${STRIP}" "${BIN_PATH}"
fi
# Let subsequent steps know where to find the (stripped) bin
echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT
@ -164,12 +173,11 @@ jobs:
run: | run: |
# test only library unit tests and binary for arm-type targets # test only library unit tests and binary for arm-type targets
unset CARGO_TEST_OPTIONS unset CARGO_TEST_OPTIONS
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac; unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT
- name: Run tests - name: Run tests
shell: bash run: ${{ env.CARGO_CMD }} test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
- name: Generate completions - name: Generate completions
id: completions id: completions
@ -181,7 +189,7 @@ jobs:
shell: bash shell: bash
run: | run: |
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac; PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }} PKG_BASENAME=${PROJECT_NAME}-v${PROJECT_VERSION}-${{ matrix.job.target }}
PKG_NAME=${PKG_BASENAME}${PKG_suffix} PKG_NAME=${PKG_BASENAME}${PKG_suffix}
echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT
@ -190,14 +198,14 @@ jobs:
mkdir -p "${ARCHIVE_DIR}" mkdir -p "${ARCHIVE_DIR}"
# Binary # Binary
cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR" cp "${{ steps.strip.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
# Man page
cp 'doc/${{ env.PROJECT_NAME }}.1' "$ARCHIVE_DIR"
# README, LICENSE and CHANGELOG files # README, LICENSE and CHANGELOG files
cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR" cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR"
# Man page
cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR"
# Autocompletion files # Autocompletion files
cp -r autocomplete "${ARCHIVE_DIR}" cp -r autocomplete "${ARCHIVE_DIR}"
@ -222,10 +230,10 @@ jobs:
DPKG_DIR="${DPKG_STAGING}/dpkg" DPKG_DIR="${DPKG_STAGING}/dpkg"
mkdir -p "${DPKG_DIR}" mkdir -p "${DPKG_DIR}"
DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }} DPKG_BASENAME=${PROJECT_NAME}
DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }}-musl DPKG_CONFLICTS=${PROJECT_NAME}-musl
case ${{ matrix.job.target }} in *-musl*) DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac; case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${PROJECT_NAME}-musl ; DPKG_CONFLICTS=${PROJECT_NAME} ;; esac;
DPKG_VERSION=${{ needs.crate_metadata.outputs.version }} DPKG_VERSION=${PROJECT_VERSION}
unset DPKG_ARCH unset DPKG_ARCH
case ${{ matrix.job.target }} in case ${{ matrix.job.target }} in
@ -240,16 +248,16 @@ jobs:
echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT
# Binary # Binary
install -Dm755 "${{ steps.bin.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.bin.outputs.BIN_NAME }}" install -Dm755 "${{ steps.strip.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.strip.outputs.BIN_NAME }}"
# Man page # Man page
install -Dm644 'doc/${{ needs.crate_metadata.outputs.name }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1" install -Dm644 'doc/${{ env.PROJECT_NAME }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1" gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
# Autocompletion files # Autocompletion files
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ needs.crate_metadata.outputs.name }}" install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ env.PROJECT_NAME }}"
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ needs.crate_metadata.outputs.name }}.fish" install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ env.PROJECT_NAME }}.fish"
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ needs.crate_metadata.outputs.name }}" install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ env.PROJECT_NAME }}"
# README and LICENSE # README and LICENSE
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md" install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
@ -260,12 +268,12 @@ jobs:
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: ${{ needs.crate_metadata.outputs.name }} Upstream-Name: ${{ env.PROJECT_NAME }}
Source: ${{ needs.crate_metadata.outputs.homepage }} Source: ${{ env.PROJECT_HOMEPAGE }}
Files: * Files: *
Copyright: ${{ needs.crate_metadata.outputs.maintainer }} Copyright: ${{ env.PROJECT_MAINTAINER }}
Copyright: $COPYRIGHT_YEARS ${{ needs.crate_metadata.outputs.maintainer }} Copyright: $COPYRIGHT_YEARS ${{ env.PROJECT_MAINTAINER }}
License: Apache-2.0 or MIT License: Apache-2.0 or MIT
License: Apache-2.0 License: Apache-2.0
@ -306,10 +314,10 @@ jobs:
Version: ${DPKG_VERSION} Version: ${DPKG_VERSION}
Section: utils Section: utils
Priority: optional Priority: optional
Maintainer: ${{ needs.crate_metadata.outputs.maintainer }} Maintainer: ${{ env.PROJECT_MAINTAINER }}
Homepage: ${{ needs.crate_metadata.outputs.homepage }} Homepage: ${{ env.PROJECT_HOMEPAGE }}
Architecture: ${DPKG_ARCH} Architecture: ${DPKG_ARCH}
Provides: ${{ needs.crate_metadata.outputs.name }} Provides: ${{ env.PROJECT_NAME }}
Conflicts: ${DPKG_CONFLICTS} Conflicts: ${DPKG_CONFLICTS}
Description: simple, fast and user-friendly alternative to find Description: simple, fast and user-friendly alternative to find
fd is a program to find entries in your filesystem. fd is a program to find entries in your filesystem.
@ -345,7 +353,7 @@ jobs:
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
- name: Publish archives and packages - name: Publish archives and packages
uses: softprops/action-gh-release@v2 uses: softprops/action-gh-release@v1
if: steps.is-release.outputs.IS_RELEASE if: steps.is-release.outputs.IS_RELEASE
with: with:
files: | files: |
@ -353,15 +361,3 @@ jobs:
${{ steps.debian-package.outputs.DPKG_PATH }} ${{ steps.debian-package.outputs.DPKG_PATH }}
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
winget:
name: Publish to Winget
runs-on: ubuntu-latest
needs: build
if: startsWith(github.ref, 'refs/tags/v')
steps:
- uses: vedantmgoyal2009/winget-releaser@v2
with:
identifier: sharkdp.fd
installers-regex: '-pc-windows-msvc\.zip$'
token: ${{ secrets.WINGET_TOKEN }}

View File

@ -1,103 +1,3 @@
# 10.1.0
## Features
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto" to force whether the cwd prefix is stripped or not.
- Add a `--format` option which allows using a format template for direct output similar to the template used for `--exec`. (#1043)
## Bugfixes
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
## Other
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
# v10.0.0
## Features
- Add `dir` as an alias to `directory` when using `-t` \ `--type`, see #1460 and #1464 (@Ato2207).
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature
was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. And there isn't really a good way for us to add
a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.
See #1457.
## Bugfixes
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
- Fix bug that would cause hidden files to be included despite gitignore rules
if search path is "." (#1461, BurntSushi/ripgrep#2711).
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
have a larger system page size than the system that the binary was built on. (#1547)
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
## Changes
- Minimum supported rust version is now 1.77.2
# v9.0.0
## Performance
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
- The default number of threads is now constrained to be at most 64. This should improve startup time on
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
use cases, see #1452 and #1313 (@tavianator).
## Features
- Support character and block device file types, see #1213 and #1336 (@cgzones)
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
## Bugfixes
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
## Other
- Fixed documentation typos, see #1409 (@marcospb19)
## Thanks
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
# v8.7.1
## Bugfixes
- `-1` properly conflicts with the exec family of options.
- `--max-results` overrides `-1`
- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive
- `--changed-within` now accepts a space as well as a "T" as the separator between date and time (due to update of chrono dependency)
## Other
- Many dependencies were updated
- Some documentation was updated and fixed
# v8.7.0
## Features
- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)
## Bugfixes
- Fix logic for when to use global ignore file. There was a bug where the only case where the
global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`
nor `--no-global-ignore-file` is passed. See #1209
# v8.6.0 # v8.6.0
## Features ## Features

View File

@ -13,11 +13,11 @@ give us the chance to discuss any potential changes first.
## Add an entry to the changelog ## Add an entry to the changelog
If your contribution changes the behavior of `fd` (as opposed to a typo-fix If your contribution changes the behavior of `fd` (as opposed to a typo-fix
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md) file
and describe your changes. This makes the release process much easier and and describe your changes. This makes the release process much easier and
therefore helps to get your changes into a new `fd` release faster. therefore helps to get your changes into a new `fd` release faster.
The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few The top of the `CHANGELOG` contains an *"unreleased"* section with a few
subsections (Features, Bugfixes, …). Please add your entry to the subsection subsections (Features, Bugfixes, …). Please add your entry to the subsection
that best describes your change. that best describes your change.

892
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -12,13 +12,12 @@ keywords = [
"filesystem", "filesystem",
"tool", "tool",
] ]
license = "MIT OR Apache-2.0" license = "MIT/Apache-2.0"
name = "fd-find" name = "fd-find"
readme = "README.md" readme = "README.md"
repository = "https://github.com/sharkdp/fd" repository = "https://github.com/sharkdp/fd"
version = "10.1.0" version = "8.6.0"
edition= "2021" edition= "2021"
rust-version = "1.77.2"
[badges.appveyor] [badges.appveyor]
repository = "sharkdp/fd" repository = "sharkdp/fd"
@ -34,38 +33,41 @@ path = "src/main.rs"
version_check = "0.9" version_check = "0.9"
[dependencies] [dependencies]
aho-corasick = "1.1" nu-ansi-term = "0.46"
nu-ansi-term = "0.50"
argmax = "0.3.1" argmax = "0.3.1"
ignore = "0.4.22" atty = "0.2"
regex = "1.10.3" ignore = "0.4.3"
regex-syntax = "0.8" num_cpus = "1.13"
regex = "1.7.0"
regex-syntax = "0.6"
ctrlc = "3.2" ctrlc = "3.2"
humantime = "2.1" humantime = "2.1"
globset = "0.4" globset = "0.4"
anyhow = "1.0" anyhow = "1.0"
etcetera = "0.8" dirs-next = "2.0"
normpath = "1.1.1" normpath = "0.3.2"
crossbeam-channel = "0.5.13" once_cell = "1.15.0"
clap_complete = {version = "4.4.9", optional = true} crossbeam-channel = "0.5.6"
clap_complete = {version = "4.0.6", optional = true}
faccess = "0.2.4" faccess = "0.2.4"
[dependencies.clap] [dependencies.clap]
version = "4.4.13" version = "4.0.22"
features = ["suggestions", "color", "wrap_help", "cargo", "derive"] features = ["suggestions", "color", "wrap_help", "cargo", "unstable-grouped", "derive"]
[dependencies.chrono] [dependencies.chrono]
version = "0.4.38" version = "0.4.23"
default-features = false default-features = false
features = ["std", "clock"] features = ["std", "clock"]
[dependencies.lscolors] [dependencies.lscolors]
version = "0.17" version = "0.13"
default-features = false default-features = false
features = ["nu-ansi-term"] features = ["nu-ansi-term"]
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
nix = { version = "0.29.0", default-features = false, features = ["signal", "user"] } users = "0.11.0"
nix = { version = "0.24.2", default-features = false, features = ["signal"] }
[target.'cfg(all(unix, not(target_os = "redox")))'.dependencies] [target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
libc = "0.2" libc = "0.2"
@ -73,18 +75,17 @@ libc = "0.2"
# FIXME: Re-enable jemalloc on macOS # FIXME: Re-enable jemalloc on macOS
# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS # jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS
# Catalina. See https://github.com/sharkdp/fd/issues/498 for details. # Catalina. See https://github.com/sharkdp/fd/issues/498 for details.
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies] [target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
jemallocator = {version = "0.5.4", optional = true} jemallocator = {version = "0.5.0", optional = true}
[dev-dependencies] [dev-dependencies]
diff = "0.1" diff = "0.1"
tempfile = "3.10" tempfile = "3.3"
filetime = "0.2" filetime = "0.2"
test-case = "3.3" test-case = "2.2"
[profile.release] [profile.release]
lto = true lto = true
strip = true
codegen-units = 1 codegen-units = 1
[features] [features]

View File

@ -1,6 +0,0 @@
# https://github.com/sharkdp/fd/issues/1085
[target.aarch64-unknown-linux-gnu.env]
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
[target.aarch64-unknown-linux-musl.env]
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]

View File

@ -6,7 +6,7 @@ datadir=$(prefix)/share
exe_name=fd exe_name=fd
$(EXE): Cargo.toml src/**/*.rs $(EXE): Cargo.toml src/**/*.rs
cargo build --profile $(PROFILE) --locked cargo build --profile $(PROFILE)
.PHONY: completions .PHONY: completions
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd

192
README.md
View File

@ -2,7 +2,7 @@
[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml) [![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml)
[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find) [![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)
[[中文](https://github.com/cha0ran/fd-zh)] [[中文](https://github.com/chinanf-boy/fd-zh)]
[[한국어](https://github.com/spearkkk/fd-kor)] [[한국어](https://github.com/spearkkk/fd-kor)]
`fd` is a program to find entries in your filesystem. `fd` is a program to find entries in your filesystem.
@ -10,7 +10,10 @@ It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.o
While it does not aim to support all of `find`'s powerful functionality, it provides sensible While it does not aim to support all of `find`'s powerful functionality, it provides sensible
(opinionated) defaults for a majority of use cases. (opinionated) defaults for a majority of use cases.
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting) Quick links:
* [How to use](#how-to-use)
* [Installation](#installation)
* [Troubleshooting](#troubleshooting)
## Features ## Features
@ -140,7 +143,7 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
``` ```
To really search *all* files and directories, simply combine the hidden and ignore features to show To really search *all* files and directories, simply combine the hidden and ignore features to show
everything (`-HI`) or use `-u`/`--unrestricted`. everything (`-HI`).
### Matching the full path ### Matching the full path
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option, By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
@ -258,17 +261,12 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
/mnt/external-drive /mnt/external-drive
*.bak *.bak
``` ```
Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
> [!NOTE]
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file. If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
This is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\fd\ignore` in This is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\fd\ignore` in
Windows. Windows.
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
are not included in output if you use the `--hidden` option.
### Deleting files ### Deleting files
You can use `fd` to remove all files and directories that are matched by your search pattern. You can use `fd` to remove all files and directories that are matched by your search pattern.
@ -286,8 +284,7 @@ option:
If you also want to remove a certain class of directories, you can use the same technique. You will If you also want to remove a certain class of directories, you can use the same technique. You will
have to use `rm`s `--recursive`/`-r` flag to remove directories. have to use `rm`s `--recursive`/`-r` flag to remove directories.
> [!NOTE] Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo': situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
No such file or directory"* errors in the `rm` call. No such file or directory"* errors in the `rm` call.
@ -316,76 +313,81 @@ Options:
-p, --full-path Search full abs. path (default: filename only) -p, --full-path Search full abs. path (default: filename only)
-d, --max-depth <depth> Set maximum search depth (default: none) -d, --max-depth <depth> Set maximum search depth (default: none)
-E, --exclude <pattern> Exclude entries that match the given glob pattern -E, --exclude <pattern> Exclude entries that match the given glob pattern
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l), -t, --type <filetype> Filter by type: file (f), directory (d), symlink (l),
executable (x), empty (e), socket (s), pipe (p), char-device executable (x), empty (e), socket (s), pipe (p)
(c), block-device (b)
-e, --extension <ext> Filter by file extension -e, --extension <ext> Filter by file extension
-S, --size <size> Limit results based on the size of files -S, --size <size> Limit results based on the size of files
--changed-within <date|dur> Filter by file modification time (newer than) --changed-within <date|dur> Filter by file modification time (newer than)
--changed-before <date|dur> Filter by file modification time (older than) --changed-before <date|dur> Filter by file modification time (older than)
-o, --owner <user:group> Filter by owning user and/or group -o, --owner <user:group> Filter by owning user and/or group
--format <fmt> Print results according to template
-x, --exec <cmd>... Execute a command for each search result -x, --exec <cmd>... Execute a command for each search result
-X, --exec-batch <cmd>... Execute a command with all search results at once -X, --exec-batch <cmd>... Execute a command with all search results at once
-c, --color <when> When to use colors [default: auto] [possible values: auto, -c, --color <when> When to use colors [default: auto] [possible values: auto,
always, never] always, never]
-h, --help Print help (see more with '--help') -h, --help Print help information (use `--help` for more detail)
-V, --version Print version -V, --version Print version information
``` ```
## Benchmark ## Benchmark
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000 Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
subdirectories and about a 4 million files. For averaging and statistical analysis, I'm using subdirectories and about a million files. For averaging and statistical analysis, I'm using
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed [hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends). with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
Let's start with `find`: Let's start with `find`:
``` ```
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$' Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
Time (mean ± σ): 19.922 s ± 0.109 s
Range (min … max): 19.765 s … 20.065 s Time (mean ± σ): 7.236 s ± 0.090 s
Range (min … max): 7.133 s … 7.385 s
``` ```
`find` is much faster if it does not need to perform a regular-expression search: `find` is much faster if it does not need to perform a regular-expression search:
``` ```
Benchmark 2: find ~ -iname '*[0-9].jpg' Benchmark #2: find ~ -iname '*[0-9].jpg'
Time (mean ± σ): 11.226 s ± 0.104 s
Range (min … max): 11.119 s … 11.466 s Time (mean ± σ): 3.914 s ± 0.027 s
Range (min … max): 3.876 s … 3.964 s
``` ```
Now let's try the same for `fd`. Note that `fd` performs a regular expression Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
search by default. The options `-u`/`--unrestricted` option is needed here for search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
ignored paths (see below):
``` ```
Benchmark 3: fd -u '[0-9]\.jpg$' ~ Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
Time (mean ± σ): 854.8 ms ± 10.0 ms
Range (min … max): 839.2 ms … 868.9 ms
```
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
same 546 files :smile:.
**Note**: This is *one particular* benchmark on *one particular* machine. While we have Time (mean ± σ): 811.6 ms ± 26.9 ms
performed a lot of different tests (and found consistent results), things might
be different for you! We encourage everyone to try it out on their own. See Range (min … max): 786.0 ms … 870.7 ms
```
For this particular example, `fd` is approximately nine times faster than `find -iregex`
and about five times faster than `find -iname`. By the way, both tools found the exact
same 20880 files :smile:.
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
folders, it is almost an order of magnitude faster:
```
Benchmark #4: fd '[0-9]\.jpg$' ~
Time (mean ± σ): 123.7 ms ± 6.0 ms
Range (min … max): 118.8 ms … 140.0 ms
```
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
performed quite a lot of different tests (and found consistent results), things might
be different for you! I encourage everyone to try it out on their own. See
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts. [this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!). in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
## Troubleshooting ## Troubleshooting
### `fd` does not find my file!
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
use the options `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):
``` bash
> fd -u …
```
### Colorized output ### Colorized output
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment `fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
@ -399,6 +401,15 @@ for alternative, more complete (or more colorful) variants, see [here](https://g
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable. `fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
### `fd` does not find my file!
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
use the options `-H` and `-I` to disable these two features:
``` bash
> fd -HI …
```
### `fd` doesn't seem to interpret my regex pattern correctly ### `fd` doesn't seem to interpret my regex pattern correctly
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
@ -477,17 +488,16 @@ In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alt
### Printing the output as a tree ### Printing the output as a tree
To format the output of `fd` as a file-tree you can use the `tree` command with To format the output of `fd` similar to the `tree` command, install [`as-tree`] and pipe the output
`--fromfile`: of `fd` to `as-tree`:
```bash ```bash
fd | tree --fromfile fd | as-tree
``` ```
This can be more useful than running `tree` by itself because `tree` does not This can be more useful than running `tree` by itself because `tree` does not ignore any files by
ignore any files by default, nor does it support as rich a set of options as default, nor does it support as rich a set of options as `fd` does to control what to print:
`fd` does to control what to print:
```bash ```bash
fd --extension rs | tree --fromfile fd --extension rs | as-tree
. .
├── build.rs ├── build.rs
└── src └── src
@ -495,10 +505,9 @@ ignore any files by default, nor does it support as rich a set of options as
└── error.rs └── error.rs
``` ```
On bash and similar you can simply create an alias: For more information about `as-tree`, see [the `as-tree` README][`as-tree`].
```bash
alias as-tree='tree --fromfile' [`as-tree`]: https://github.com/jez/as-tree
```
### Using fd with `xargs` or `parallel` ### Using fd with `xargs` or `parallel`
@ -521,7 +530,7 @@ newlines). In the same way, the `-0` option of `xargs` tells it to read the inpu
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
[officially maintained package](https://packages.ubuntu.com/fd-find): [officially maintained package](https://packages.ubuntu.com/fd-find):
``` ```
apt install fd-find sudo apt install fd-find
``` ```
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package. Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
It is recommended that after installation, you add a link to `fd` by executing command It is recommended that after installation, you add a link to `fd` by executing command
@ -531,7 +540,7 @@ Make sure that `$HOME/.local/bin` is in your `$PATH`.
If you use an older version of Ubuntu, you can download the latest `.deb` package from the If you use an older version of Ubuntu, you can download the latest `.deb` package from the
[release page](https://github.com/sharkdp/fd/releases) and install it via: [release page](https://github.com/sharkdp/fd/releases) and install it via:
``` bash ``` bash
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture sudo dpkg -i fd_8.6.0_amd64.deb # adapt version number and architecture
``` ```
### On Debian ### On Debian
@ -539,7 +548,7 @@ dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
If you run Debian Buster or newer, you can install the If you run Debian Buster or newer, you can install the
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find): [officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
``` ```
apt-get install fd-find sudo apt-get install fd-find
``` ```
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package. Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
It is recommended that after installation, you add a link to `fd` by executing command It is recommended that after installation, you add a link to `fd` by executing command
@ -567,8 +576,6 @@ You can install [the fd package](https://www.archlinux.org/packages/community/x8
``` ```
pacman -S fd pacman -S fd
``` ```
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
### On Gentoo Linux ### On Gentoo Linux
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo: You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
@ -590,31 +597,22 @@ You can install `fd` via xbps-install:
xbps-install -S fd xbps-install -S fd
``` ```
### On ALT Linux ### On RedHat Enterprise Linux 8 (RHEL8), Almalinux 8, EuroLinux 8 or Rocky Linux 8
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo: Get the latest fd-v*-x86_64-unknown-linux-gnu.tar.gz file from [sharkdp on github](https://github.com/sharkdp/fd/releases)
``` ```
apt-get install fd tar xf fd-v*-x86_64-unknown-linux-gnu.tar.gz
chown -R root:root fd-v*-x86_64-unknown-linux-gnu
cd fd-v*-x86_64-unknown-linux-gnu
sudo cp fd /bin
gzip fd.1
chown root:root fd.1.gz
sudo cp fd.1.gz /usr/share/man/man1
sudo cp autocomplete/fd.bash /usr/share/bash-completion/completions/fd
source /usr/share/bash-completion/completions/fd
fd
``` ```
### On Solus
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
```
eopkg install fd
```
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
```bash
dnf copr enable tkbcopr/fd
dnf install fd
```
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
### On macOS ### On macOS
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd): You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
@ -624,7 +622,7 @@ brew install fd
… or with MacPorts: … or with MacPorts:
``` ```
port install fd sudo port install fd
``` ```
### On Windows ### On Windows
@ -641,11 +639,6 @@ Or via [Chocolatey](https://chocolatey.org):
choco install fd choco install fd
``` ```
Or via [Winget](https://learn.microsoft.com/en-us/windows/package-manager/):
```
winget install sharkdp.fd
```
### On GuixOS ### On GuixOS
You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo: You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:
@ -660,13 +653,6 @@ You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
nix-env -i fd nix-env -i fd
``` ```
### Via Flox
You can use [Flox](https://flox.dev) to install `fd` into a Flox environment:
```
flox install fd
```
### On FreeBSD ### On FreeBSD
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo: You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
@ -676,7 +662,7 @@ pkg install fd-find
### From npm ### From npm
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package: On linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
``` ```
npm install -g fd-find npm install -g fd-find
@ -688,7 +674,7 @@ With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can
``` ```
cargo install fd-find cargo install fd-find
``` ```
Note that rust version *1.77.2* or later is required. Note that rust version *1.60.0* or later is required.
`make` is also needed for the build. `make` is also needed for the build.
@ -719,6 +705,8 @@ cargo install --path .
## License ## License
Copyright (c) 2017-2021 The fd developers
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0. `fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details. See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.

View File

@ -1,5 +1,5 @@
fn main() { fn main() {
let min_version = "1.64"; let min_version = "1.60";
match version_check::is_min_version(min_version) { match version_check::is_min_version(min_version) {
Some(true) => {} Some(true) => {}

1
clippy.toml Normal file
View File

@ -0,0 +1 @@
msrv = "1.60.0"

View File

@ -26,8 +26,6 @@ _fd() {
{l,symlink}'\:"symbolic links"' {l,symlink}'\:"symbolic links"'
{e,empty}'\:"empty files or directories"' {e,empty}'\:"empty files or directories"'
{x,executable}'\:"executable (files)"' {x,executable}'\:"executable (files)"'
{b,block-device}'\:"block devices"'
{c,char-device}'\:"character devices"'
{s,socket}'\:"sockets"' {s,socket}'\:"sockets"'
{p,pipe}'\:"named pipes (FIFOs)"' {p,pipe}'\:"named pipes (FIFOs)"'
) )
@ -38,7 +36,7 @@ _fd() {
# for all of the potential negation options listed below! # for all of the potential negation options listed below!
if if
# (--[bpsu]* => match all options marked with '$no') # (--[bpsu]* => match all options marked with '$no')
[[ $PREFIX$SUFFIX == --[bopsun]* ]] || [[ $PREFIX$SUFFIX == --[bopsu]* ]] ||
zstyle -t ":complete:$curcontext:*" complete-all zstyle -t ":complete:$curcontext:*" complete-all
then then
no= no=
@ -72,9 +70,6 @@ _fd() {
{-g,--glob}'[perform a glob-based search]' {-g,--glob}'[perform a glob-based search]'
{-F,--fixed-strings}'[treat pattern as literal string instead of a regex]' {-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'
+ '(no-require-git)'
"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]"
+ '(match-full)' # match against full path + '(match-full)' # match against full path
{-p,--full-path}'[match the pattern against the full path instead of the basename]' {-p,--full-path}'[match the pattern against the full path instead of the basename]'
@ -123,7 +118,6 @@ _fd() {
+ '(filter-mtime-newer)' # filter by files modified after than + '(filter-mtime-newer)' # filter by files modified after than
'--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration' '--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'
'--changed-after=[alias for --changed-within]:date/duration'
'!--change-newer-than=:date/duration' '!--change-newer-than=:date/duration'
'!--newer=:date/duration' '!--newer=:date/duration'
@ -162,11 +156,7 @@ _fd() {
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/' $no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
+ strip-cwd-prefix + strip-cwd-prefix
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=[When to strip ./]:when:(always never auto)' $no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]'
+ and
'--and=[additional required search path]:pattern'
+ args # positional arguments + args # positional arguments
'1: :_guard "^-*" pattern' '1: :_guard "^-*" pattern'

140
doc/fd.1 vendored
View File

@ -29,19 +29,11 @@ By default
.B fd .B fd
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
with the '\-\-glob' option. with the '\-\-glob' option.
.P
By default
.B fd
will exclude hidden files and directories, as well as any files that match gitignore rules
or ignore rules in .ignore or .fdignore files.
.SH OPTIONS .SH OPTIONS
.TP .TP
.B \-H, \-\-hidden .B \-H, \-\-hidden
Include hidden files and directories in the search results Include hidden files and directories in the search results
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'. (default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
.IP
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
is also used.
.TP .TP
.B \-I, \-\-no\-ignore .B \-I, \-\-no\-ignore
Show search results from files and directories that would otherwise be ignored by Show search results from files and directories that would otherwise be ignored by
@ -79,14 +71,6 @@ git setting, which defaults to
.IR $HOME/.config/git/ignore ). .IR $HOME/.config/git/ignore ).
The flag can be overridden with '--ignore-vcs'. The flag can be overridden with '--ignore-vcs'.
.TP .TP
.B \-\-no\-require\-git
Do not require a git repository to respect gitignores. By default, fd will only
respect global gitignore rules, .gitignore rules and local exclude rules if fd
detects that you are searching inside a git repository. This flag allows you to
relax this restriction such that fd will respect all git related ignore rules
regardless of whether youre searching in a git repository or not. The flag can
be overridden with '--require-git'.
.TP
.B \-\-no\-ignore\-parent .B \-\-no\-ignore\-parent
Show search results from files and directories that would otherwise be ignored by gitignore files in Show search results from files and directories that would otherwise be ignored by gitignore files in
parent directories. parent directories.
@ -110,11 +94,6 @@ Perform a regular-expression based search (default). This can be used to overrid
Treat the pattern as a literal string instead of a regular expression. Note that this also Treat the pattern as a literal string instead of a regular expression. Note that this also
performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'. performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'.
.TP .TP
.BI "\-\-and " pattern
Add additional required search patterns, all of which must be matched. Multiple additional
patterns can be specified. The patterns are regular expressions, unless '\-\-glob'
or '\-\-fixed\-strings' is used.
.TP
.B \-a, \-\-absolute\-path .B \-a, \-\-absolute\-path
Shows the full path starting from the root as opposed to relative paths. Shows the full path starting from the root as opposed to relative paths.
The flag can be overridden with '--relative-path'. The flag can be overridden with '--relative-path'.
@ -156,20 +135,9 @@ can be used as an alias.
Enable the display of filesystem errors for situations such as insufficient Enable the display of filesystem errors for situations such as insufficient
permissions or dead symlinks. permissions or dead symlinks.
.TP .TP
.B \-\-strip-cwd-prefix [when] .B \-\-strip-cwd-prefix
By default, relative paths are prefixed with './' when -x/--exec, By default, relative paths are prefixed with './' when the output goes to a non interactive terminal
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a (TTY). Use this flag to disable this behaviour.
path starting with '-' being treated as a command line option. Use
this flag to change this behavior. If this flag is used without a value,
it is equivalent to passing "always". Possible values are:
.RS
.IP never
Never strip the ./ at the beginning of paths
.IP always
Always strip the ./ at the beginning of paths
.IP auto
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
.RE
.TP .TP
.B \-\-one\-file\-system, \-\-mount, \-\-xdev .B \-\-one\-file\-system, \-\-mount, \-\-xdev
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1). By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
@ -199,14 +167,10 @@ Filter search by type:
.RS .RS
.IP "f, file" .IP "f, file"
regular files regular files
.IP "d, dir, directory" .IP "d, directory"
directories directories
.IP "l, symlink" .IP "l, symlink"
symbolic links symbolic links
.IP "b, block-device"
block devices
.IP "c, char-device"
character devices
.IP "s, socket" .IP "s, socket"
sockets sockets
.IP "p, pipe" .IP "p, pipe"
@ -320,9 +284,8 @@ tebibytes
Filter results based on the file modification time. Filter results based on the file modification time.
Files with modification times greater than the argument will be returned. Files with modification times greater than the argument will be returned.
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
in time as full RFC3339 format with time zone, as a date or datetime in the in time in either full RFC3339 format with time zone, or as a date or datetime in the
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@' local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
followed by the number of seconds since the Unix epoch (@[0-9]+).
\fB\-\-change-newer-than\fR, \fB\-\-change-newer-than\fR,
.B --newer .B --newer
or or
@ -333,15 +296,13 @@ Examples:
\-\-changed-within 2weeks \-\-changed-within 2weeks
\-\-change-newer-than "2018-10-27 10:00:00" \-\-change-newer-than "2018-10-27 10:00:00"
\-\-newer 2018-10-27 \-\-newer 2018-10-27
\-\-changed-after @1704067200
.TP .TP
.BI "\-\-changed-before " date|duration .BI "\-\-changed-before " date|duration
Filter results based on the file modification time. Filter results based on the file modification time.
Files with modification times less than the argument will be returned. Files with modification times less than the argument will be returned.
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
in time as full RFC3339 format with time zone, as a date or datetime in the in time in either full RFC3339 format with time zone, or as a date or datetime in the
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@' local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
followed by the number of seconds since the Unix epoch (@[0-9]+).
.B --change-older-than .B --change-older-than
or or
.B --older .B --older
@ -350,7 +311,6 @@ can be used as aliases.
Examples: Examples:
\-\-changed-before "2018-10-27 10:00:00" \-\-changed-before "2018-10-27 10:00:00"
\-\-change-older-than 2weeks \-\-change-older-than 2weeks
\-\-older @1704067200
.TP .TP
.BI "-o, \-\-owner " [user][:group] .BI "-o, \-\-owner " [user][:group]
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
@ -375,30 +335,6 @@ Set the path separator to use when printing file paths. The default is the OS-sp
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\' \'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
.TP .TP
.BI "\-\-format " fmt
Specify a template string that is used for printing a line for each file found.
The following placeholders are substituted into the string for each file before printing:
.RS
.IP {}
path (of the current search result)
.IP {/}
basename
.IP {//}
parent directory
.IP {.}
path without file extension
.IP {/.}
basename without file extension
.IP {{
literal '{' (an escape sequence)
.IP }}
literal '}' (an escape sequence)
.P
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
useful if you need to include the literal text of one of the above placeholders.
.RE
.TP
.BI "\-x, \-\-exec " command .BI "\-x, \-\-exec " command
.RS .RS
Execute Execute
@ -415,13 +351,19 @@ This option can be specified multiple times, in which case all commands are run
file found, in the order they are provided. In that case, you must supply a ';' argument for file found, in the order they are provided. In that case, you must supply a ';' argument for
all but the last commands. all but the last commands.
If parallelism is enabled, the order commands will be executed in is non-deterministic. And even with The following placeholders are substituted before the command is executed:
--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is .RS
recommended that you don't rely on any ordering of the results. .IP {}
path (of the current search result)
Before executing the command, any placeholder patterns in the command are replaced with the .IP {/}
corresponding values for the current file. The same placeholders are used as in the "\-\-format" basename
option. .IP {//}
parent directory
.IP {.}
path without file extension
.IP {/.}
basename without file extension
.RE
If no placeholder is present, an implicit "{}" at the end is assumed. If no placeholder is present, an implicit "{}" at the end is assumed.
@ -445,12 +387,19 @@ Examples:
Execute Execute
.I command .I command
once, with all search results as arguments. once, with all search results as arguments.
One of the following placeholders is substituted before the command is executed:
The order of the arguments is non-deterministic and should not be relied upon. .RS
.IP {}
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of expanding path (of all search results)
once per command invocation each argument containing a placeholder is expanding for every .IP {/}
file in a batch and passed as separate arguments. basename
.IP {//}
parent directory
.IP {.}
path without file extension
.IP {/.}
basename without file extension
.RE
If no placeholder is present, an implicit "{}" at the end is assumed. If no placeholder is present, an implicit "{}" at the end is assumed.
@ -499,17 +448,6 @@ is set, use
.IR $XDG_CONFIG_HOME/fd/ignore . .IR $XDG_CONFIG_HOME/fd/ignore .
Otherwise, use Otherwise, use
.IR $HOME/.config/fd/ignore . .IR $HOME/.config/fd/ignore .
.SH FILES
.TP
.B .fdignore
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
that should be excluded from the search. However, this file is specific to fd, and will be used even
if the --no-ignore-vcs option is used.
.TP
.B $XDG_CONFIG_HOME/fd/ignore
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore)
ignore entries in this file will be ignored, as if it was an .fdignore file in the
current directory.
.SH EXAMPLES .SH EXAMPLES
.TP .TP
.RI "Find files and directories that match the pattern '" needle "':" .RI "Find files and directories that match the pattern '" needle "':"
@ -523,16 +461,6 @@ $ fd -e py
.TP .TP
.RI "Open all search results with vim:" .RI "Open all search results with vim:"
$ fd pattern -X vim $ fd pattern -X vim
.SH Tips and Tricks
.IP \[bu]
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
".git" folders will be ignored by default, even when the --hidden option is used.
.IP \[bu]
You can use a shell alias or a wrapper script in order to pass desired flags to fd
by default. For example if you do not like the default behavior of respecting gitignore,
you can use
`alias fd="/usr/bin/fd --no-ignore-vcs"`
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
.SH BUGS .SH BUGS
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
.SH SEE ALSO .SH SEE ALSO

View File

@ -9,7 +9,7 @@ necessary changes for the upcoming release.
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`. - [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
Make sure to `git add` the `Cargo.lock` changes as well. Make sure to `git add` the `Cargo.lock` changes as well.
- [ ] Find the current min. supported Rust version by running - [ ] Find the current min. supported Rust version by running
`grep rust-version Cargo.toml`. `grep '^\s*MIN_SUPPORTED_RUST_VERSION' .github/workflows/CICD.yml`.
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`. - [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section - [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
to the version of this release. to the version of this release.

12
doc/sponsors.md vendored
View File

@ -1,12 +0,0 @@
## Sponsors
`fd` development is sponsored by many individuals and companies. Thank you very much!
Please note, that being sponsored does not affect the individuality of the `fd`
project or affect the maintainers' actions in any way.
We remain impartial and continue to assess pull requests solely on merit - the
features added, bugs solved, and effect on the overall complexity of the code.
No issue will have a different priority based on sponsorship status of the
reporter.
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 7.2 KiB

View File

@ -1 +0,0 @@
# Defaults are used

View File

@ -1,22 +0,0 @@
#!/usr/bin/bash
set -eu
# This script automates the "Version bump" section
version="$1"
if [[ -z $version ]]; then
echo "Usage: must supply version as first argument" >&2
exit 1
fi
git switch -C "release-$version"
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md

View File

@ -1,4 +1,3 @@
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::Duration; use std::time::Duration;
@ -27,14 +26,12 @@ use crate::filter::SizeFilter;
max_term_width = 98, max_term_width = 98,
args_override_self = true, args_override_self = true,
group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[ group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
"max_results", "quiet", "max_one_result"])), "max_results", "has_results", "count"])),
)] )]
pub struct Opts { pub struct Opts {
/// Include hidden directories and files in the search results (default: /// Include hidden directories and files in the search results (default:
/// hidden files and directories are skipped). Files and directories are /// hidden files and directories are skipped). Files and directories are
/// considered to be hidden if their name starts with a `.` sign (dot). /// considered to be hidden if their name starts with a `.` sign (dot).
/// Any files or directories that are ignored due to the rules described by
/// --no-ignore are still ignored unless otherwise specified.
/// The flag can be overridden with --no-hidden. /// The flag can be overridden with --no-hidden.
#[arg( #[arg(
long, long,
@ -49,7 +46,7 @@ pub struct Opts {
no_hidden: (), no_hidden: (),
/// Show search results from files and directories that would otherwise be /// Show search results from files and directories that would otherwise be
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file, /// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.
/// The flag can be overridden with --ignore. /// The flag can be overridden with --ignore.
#[arg( #[arg(
long, long,
@ -63,9 +60,8 @@ pub struct Opts {
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)] #[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
ignore: (), ignore: (),
///Show search results from files and directories that ///Show search results from files and directories that would otherwise be
///would otherwise be ignored by '.gitignore' files. /// ignored by '.gitignore' files. The flag can be overridden with --ignore-vcs.
///The flag can be overridden with --ignore-vcs.
#[arg( #[arg(
long, long,
hide_short_help = true, hide_short_help = true,
@ -78,28 +74,6 @@ pub struct Opts {
#[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)] #[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
ignore_vcs: (), ignore_vcs: (),
/// Do not require a git repository to respect gitignores.
/// By default, fd will only respect global gitignore rules, .gitignore rules,
/// and local exclude rules if fd detects that you are searching inside a
/// git repository. This flag allows you to relax this restriction such that
/// fd will respect all git related ignore rules regardless of whether you're
/// searching in a git repository or not.
///
///
/// This flag can be disabled with --require-git.
#[arg(
long,
overrides_with = "require_git",
hide_short_help = true,
// same description as ripgrep's flag: ripgrep/crates/core/app.rs
long_help
)]
pub no_require_git: bool,
/// Overrides --no-require-git
#[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
require_git: (),
/// Show search results from files and directories that would otherwise be /// Show search results from files and directories that would otherwise be
/// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories. /// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
#[arg( #[arg(
@ -226,7 +200,7 @@ pub struct Opts {
alias = "dereference", alias = "dereference",
long_help = "By default, fd does not descend into symlinked directories. Using this \ long_help = "By default, fd does not descend into symlinked directories. Using this \
flag, symbolic links are also traversed. \ flag, symbolic links are also traversed. \
Flag can be overridden with --no-follow." Flag can be overriden with --no-follow."
)] )]
pub follow: bool, pub follow: bool,
@ -313,12 +287,10 @@ pub struct Opts {
/// Filter the search by type: /// Filter the search by type:
/// {n} 'f' or 'file': regular files /// {n} 'f' or 'file': regular files
/// {n} 'd' or 'dir' or 'directory': directories /// {n} 'd' or 'directory': directories
/// {n} 'l' or 'symlink': symbolic links /// {n} 'l' or 'symlink': symbolic links
/// {n} 's' or 'socket': socket /// {n} 's' or 'socket': socket
/// {n} 'p' or 'pipe': named pipe (FIFO) /// {n} 'p' or 'pipe': named pipe (FIFO)
/// {n} 'b' or 'block-device': block device
/// {n} 'c' or 'char-device': character device
/// {n}{n} 'x' or 'executable': executables /// {n}{n} 'x' or 'executable': executables
/// {n} 'e' or 'empty': empty files or directories /// {n} 'e' or 'empty': empty files or directories
/// ///
@ -351,9 +323,8 @@ pub struct Opts {
value_name = "filetype", value_name = "filetype",
hide_possible_values = true, hide_possible_values = true,
value_enum, value_enum,
help = "Filter by type: file (f), directory (d/dir), symlink (l), \ help = "Filter by type: file (f), directory (d), symlink (l), \
executable (x), empty (e), socket (s), pipe (p), \ executable (x), empty (e), socket (s), pipe (p)",
char-device (c), block-device (b)",
long_help long_help
)] )]
pub filetype: Option<Vec<FileType>>, pub filetype: Option<Vec<FileType>>,
@ -398,7 +369,7 @@ pub struct Opts {
/// Filter results based on the file modification time. Files with modification times /// Filter results based on the file modification time. Files with modification times
/// greater than the argument are returned. The argument can be provided /// greater than the argument are returned. The argument can be provided
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min). /// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
/// If the time is not specified, it defaults to 00:00:00. /// If the time is not specified, it defaults to 00:00:00.
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases. /// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
/// ///
@ -420,7 +391,7 @@ pub struct Opts {
/// Filter results based on the file modification time. Files with modification times /// Filter results based on the file modification time. Files with modification times
/// less than the argument are returned. The argument can be provided /// less than the argument are returned. The argument can be provided
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min). /// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
/// '--change-older-than' or '--older' can be used as aliases. /// '--change-older-than' or '--older' can be used as aliases.
/// ///
/// Examples: /// Examples:
@ -452,20 +423,6 @@ pub struct Opts {
)] )]
pub owner: Option<OwnerFilter>, pub owner: Option<OwnerFilter>,
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
/// '{}': path (of the current search result)
/// '{/}': basename
/// '{//}': parent directory
/// '{.}': path without file extension
/// '{/.}': basename without file extension
#[arg(
long,
value_name = "fmt",
help = "Print results according to template",
conflicts_with = "list_details"
)]
pub format: Option<String>,
#[command(flatten)] #[command(flatten)]
pub exec: Exec, pub exec: Exec,
@ -511,8 +468,8 @@ pub struct Opts {
/// Set number of threads to use for searching & executing (default: number /// Set number of threads to use for searching & executing (default: number
/// of available CPU cores) /// of available CPU cores)
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)] #[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = clap::value_parser!(u32).range(1..))]
pub threads: Option<NonZeroUsize>, pub threads: Option<u32>,
/// Milliseconds to buffer before streaming search results to console /// Milliseconds to buffer before streaming search results to console
/// ///
@ -526,7 +483,6 @@ pub struct Opts {
long, long,
value_name = "count", value_name = "count",
hide_short_help = true, hide_short_help = true,
overrides_with("max_one_result"),
help = "Limit the number of search results", help = "Limit the number of search results",
long_help long_help
)] )]
@ -631,10 +587,9 @@ pub struct Opts {
/// By default, relative paths are prefixed with './' when -x/--exec, /// By default, relative paths are prefixed with './' when -x/--exec,
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a /// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
/// path starting with '-' being treated as a command line option. Use /// path starting with '-' being treated as a command line option. Use
/// this flag to change this behavior. If this flag is used without a value, /// this flag to disable this behaviour.
/// it is equivalent to passing "always". #[arg(long, conflicts_with_all(&["path", "search_path"]), hide_short_help = true, long_help)]
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)] pub strip_cwd_prefix: bool,
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
/// By default, fd will traverse the file system tree as far as other options /// By default, fd will traverse the file system tree as far as other options
/// dictate. With this flag, fd ensures that it does not descend into a /// dictate. With this flag, fd ensures that it does not descend into a
@ -657,7 +612,7 @@ impl Opts {
} else if !self.search_path.is_empty() { } else if !self.search_path.is_empty() {
&self.search_path &self.search_path
} else { } else {
let current_directory = Path::new("./"); let current_directory = Path::new(".");
ensure_current_directory_exists(current_directory)?; ensure_current_directory_exists(current_directory)?;
return Ok(vec![self.normalize_path(current_directory)]); return Ok(vec![self.normalize_path(current_directory)]);
}; };
@ -680,9 +635,6 @@ impl Opts {
fn normalize_path(&self, path: &Path) -> PathBuf { fn normalize_path(&self, path: &Path) -> PathBuf {
if self.absolute_path { if self.absolute_path {
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap() filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
} else if path == Path::new(".") {
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
PathBuf::from("./")
} else { } else {
path.to_path_buf() path.to_path_buf()
} }
@ -705,24 +657,23 @@ impl Opts {
self.min_depth.or(self.exact_depth) self.min_depth.or(self.exact_depth)
} }
pub fn threads(&self) -> NonZeroUsize { pub fn threads(&self) -> usize {
self.threads.unwrap_or_else(default_num_threads) // This will panic if the number of threads passed in is more than usize::MAX in an environment
// where usize is less than 32 bits (for example 16-bit architectures). It's pretty
// unlikely fd will be running in such an environment, and even more unlikely someone would
// be trying to use that many threads on such an environment, so I think panicing is an
// appropriate way to handle that.
std::cmp::max(
self.threads
.map_or_else(num_cpus::get, |n| n.try_into().expect("too many threads")),
1,
)
} }
pub fn max_results(&self) -> Option<usize> { pub fn max_results(&self) -> Option<usize> {
self.max_results self.max_results
.filter(|&m| m > 0) .filter(|&m| m > 0)
.or_else(|| self.max_one_result.then_some(1)) .or_else(|| self.max_one_result.then(|| 1))
}
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
use self::StripCwdWhen::*;
self.no_search_paths()
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
Auto => auto_pred(),
Always => true,
Never => false,
}
} }
#[cfg(feature = "completions")] #[cfg(feature = "completions")]
@ -738,32 +689,14 @@ impl Opts {
} }
} }
/// Get the default number of threads to use, if not explicitly specified.
fn default_num_threads() -> NonZeroUsize {
// If we can't get the amount of parallelism for some reason, then
// default to a single thread, because that is safe.
let fallback = NonZeroUsize::MIN;
// To limit startup overhead on massively parallel machines, don't use more
// than 64 threads.
let limit = NonZeroUsize::new(64).unwrap();
std::thread::available_parallelism()
.unwrap_or(fallback)
.min(limit)
}
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] #[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
pub enum FileType { pub enum FileType {
#[value(alias = "f")] #[value(alias = "f")]
File, File,
#[value(alias = "d", alias = "dir")] #[value(alias = "d")]
Directory, Directory,
#[value(alias = "l")] #[value(alias = "l")]
Symlink, Symlink,
#[value(alias = "b")]
BlockDevice,
#[value(alias = "c")]
CharDevice,
/// A file which is executable by the current effective user /// A file which is executable by the current effective user
#[value(alias = "x")] #[value(alias = "x")]
Executable, Executable,
@ -785,14 +718,15 @@ pub enum ColorWhen {
Never, Never,
} }
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)] impl ColorWhen {
pub enum StripCwdWhen { pub fn as_str(&self) -> &'static str {
/// Use the default behavior use ColorWhen::*;
Auto, match *self {
/// Always strip the ./ at the beginning of paths Auto => "auto",
Always, Never => "never",
/// Never strip the ./ Always => "always",
Never, }
}
} }
// there isn't a derive api for getting grouped values yet, // there isn't a derive api for getting grouped values yet,
@ -804,11 +738,11 @@ pub struct Exec {
impl clap::FromArgMatches for Exec { impl clap::FromArgMatches for Exec {
fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> { fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {
let command = matches let command = matches
.get_occurrences::<String>("exec") .grouped_values_of("exec")
.map(CommandSet::new) .map(CommandSet::new)
.or_else(|| { .or_else(|| {
matches matches
.get_occurrences::<String>("exec_batch") .grouped_values_of("exec_batch")
.map(CommandSet::new_batch) .map(CommandSet::new_batch)
}) })
.transpose() .transpose()
@ -836,7 +770,6 @@ impl clap::Args for Exec {
.help("Execute a command for each search result") .help("Execute a command for each search result")
.long_help( .long_help(
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \ "Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
All positional arguments following --exec are considered to be arguments to the command - not to fd. \ All positional arguments following --exec are considered to be arguments to the command - not to fd. \
It is therefore recommended to place the '-x'/'--exec' option last.\n\ It is therefore recommended to place the '-x'/'--exec' option last.\n\
The following placeholders are substituted before the command is executed:\n \ The following placeholders are substituted before the command is executed:\n \
@ -844,9 +777,7 @@ impl clap::Args for Exec {
'{/}': basename\n \ '{/}': basename\n \
'{//}': parent directory\n \ '{//}': parent directory\n \
'{.}': path without file extension\n \ '{.}': path without file extension\n \
'{/.}': basename without file extension\n \ '{/.}': basename without file extension\n\n\
'{{': literal '{' (for escaping)\n \
'}}': literal '}' (for escaping)\n\n\
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\ If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
Examples:\n\n \ Examples:\n\n \
- find all *.zip files and unzip them:\n\n \ - find all *.zip files and unzip them:\n\n \
@ -871,15 +802,12 @@ impl clap::Args for Exec {
.help("Execute a command with all search results at once") .help("Execute a command with all search results at once")
.long_help( .long_help(
"Execute the given command once, with all search results as arguments.\n\ "Execute the given command once, with all search results as arguments.\n\
The order of the arguments is non-deterministic, and should not be relied upon.\n\
One of the following placeholders is substituted before the command is executed:\n \ One of the following placeholders is substituted before the command is executed:\n \
'{}': path (of all search results)\n \ '{}': path (of all search results)\n \
'{/}': basename\n \ '{/}': basename\n \
'{//}': parent directory\n \ '{//}': parent directory\n \
'{.}': path without file extension\n \ '{.}': path without file extension\n \
'{/.}': basename without file extension\n \ '{/.}': basename without file extension\n\n\
'{{': literal '{' (for escaping)\n \
'}}': literal '}' (for escaping)\n\n\
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\ If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
Examples:\n\n \ Examples:\n\n \
- Find all test_*.py files and open them in your favorite editor:\n\n \ - Find all test_*.py files and open them in your favorite editor:\n\n \

View File

@ -8,7 +8,6 @@ use crate::filetypes::FileTypes;
#[cfg(unix)] #[cfg(unix)]
use crate::filter::OwnerFilter; use crate::filter::OwnerFilter;
use crate::filter::{SizeFilter, TimeFilter}; use crate::filter::{SizeFilter, TimeFilter};
use crate::fmt::FormatTemplate;
/// Configuration options for *fd*. /// Configuration options for *fd*.
pub struct Config { pub struct Config {
@ -31,9 +30,6 @@ pub struct Config {
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not. /// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
pub read_vcsignore: bool, pub read_vcsignore: bool,
/// Whether to require a `.git` directory to respect gitignore files.
pub require_git_to_read_vcsignore: bool,
/// Whether to respect the global ignore file or not. /// Whether to respect the global ignore file or not.
pub read_global_ignore: bool, pub read_global_ignore: bool,
@ -86,9 +82,6 @@ pub struct Config {
/// The value (if present) will be a lowercase string without leading dots. /// The value (if present) will be a lowercase string without leading dots.
pub extensions: Option<RegexSet>, pub extensions: Option<RegexSet>,
/// A format string to use to format results, similarly to exec
pub format: Option<FormatTemplate>,
/// If a value is supplied, each item found will be used to generate and execute commands. /// If a value is supplied, each item found will be used to generate and execute commands.
pub command: Option<Arc<CommandSet>>, pub command: Option<Arc<CommandSet>>,

View File

@ -1,20 +1,19 @@
use std::cell::OnceCell;
use std::ffi::OsString; use std::ffi::OsString;
use std::fs::{FileType, Metadata}; use std::fs::{FileType, Metadata};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use lscolors::{Colorable, LsColors, Style}; use lscolors::{Colorable, LsColors, Style};
use once_cell::unsync::OnceCell;
use crate::config::Config; use crate::config::Config;
use crate::filesystem::strip_current_dir; use crate::filesystem::strip_current_dir;
#[derive(Debug)]
enum DirEntryInner { enum DirEntryInner {
Normal(ignore::DirEntry), Normal(ignore::DirEntry),
BrokenSymlink(PathBuf), BrokenSymlink(PathBuf),
} }
#[derive(Debug)]
pub struct DirEntry { pub struct DirEntry {
inner: DirEntryInner, inner: DirEntryInner,
metadata: OnceCell<Option<Metadata>>, metadata: OnceCell<Option<Metadata>>,
@ -113,7 +112,7 @@ impl Eq for DirEntry {}
impl PartialOrd for DirEntry { impl PartialOrd for DirEntry {
#[inline] #[inline]
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other)) self.path().partial_cmp(other.path())
} }
} }

View File

@ -34,10 +34,10 @@ pub fn dirname(path: &Path) -> OsString {
#[cfg(test)] #[cfg(test)]
mod path_tests { mod path_tests {
use super::*; use super::*;
use std::path::MAIN_SEPARATOR_STR; use std::path::MAIN_SEPARATOR;
fn correct(input: &str) -> String { fn correct(input: &str) -> String {
input.replace('/', MAIN_SEPARATOR_STR) input.replace('/', &MAIN_SEPARATOR.to_string())
} }
macro_rules! func_tests { macro_rules! func_tests {

View File

@ -1,6 +1,9 @@
use std::sync::Mutex; use std::sync::{Arc, Mutex};
use crossbeam_channel::Receiver;
use crate::config::Config; use crate::config::Config;
use crate::dir_entry::DirEntry;
use crate::error::print_error; use crate::error::print_error;
use crate::exit_codes::{merge_exitcodes, ExitCode}; use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::walk::WorkerResult; use crate::walk::WorkerResult;
@ -11,47 +14,43 @@ use super::CommandSet;
/// generate a command with the supplied command template. The generated command will then /// generate a command with the supplied command template. The generated command will then
/// be executed, and this process will continue until the receiver's sender has closed. /// be executed, and this process will continue until the receiver's sender has closed.
pub fn job( pub fn job(
results: impl IntoIterator<Item = WorkerResult>, rx: Receiver<WorkerResult>,
cmd: &CommandSet, cmd: Arc<CommandSet>,
out_perm: &Mutex<()>, out_perm: Arc<Mutex<()>>,
config: &Config, config: &Config,
) -> ExitCode { ) -> ExitCode {
// Output should be buffered when only running a single thread // Output should be buffered when only running a single thread
let buffer_output: bool = config.threads > 1; let buffer_output: bool = config.threads > 1;
let mut ret = ExitCode::Success; let mut results: Vec<ExitCode> = Vec::new();
for result in results { loop {
// Obtain the next result from the receiver, else if the channel // Obtain the next result from the receiver, else if the channel
// has closed, exit from the loop // has closed, exit from the loop
let dir_entry = match result { let dir_entry: DirEntry = match rx.recv() {
WorkerResult::Entry(dir_entry) => dir_entry, Ok(WorkerResult::Entry(dir_entry)) => dir_entry,
WorkerResult::Error(err) => { Ok(WorkerResult::Error(err)) => {
if config.show_filesystem_errors { if config.show_filesystem_errors {
print_error(err.to_string()); print_error(err.to_string());
} }
continue; continue;
} }
Err(_) => break,
}; };
// Generate a command, execute it and store its exit code. // Generate a command, execute it and store its exit code.
let code = cmd.execute( results.push(cmd.execute(
dir_entry.stripped_path(config), dir_entry.stripped_path(config),
config.path_separator.as_deref(), config.path_separator.as_deref(),
out_perm, Arc::clone(&out_perm),
buffer_output, buffer_output,
); ))
ret = merge_exitcodes([ret, code]);
} }
// Returns error in case of any error. // Returns error in case of any error.
ret merge_exitcodes(results)
} }
pub fn batch( pub fn batch(rx: Receiver<WorkerResult>, cmd: &CommandSet, config: &Config) -> ExitCode {
results: impl IntoIterator<Item = WorkerResult>, let paths = rx
cmd: &CommandSet,
config: &Config,
) -> ExitCode {
let paths = results
.into_iter() .into_iter()
.filter_map(|worker_result| match worker_result { .filter_map(|worker_result| match worker_result {
WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)), WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),

View File

@ -1,21 +1,27 @@
mod command; mod command;
mod input;
mod job; mod job;
mod token;
use std::ffi::OsString; use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
use std::io; use std::io;
use std::iter; use std::iter;
use std::path::{Path, PathBuf}; use std::path::{Component, Path, PathBuf, Prefix};
use std::process::Stdio; use std::process::Stdio;
use std::sync::Mutex; use std::sync::{Arc, Mutex};
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use argmax::Command; use argmax::Command;
use once_cell::sync::Lazy;
use regex::Regex;
use crate::exit_codes::{merge_exitcodes, ExitCode}; use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::fmt::{FormatTemplate, Token};
use self::command::{execute_commands, handle_cmd_error}; use self::command::{execute_commands, handle_cmd_error};
use self::input::{basename, dirname, remove_extension};
pub use self::job::{batch, job}; pub use self::job::{batch, job};
use self::token::Token;
/// Execution mode of the command /// Execution mode of the command
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -33,10 +39,9 @@ pub struct CommandSet {
} }
impl CommandSet { impl CommandSet {
pub fn new<I, T, S>(input: I) -> Result<CommandSet> pub fn new<I, S>(input: I) -> Result<CommandSet>
where where
I: IntoIterator<Item = T>, I: IntoIterator<Item = Vec<S>>,
T: IntoIterator<Item = S>,
S: AsRef<str>, S: AsRef<str>,
{ {
Ok(CommandSet { Ok(CommandSet {
@ -48,10 +53,9 @@ impl CommandSet {
}) })
} }
pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet> pub fn new_batch<I, S>(input: I) -> Result<CommandSet>
where where
I: IntoIterator<Item = T>, I: IntoIterator<Item = Vec<S>>,
T: IntoIterator<Item = S>,
S: AsRef<str>, S: AsRef<str>,
{ {
Ok(CommandSet { Ok(CommandSet {
@ -80,14 +84,14 @@ impl CommandSet {
&self, &self,
input: &Path, input: &Path,
path_separator: Option<&str>, path_separator: Option<&str>,
out_perm: &Mutex<()>, out_perm: Arc<Mutex<()>>,
buffer_output: bool, buffer_output: bool,
) -> ExitCode { ) -> ExitCode {
let commands = self let commands = self
.commands .commands
.iter() .iter()
.map(|c| c.generate(input, path_separator)); .map(|c| c.generate(input, path_separator));
execute_commands(commands, out_perm, buffer_output) execute_commands(commands, &out_perm, buffer_output)
} }
pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode
@ -127,7 +131,7 @@ impl CommandSet {
#[derive(Debug)] #[derive(Debug)]
struct CommandBuilder { struct CommandBuilder {
pre_args: Vec<OsString>, pre_args: Vec<OsString>,
path_arg: FormatTemplate, path_arg: ArgumentTemplate,
post_args: Vec<OsString>, post_args: Vec<OsString>,
cmd: Command, cmd: Command,
count: usize, count: usize,
@ -216,7 +220,7 @@ impl CommandBuilder {
/// `generate_and_execute()` method will be used to generate a command and execute it. /// `generate_and_execute()` method will be used to generate a command and execute it.
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
struct CommandTemplate { struct CommandTemplate {
args: Vec<FormatTemplate>, args: Vec<ArgumentTemplate>,
} }
impl CommandTemplate { impl CommandTemplate {
@ -225,15 +229,50 @@ impl CommandTemplate {
I: IntoIterator<Item = S>, I: IntoIterator<Item = S>,
S: AsRef<str>, S: AsRef<str>,
{ {
static PLACEHOLDER_PATTERN: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\{(/?\.?|//)\}").unwrap());
let mut args = Vec::new(); let mut args = Vec::new();
let mut has_placeholder = false; let mut has_placeholder = false;
for arg in input { for arg in input {
let arg = arg.as_ref(); let arg = arg.as_ref();
let tmpl = FormatTemplate::parse(arg); let mut tokens = Vec::new();
has_placeholder |= tmpl.has_tokens(); let mut start = 0;
args.push(tmpl);
for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) {
// Leading text before the placeholder.
if placeholder.start() > start {
tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
}
start = placeholder.end();
match placeholder.as_str() {
"{}" => tokens.push(Token::Placeholder),
"{.}" => tokens.push(Token::NoExt),
"{/}" => tokens.push(Token::Basename),
"{//}" => tokens.push(Token::Parent),
"{/.}" => tokens.push(Token::BasenameNoExt),
_ => unreachable!("Unhandled placeholder"),
}
has_placeholder = true;
}
// Without a placeholder, the argument is just fixed text.
if tokens.is_empty() {
args.push(ArgumentTemplate::Text(arg.to_owned()));
continue;
}
if start < arg.len() {
// Trailing text after last placeholder.
tokens.push(Token::Text(arg[start..].to_owned()));
}
args.push(ArgumentTemplate::Tokens(tokens));
} }
// We need to check that we have at least one argument, because if not // We need to check that we have at least one argument, because if not
@ -247,7 +286,7 @@ impl CommandTemplate {
// If a placeholder token was not supplied, append one at the end of the command. // If a placeholder token was not supplied, append one at the end of the command.
if !has_placeholder { if !has_placeholder {
args.push(FormatTemplate::Tokens(vec![Token::Placeholder])); args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
} }
Ok(CommandTemplate { args }) Ok(CommandTemplate { args })
@ -270,18 +309,115 @@ impl CommandTemplate {
} }
} }
/// Represents a template for a single command argument.
///
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
/// a fixed text.
#[derive(Clone, Debug, PartialEq)]
enum ArgumentTemplate {
Tokens(Vec<Token>),
Text(String),
}
impl ArgumentTemplate {
pub fn has_tokens(&self) -> bool {
matches!(self, ArgumentTemplate::Tokens(_))
}
/// Generate an argument from this template. If path_separator is Some, then it will replace
/// the path separator in all placeholder tokens. Text arguments and tokens are not affected by
/// path separator substitution.
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
use self::Token::*;
let path = path.as_ref();
match *self {
ArgumentTemplate::Tokens(ref tokens) => {
let mut s = OsString::new();
for token in tokens {
match *token {
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
BasenameNoExt => s.push(Self::replace_separator(
&remove_extension(basename(path).as_ref()),
path_separator,
)),
NoExt => s.push(Self::replace_separator(
&remove_extension(path),
path_separator,
)),
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
Placeholder => {
s.push(Self::replace_separator(path.as_ref(), path_separator))
}
Text(ref string) => s.push(string),
}
}
s
}
ArgumentTemplate::Text(ref text) => OsString::from(text),
}
}
/// Replace the path separator in the input with the custom separator string. If path_separator
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
/// interpreted as a Path and its components are iterated through and re-joined into a new
/// OsString.
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
// fast-path - no replacement necessary
if path_separator.is_none() {
return Cow::Borrowed(path);
}
let path_separator = path_separator.unwrap();
let mut out = OsString::with_capacity(path.len());
let mut components = Path::new(path).components().peekable();
while let Some(comp) = components.next() {
match comp {
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
// letter or UNC path, and is usually followed by RootDir. There are also
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
// ignore verbatim path prefixes here because they're very rare, might be
// impossible to reach here, and there's no good way to deal with them. If users
// are doing something advanced involving verbatim windows paths, they can do their
// own output filtering with a tool like sed.
Component::Prefix(prefix) => {
if let Prefix::UNC(server, share) = prefix.kind() {
// Prefix::UNC is a parsed version of '\\server\share'
out.push(path_separator);
out.push(path_separator);
out.push(server);
out.push(path_separator);
out.push(share);
} else {
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
// but they're not returned by directories fd can search anyway so we don't worry
// about them.
out.push(comp.as_os_str());
}
}
// Root directory is always replaced with the custom separator.
Component::RootDir => out.push(path_separator),
// Everything else is joined normally, with a trailing separator if we're not last
_ => {
out.push(comp.as_os_str());
if components.peek().is_some() {
out.push(path_separator);
}
}
}
}
Cow::Owned(out)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
template
.args
.iter()
.map(|arg| arg.generate(input, None).into_string().unwrap())
.collect()
}
#[test] #[test]
fn tokens_with_placeholder() { fn tokens_with_placeholder() {
assert_eq!( assert_eq!(
@ -289,9 +425,9 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Text("${SHELL}:".into()), ArgumentTemplate::Text("${SHELL}:".into()),
FormatTemplate::Tokens(vec![Token::Placeholder]), ArgumentTemplate::Tokens(vec![Token::Placeholder]),
] ]
}], }],
mode: ExecutionMode::OneByOne, mode: ExecutionMode::OneByOne,
@ -306,8 +442,8 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::NoExt]), ArgumentTemplate::Tokens(vec![Token::NoExt]),
], ],
}], }],
mode: ExecutionMode::OneByOne, mode: ExecutionMode::OneByOne,
@ -322,8 +458,8 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::Basename]), ArgumentTemplate::Tokens(vec![Token::Basename]),
], ],
}], }],
mode: ExecutionMode::OneByOne, mode: ExecutionMode::OneByOne,
@ -338,8 +474,8 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::Parent]), ArgumentTemplate::Tokens(vec![Token::Parent]),
], ],
}], }],
mode: ExecutionMode::OneByOne, mode: ExecutionMode::OneByOne,
@ -354,8 +490,8 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::BasenameNoExt]), ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]),
], ],
}], }],
mode: ExecutionMode::OneByOne, mode: ExecutionMode::OneByOne,
@ -363,21 +499,6 @@ mod tests {
); );
} }
#[test]
fn tokens_with_literal_braces() {
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
assert_eq!(
generate_str(&template, "foo"),
vec!["{}", "{", "{.}", "foo"]
);
}
#[test]
fn tokens_with_literal_braces_and_placeholder() {
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
}
#[test] #[test]
fn tokens_multiple() { fn tokens_multiple() {
assert_eq!( assert_eq!(
@ -385,9 +506,9 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("cp".into()), ArgumentTemplate::Text("cp".into()),
FormatTemplate::Tokens(vec![Token::Placeholder]), ArgumentTemplate::Tokens(vec![Token::Placeholder]),
FormatTemplate::Tokens(vec![ ArgumentTemplate::Tokens(vec![
Token::BasenameNoExt, Token::BasenameNoExt,
Token::Text(".ext".into()) Token::Text(".ext".into())
]), ]),
@ -405,8 +526,8 @@ mod tests {
CommandSet { CommandSet {
commands: vec![CommandTemplate { commands: vec![CommandTemplate {
args: vec![ args: vec![
FormatTemplate::Text("echo".into()), ArgumentTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::NoExt]), ArgumentTemplate::Tokens(vec![Token::NoExt]),
], ],
}], }],
mode: ExecutionMode::Batch, mode: ExecutionMode::Batch,
@ -431,7 +552,7 @@ mod tests {
#[test] #[test]
fn generate_custom_path_separator() { fn generate_custom_path_separator() {
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]); let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
macro_rules! check { macro_rules! check {
($input:expr, $expected:expr) => { ($input:expr, $expected:expr) => {
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
@ -446,7 +567,7 @@ mod tests {
#[cfg(windows)] #[cfg(windows)]
#[test] #[test]
fn generate_custom_path_separator_windows() { fn generate_custom_path_separator_windows() {
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]); let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
macro_rules! check { macro_rules! check {
($input:expr, $expected:expr) => { ($input:expr, $expected:expr) => {
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));

29
src/exec/token.rs Normal file
View File

@ -0,0 +1,29 @@
use std::fmt::{self, Display, Formatter};
/// Designates what should be written to a buffer
///
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
/// commands after all tokens for a given command template have been collected.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
Placeholder,
Basename,
Parent,
NoExt,
BasenameNoExt,
Text(String),
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
Token::Placeholder => f.write_str("{}")?,
Token::Basename => f.write_str("{/}")?,
Token::Parent => f.write_str("{//}")?,
Token::NoExt => f.write_str("{.}")?,
Token::BasenameNoExt => f.write_str("{/.}")?,
Token::Text(ref string) => f.write_str(string)?,
}
Ok(())
}
}

View File

@ -59,26 +59,6 @@ pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
} }
} }
#[cfg(any(unix, target_os = "redox"))]
pub fn is_block_device(ft: fs::FileType) -> bool {
ft.is_block_device()
}
#[cfg(windows)]
pub fn is_block_device(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_char_device(ft: fs::FileType) -> bool {
ft.is_char_device()
}
#[cfg(windows)]
pub fn is_char_device(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))] #[cfg(any(unix, target_os = "redox"))]
pub fn is_socket(ft: fs::FileType) -> bool { pub fn is_socket(ft: fs::FileType) -> bool {
ft.is_socket() ft.is_socket()
@ -128,11 +108,13 @@ pub fn strip_current_dir(path: &Path) -> &Path {
pub fn default_path_separator() -> Option<String> { pub fn default_path_separator() -> Option<String> {
if cfg!(windows) { if cfg!(windows) {
let msystem = env::var("MSYSTEM").ok()?; let msystem = env::var("MSYSTEM").ok()?;
if !msystem.is_empty() { match msystem.as_str() {
return Some("/".to_owned()); "MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()),
_ => None,
} }
} else {
None
} }
None
} }
#[cfg(test)] #[cfg(test)]

View File

@ -9,8 +9,6 @@ pub struct FileTypes {
pub files: bool, pub files: bool,
pub directories: bool, pub directories: bool,
pub symlinks: bool, pub symlinks: bool,
pub block_devices: bool,
pub char_devices: bool,
pub sockets: bool, pub sockets: bool,
pub pipes: bool, pub pipes: bool,
pub executables_only: bool, pub executables_only: bool,
@ -23,8 +21,6 @@ impl FileTypes {
(!self.files && entry_type.is_file()) (!self.files && entry_type.is_file())
|| (!self.directories && entry_type.is_dir()) || (!self.directories && entry_type.is_dir())
|| (!self.symlinks && entry_type.is_symlink()) || (!self.symlinks && entry_type.is_symlink())
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|| (!self.sockets && filesystem::is_socket(*entry_type)) || (!self.sockets && filesystem::is_socket(*entry_type))
|| (!self.pipes && filesystem::is_pipe(*entry_type)) || (!self.pipes && filesystem::is_pipe(*entry_type))
|| (self.executables_only && !entry.path().executable()) || (self.executables_only && !entry.path().executable())
@ -32,8 +28,6 @@ impl FileTypes {
|| !(entry_type.is_file() || !(entry_type.is_file()
|| entry_type.is_dir() || entry_type.is_dir()
|| entry_type.is_symlink() || entry_type.is_symlink()
|| filesystem::is_block_device(*entry_type)
|| filesystem::is_char_device(*entry_type)
|| filesystem::is_socket(*entry_type) || filesystem::is_socket(*entry_type)
|| filesystem::is_pipe(*entry_type)) || filesystem::is_pipe(*entry_type))
} else { } else {

View File

@ -1,5 +1,4 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use nix::unistd::{Group, User};
use std::fs; use std::fs;
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -36,22 +35,16 @@ impl OwnerFilter {
} }
let uid = Check::parse(fst, |s| { let uid = Check::parse(fst, |s| {
if let Ok(uid) = s.parse() { s.parse()
Ok(uid) .ok()
} else { .or_else(|| users::get_user_by_name(s).map(|user| user.uid()))
User::from_name(s)? .ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
.map(|user| user.uid.as_raw())
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
}
})?; })?;
let gid = Check::parse(snd, |s| { let gid = Check::parse(snd, |s| {
if let Ok(gid) = s.parse() { s.parse()
Ok(gid) .ok()
} else { .or_else(|| users::get_group_by_name(s).map(|group| group.gid()))
Group::from_name(s)? .ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
.map(|group| group.gid.as_raw())
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
}
})?; })?;
Ok(OwnerFilter { uid, gid }) Ok(OwnerFilter { uid, gid })

View File

@ -1,9 +1,9 @@
use std::sync::OnceLock;
use anyhow::anyhow; use anyhow::anyhow;
use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new(); static SIZE_CAPTURES: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SizeFilter { pub enum SizeFilter {
@ -31,13 +31,11 @@ impl SizeFilter {
} }
fn parse_opt(s: &str) -> Option<Self> { fn parse_opt(s: &str) -> Option<Self> {
let pattern = if !SIZE_CAPTURES.is_match(s) {
SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
if !pattern.is_match(s) {
return None; return None;
} }
let captures = pattern.captures(s)?; let captures = SIZE_CAPTURES.captures(s)?;
let limit_kind = captures.get(1).map_or("+", |m| m.as_str()); let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
let quantity = captures let quantity = captures
.get(2) .get(2)

View File

@ -1,4 +1,4 @@
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime}; use chrono::{offset::TimeZone, DateTime, Local, NaiveDate};
use std::time::SystemTime; use std::time::SystemTime;
@ -20,21 +20,11 @@ impl TimeFilter {
.ok() .ok()
.or_else(|| { .or_else(|| {
NaiveDate::parse_from_str(s, "%F") NaiveDate::parse_from_str(s, "%F")
.ok()? .ok()
.and_hms_opt(0, 0, 0)? .and_then(|nd| nd.and_hms_opt(0, 0, 0))
.and_local_timezone(Local) .and_then(|ndt| Local.from_local_datetime(&ndt).single())
.latest()
})
.or_else(|| {
NaiveDateTime::parse_from_str(s, "%F %T")
.ok()?
.and_local_timezone(Local)
.latest()
})
.or_else(|| {
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
}) })
.or_else(|| Local.datetime_from_str(s, "%F %T").ok())
.map(|dt| dt.into()) .map(|dt| dt.into())
}) })
} }
@ -62,10 +52,8 @@ mod tests {
#[test] #[test]
fn is_time_filter_applicable() { fn is_time_filter_applicable() {
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T") let ref_time = Local
.unwrap() .datetime_from_str("2010-10-10 10:10:10", "%F %T")
.and_local_timezone(Local)
.latest()
.unwrap() .unwrap()
.into(); .into();
@ -139,32 +127,5 @@ mod tests {
assert!(!TimeFilter::after(&ref_time, t10s_before) assert!(!TimeFilter::after(&ref_time, t10s_before)
.unwrap() .unwrap()
.applies_to(&t1m_ago)); .applies_to(&t1m_ago));
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
.unwrap()
.into();
let t1m_ago = ref_time - Duration::from_secs(60);
let t1s_later = ref_time + Duration::from_secs(1);
// Timestamp only supported via '@' prefix
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
assert!(
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1m_ago)
);
assert!(
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1s_later)
);
assert!(
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1m_ago)
);
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1s_later));
} }
} }

View File

@ -1,281 +0,0 @@
mod input;
use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display, Formatter};
use std::path::{Component, Path, Prefix};
use std::sync::OnceLock;
use aho_corasick::AhoCorasick;
use self::input::{basename, dirname, remove_extension};
/// Designates what should be written to a buffer
///
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
/// commands after all tokens for a given command template have been collected.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
Placeholder,
Basename,
Parent,
NoExt,
BasenameNoExt,
Text(String),
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
Token::Placeholder => f.write_str("{}")?,
Token::Basename => f.write_str("{/}")?,
Token::Parent => f.write_str("{//}")?,
Token::NoExt => f.write_str("{.}")?,
Token::BasenameNoExt => f.write_str("{/.}")?,
Token::Text(ref string) => f.write_str(string)?,
}
Ok(())
}
}
/// A parsed format string
///
/// This is either a collection of `Token`s including at least one placeholder variant,
/// or a fixed text.
#[derive(Clone, Debug, PartialEq)]
pub enum FormatTemplate {
Tokens(Vec<Token>),
Text(String),
}
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
impl FormatTemplate {
pub fn has_tokens(&self) -> bool {
matches!(self, FormatTemplate::Tokens(_))
}
pub fn parse(fmt: &str) -> Self {
// NOTE: we assume that { and } have the same length
const BRACE_LEN: usize = '{'.len_utf8();
let mut tokens = Vec::new();
let mut remaining = fmt;
let mut buf = String::new();
let placeholders = PLACEHOLDERS.get_or_init(|| {
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
});
while let Some(m) = placeholders.find(remaining) {
match m.pattern().as_u32() {
0 | 1 => {
// we found an escaped {{ or }}, so add
// everything up to the first char to the buffer
// then skip the second one.
buf += &remaining[..m.start() + BRACE_LEN];
remaining = &remaining[m.end()..];
}
id if !remaining[m.end()..].starts_with('}') => {
buf += &remaining[..m.start()];
if !buf.is_empty() {
tokens.push(Token::Text(std::mem::take(&mut buf)));
}
tokens.push(token_from_pattern_id(id));
remaining = &remaining[m.end()..];
}
_ => {
// We got a normal pattern, but the final "}"
// is escaped, so add up to that to the buffer, then
// skip the final }
buf += &remaining[..m.end()];
remaining = &remaining[m.end() + BRACE_LEN..];
}
}
}
// Add the rest of the string to the buffer, and add the final buffer to the tokens
if !remaining.is_empty() {
buf += remaining;
}
if tokens.is_empty() {
// No placeholders were found, so just return the text
return FormatTemplate::Text(buf);
}
// Add final text segment
if !buf.is_empty() {
tokens.push(Token::Text(buf));
}
debug_assert!(!tokens.is_empty());
FormatTemplate::Tokens(tokens)
}
/// Generate a result string from this template. If path_separator is Some, then it will replace
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
/// path separator substitution.
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
use Token::*;
let path = path.as_ref();
match *self {
Self::Tokens(ref tokens) => {
let mut s = OsString::new();
for token in tokens {
match token {
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
BasenameNoExt => s.push(Self::replace_separator(
&remove_extension(basename(path).as_ref()),
path_separator,
)),
NoExt => s.push(Self::replace_separator(
&remove_extension(path),
path_separator,
)),
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
Placeholder => {
s.push(Self::replace_separator(path.as_ref(), path_separator))
}
Text(ref string) => s.push(string),
}
}
s
}
Self::Text(ref text) => OsString::from(text),
}
}
/// Replace the path separator in the input with the custom separator string. If path_separator
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
/// interpreted as a Path and its components are iterated through and re-joined into a new
/// OsString.
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
// fast-path - no replacement necessary
if path_separator.is_none() {
return Cow::Borrowed(path);
}
let path_separator = path_separator.unwrap();
let mut out = OsString::with_capacity(path.len());
let mut components = Path::new(path).components().peekable();
while let Some(comp) = components.next() {
match comp {
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
// letter or UNC path, and is usually followed by RootDir. There are also
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
// ignore verbatim path prefixes here because they're very rare, might be
// impossible to reach here, and there's no good way to deal with them. If users
// are doing something advanced involving verbatim windows paths, they can do their
// own output filtering with a tool like sed.
Component::Prefix(prefix) => {
if let Prefix::UNC(server, share) = prefix.kind() {
// Prefix::UNC is a parsed version of '\\server\share'
out.push(path_separator);
out.push(path_separator);
out.push(server);
out.push(path_separator);
out.push(share);
} else {
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
// but they're not returned by directories fd can search anyway so we don't worry
// about them.
out.push(comp.as_os_str());
}
}
// Root directory is always replaced with the custom separator.
Component::RootDir => out.push(path_separator),
// Everything else is joined normally, with a trailing separator if we're not last
_ => {
out.push(comp.as_os_str());
if components.peek().is_some() {
out.push(path_separator);
}
}
}
}
Cow::Owned(out)
}
}
// Convert the id from an aho-corasick match to the
// appropriate token
fn token_from_pattern_id(id: u32) -> Token {
use Token::*;
match id {
2 => Placeholder,
3 => Basename,
4 => Parent,
5 => NoExt,
6 => BasenameNoExt,
_ => unreachable!(),
}
}
#[cfg(test)]
mod fmt_tests {
use super::*;
use std::path::PathBuf;
#[test]
fn parse_no_placeholders() {
let templ = FormatTemplate::parse("This string has no placeholders");
assert_eq!(
templ,
FormatTemplate::Text("This string has no placeholders".into())
);
}
#[test]
fn parse_only_brace_escapes() {
let templ = FormatTemplate::parse("This string only has escapes like {{ and }}");
assert_eq!(
templ,
FormatTemplate::Text("This string only has escapes like { and }".into())
);
}
#[test]
fn all_placeholders() {
use Token::*;
let templ = FormatTemplate::parse(
"{{path={} \
basename={/} \
parent={//} \
noExt={.} \
basenameNoExt={/.} \
}}",
);
assert_eq!(
templ,
FormatTemplate::Tokens(vec![
Text("{path=".into()),
Placeholder,
Text(" basename=".into()),
Basename,
Text(" parent=".into()),
Parent,
Text(" noExt=".into()),
NoExt,
Text(" basenameNoExt=".into()),
BasenameNoExt,
Text(" }".into()),
])
);
let mut path = PathBuf::new();
path.push("a");
path.push("folder");
path.push("file.txt");
let expanded = templ.generate(&path, Some("/")).into_string().unwrap();
assert_eq!(
expanded,
"{path=a/folder/file.txt \
basename=file.txt \
parent=a/folder \
noExt=a/folder/file \
basenameNoExt=file }"
);
}
}

View File

@ -7,18 +7,17 @@ mod exit_codes;
mod filesystem; mod filesystem;
mod filetypes; mod filetypes;
mod filter; mod filter;
mod fmt;
mod output; mod output;
mod regex_helper; mod regex_helper;
mod walk; mod walk;
use std::env; use std::env;
use std::io::IsTerminal;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use std::time; use std::time;
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use atty::Stream;
use clap::{CommandFactory, Parser}; use clap::{CommandFactory, Parser};
use globset::GlobBuilder; use globset::GlobBuilder;
use lscolors::LsColors; use lscolors::LsColors;
@ -41,7 +40,6 @@ use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_wi
not(target_os = "android"), not(target_os = "android"),
not(target_os = "macos"), not(target_os = "macos"),
not(target_os = "freebsd"), not(target_os = "freebsd"),
not(target_os = "openbsd"),
not(all(target_env = "musl", target_pointer_width = "32")), not(all(target_env = "musl", target_pointer_width = "32")),
not(target_arch = "riscv64"), not(target_arch = "riscv64"),
feature = "use-jemalloc" feature = "use-jemalloc"
@ -104,7 +102,7 @@ fn run() -> Result<ExitCode> {
.map(|pat| build_regex(pat, &config)) .map(|pat| build_regex(pat, &config))
.collect::<Result<Vec<Regex>>>()?; .collect::<Result<Vec<Regex>>>()?;
walk::scan(&search_paths, regexps, config) walk::scan(&search_paths, Arc::new(regexps), Arc::new(config))
} }
#[cfg(feature = "completions")] #[cfg(feature = "completions")]
@ -218,14 +216,12 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
#[cfg(not(windows))] #[cfg(not(windows))]
let ansi_colors_support = true; let ansi_colors_support = true;
let interactive_terminal = std::io::stdout().is_terminal(); let interactive_terminal = atty::is(Stream::Stdout);
let colored_output = match opts.color { let colored_output = match opts.color {
ColorWhen::Always => true, ColorWhen::Always => true,
ColorWhen::Never => false, ColorWhen::Never => false,
ColorWhen::Auto => { ColorWhen::Auto => {
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty()); ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal
ansi_colors_support && !no_color && interactive_terminal
} }
}; };
@ -243,11 +239,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()), ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),
read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()), read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),
read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs), read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),
require_git_to_read_vcsignore: !opts.no_require_git,
read_parent_ignore: !opts.no_ignore_parent, read_parent_ignore: !opts.no_ignore_parent,
read_global_ignore: !(opts.no_ignore read_global_ignore: !opts.no_ignore || opts.rg_alias_ignore() || opts.no_global_ignore_file,
|| opts.rg_alias_ignore()
|| opts.no_global_ignore_file),
follow_links: opts.follow, follow_links: opts.follow,
one_file_system: opts.one_file_system, one_file_system: opts.one_file_system,
null_separator: opts.null_separator, null_separator: opts.null_separator,
@ -255,7 +248,7 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
max_depth: opts.max_depth(), max_depth: opts.max_depth(),
min_depth: opts.min_depth(), min_depth: opts.min_depth(),
prune: opts.prune, prune: opts.prune,
threads: opts.threads().get(), threads: opts.threads(),
max_buffer_time: opts.max_buffer_time, max_buffer_time: opts.max_buffer_time,
ls_colors, ls_colors,
interactive_terminal, interactive_terminal,
@ -272,8 +265,6 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
file_types.files = true; file_types.files = true;
} }
Empty => file_types.empty_only = true, Empty => file_types.empty_only = true,
BlockDevice => file_types.block_devices = true,
CharDevice => file_types.char_devices = true,
Socket => file_types.sockets = true, Socket => file_types.sockets = true,
Pipe => file_types.pipes = true, Pipe => file_types.pipes = true,
} }
@ -300,10 +291,6 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
.build() .build()
}) })
.transpose()?, .transpose()?,
format: opts
.format
.as_deref()
.map(crate::fmt::FormatTemplate::parse),
command: command.map(Arc::new), command: command.map(Arc::new),
batch_size: opts.batch_size, batch_size: opts.batch_size,
exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(), exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
@ -316,7 +303,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
path_separator, path_separator,
actual_path_separator, actual_path_separator,
max_results: opts.max_results(), max_results: opts.max_results(),
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)), strip_cwd_prefix: (opts.no_search_paths()
&& (opts.strip_cwd_prefix || !(opts.null_separator || has_command))),
}) })
} }
@ -329,22 +317,18 @@ fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<Comma
if !opts.list_details { if !opts.list_details {
return None; return None;
} }
let color_arg = format!("--color={}", opts.color.as_str());
let res = determine_ls_command(colored_output) let res = determine_ls_command(&color_arg, colored_output)
.map(|cmd| CommandSet::new_batch([cmd]).unwrap()); .map(|cmd| CommandSet::new_batch([cmd]).unwrap());
Some(res) Some(res)
}) })
.transpose() .transpose()
} }
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> { fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result<Vec<&str>> {
#[allow(unused)] #[allow(unused)]
let gnu_ls = |command_name| { let gnu_ls = |command_name| {
let color_arg = if colored_output {
"--color=always"
} else {
"--color=never"
};
// Note: we use short options here (instead of --long-options) to support more // Note: we use short options here (instead of --long-options) to support more
// platforms (like BusyBox). // platforms (like BusyBox).
vec![ vec![

View File

@ -7,7 +7,6 @@ use crate::config::Config;
use crate::dir_entry::DirEntry; use crate::dir_entry::DirEntry;
use crate::error::print_error; use crate::error::print_error;
use crate::exit_codes::ExitCode; use crate::exit_codes::ExitCode;
use crate::fmt::FormatTemplate;
fn replace_path_separator(path: &str, new_path_separator: &str) -> String { fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
path.replace(std::path::MAIN_SEPARATOR, new_path_separator) path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
@ -15,10 +14,7 @@ fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
// TODO: this function is performance critical and can probably be optimized // TODO: this function is performance critical and can probably be optimized
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) { pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) {
// TODO: use format if supplied let r = if let Some(ref ls_colors) = config.ls_colors {
let r = if let Some(ref format) = config.format {
print_entry_format(stdout, entry, config, format)
} else if let Some(ref ls_colors) = config.ls_colors {
print_entry_colorized(stdout, entry, config, ls_colors) print_entry_colorized(stdout, entry, config, ls_colors)
} else { } else {
print_entry_uncolorized(stdout, entry, config) print_entry_uncolorized(stdout, entry, config)
@ -58,22 +54,6 @@ fn print_trailing_slash<W: Write>(
Ok(()) Ok(())
} }
// TODO: this function is performance critical and can probably be optimized
fn print_entry_format<W: Write>(
stdout: &mut W,
entry: &DirEntry,
config: &Config,
format: &FormatTemplate,
) -> io::Result<()> {
let separator = if config.null_separator { "\0" } else { "\n" };
let output = format.generate(
entry.stripped_path(config),
config.path_separator.as_deref(),
);
// TODO: support writing raw bytes on unix?
write!(stdout, "{}{}", output.to_string_lossy(), separator)
}
// TODO: this function is performance critical and can probably be optimized // TODO: this function is performance critical and can probably be optimized
fn print_entry_colorized<W: Write>( fn print_entry_colorized<W: Write>(
stdout: &mut W, stdout: &mut W,

View File

@ -3,7 +3,7 @@ use regex_syntax::ParserBuilder;
/// Determine if a regex pattern contains a literal uppercase character. /// Determine if a regex pattern contains a literal uppercase character.
pub fn pattern_has_uppercase_char(pattern: &str) -> bool { pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
let mut parser = ParserBuilder::new().utf8(false).build(); let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
parser parser
.parse(pattern) .parse(pattern)
@ -16,18 +16,16 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
use regex_syntax::hir::*; use regex_syntax::hir::*;
match hir.kind() { match hir.kind() {
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) { HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(),
Ok(s) => s.chars().any(|c| c.is_uppercase()), HirKind::Literal(Literal::Byte(b)) => char::from(*b).is_uppercase(),
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
},
HirKind::Class(Class::Unicode(ranges)) => ranges HirKind::Class(Class::Unicode(ranges)) => ranges
.iter() .iter()
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()), .any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
HirKind::Class(Class::Bytes(ranges)) => ranges HirKind::Class(Class::Bytes(ranges)) => ranges
.iter() .iter()
.any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()), .any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),
HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => { HirKind::Group(Group { hir, .. }) | HirKind::Repetition(Repetition { hir, .. }) => {
hir_has_uppercase_char(sub) hir_has_uppercase_char(hir)
} }
HirKind::Concat(hirs) | HirKind::Alternation(hirs) => { HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {
hirs.iter().any(hir_has_uppercase_char) hirs.iter().any(hir_has_uppercase_char)
@ -38,7 +36,7 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files) /// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)
pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool { pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {
let mut parser = ParserBuilder::new().utf8(false).build(); let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
parser parser
.parse(pattern) .parse(pattern)
@ -58,7 +56,7 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
HirKind::Concat(hirs) => { HirKind::Concat(hirs) => {
let mut hirs = hirs.iter(); let mut hirs = hirs.iter();
if let Some(hir) = hirs.next() { if let Some(hir) = hirs.next() {
if hir.kind() != &HirKind::Look(Look::Start) { if hir.kind() != &HirKind::Anchor(Anchor::StartText) {
return false; return false;
} }
} else { } else {
@ -66,10 +64,7 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
} }
if let Some(hir) = hirs.next() { if let Some(hir) = hirs.next() {
match hir.kind() { hir.kind() == &HirKind::Literal(Literal::Unicode('.'))
HirKind::Literal(Literal(bytes)) => bytes.starts_with(&[b'.']),
_ => false,
}
} else { } else {
false false
} }

View File

@ -1,18 +1,17 @@
use std::borrow::Cow;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::io::{self, Write}; use std::io;
use std::mem; use std::mem;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, MutexGuard}; use std::sync::{Arc, Mutex};
use std::thread; use std::thread;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use std::{borrow::Cow, io::Write};
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, SendError, Sender}; use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender};
use etcetera::BaseStrategy; use ignore::overrides::OverrideBuilder;
use ignore::overrides::{Override, OverrideBuilder}; use ignore::{self, WalkBuilder};
use ignore::{WalkBuilder, WalkParallel, WalkState};
use regex::bytes::Regex; use regex::bytes::Regex;
use crate::config::Config; use crate::config::Config;
@ -36,7 +35,6 @@ enum ReceiverMode {
/// The Worker threads can result in a valid entry having PathBuf or an error. /// The Worker threads can result in a valid entry having PathBuf or an error.
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum WorkerResult { pub enum WorkerResult {
// Errors should be rare, so it's probably better to allow large_enum_variant than // Errors should be rare, so it's probably better to allow large_enum_variant than
// to box the Entry variant // to box the Entry variant
@ -44,98 +42,139 @@ pub enum WorkerResult {
Error(ignore::Error), Error(ignore::Error),
} }
/// A batch of WorkerResults to send over a channel.
#[derive(Clone)]
struct Batch {
items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
}
impl Batch {
fn new() -> Self {
Self {
items: Arc::new(Mutex::new(Some(vec![]))),
}
}
fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
self.items.lock().unwrap()
}
}
impl IntoIterator for Batch {
type Item = WorkerResult;
type IntoIter = std::vec::IntoIter<WorkerResult>;
fn into_iter(self) -> Self::IntoIter {
self.lock().take().unwrap().into_iter()
}
}
/// Wrapper that sends batches of items at once over a channel.
struct BatchSender {
batch: Batch,
tx: Sender<Batch>,
limit: usize,
}
impl BatchSender {
fn new(tx: Sender<Batch>, limit: usize) -> Self {
Self {
batch: Batch::new(),
tx,
limit,
}
}
/// Check if we need to flush a batch.
fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
match batch {
// Limit the batch size to provide some backpressure
Some(vec) => vec.len() >= self.limit,
// Batch was already taken by the receiver, so make a new one
None => true,
}
}
/// Add an item to a batch.
fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
let mut batch = self.batch.lock();
if self.needs_flush(batch.as_ref()) {
drop(batch);
self.batch = Batch::new();
batch = self.batch.lock();
}
let items = batch.as_mut().unwrap();
items.push(item);
if items.len() == 1 {
// New batch, send it over the channel
self.tx
.send(self.batch.clone())
.map_err(|_| SendError(()))?;
}
Ok(())
}
}
/// Maximum size of the output buffer before flushing results to the console /// Maximum size of the output buffer before flushing results to the console
const MAX_BUFFER_LENGTH: usize = 1000; pub const MAX_BUFFER_LENGTH: usize = 1000;
/// Default duration until output buffering switches to streaming. /// Default duration until output buffering switches to streaming.
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100); pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
/// Recursively scan the given search path for files / pathnames matching the patterns.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
pub fn scan(paths: &[PathBuf], patterns: Arc<Vec<Regex>>, config: Arc<Config>) -> Result<ExitCode> {
let first_path = &paths[0];
// Channel capacity was chosen empircally to perform similarly to an unbounded channel
let (tx, rx) = bounded(0x4000 * config.threads);
let mut override_builder = OverrideBuilder::new(first_path);
for pattern in &config.exclude_patterns {
override_builder
.add(pattern)
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
}
let overrides = override_builder
.build()
.map_err(|_| anyhow!("Mismatch in exclude patterns"))?;
let mut walker = WalkBuilder::new(first_path);
walker
.hidden(config.ignore_hidden)
.ignore(config.read_fdignore)
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
.git_ignore(config.read_vcsignore)
.git_global(config.read_vcsignore)
.git_exclude(config.read_vcsignore)
.overrides(overrides)
.follow_links(config.follow_links)
// No need to check for supported platforms, option is unavailable on unsupported ones
.same_file_system(config.one_file_system)
.max_depth(config.max_depth);
if config.read_fdignore {
walker.add_custom_ignore_filename(".fdignore");
}
if config.read_global_ignore {
#[cfg(target_os = "macos")]
let config_dir_op = std::env::var_os("XDG_CONFIG_HOME")
.map(PathBuf::from)
.filter(|p| p.is_absolute())
.or_else(|| dirs_next::home_dir().map(|d| d.join(".config")));
#[cfg(not(target_os = "macos"))]
let config_dir_op = dirs_next::config_dir();
if let Some(global_ignore_file) = config_dir_op
.map(|p| p.join("fd").join("ignore"))
.filter(|p| p.is_file())
{
let result = walker.add_ignore(global_ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!("Malformed pattern in global ignore file. {}.", err));
}
None => (),
}
}
}
for ignore_file in &config.ignore_files {
let result = walker.add_ignore(ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
}
None => (),
}
}
for path in &paths[1..] {
walker.add(path);
}
let parallel_walker = walker.threads(config.threads).build_parallel();
// Flag for cleanly shutting down the parallel walk
let quit_flag = Arc::new(AtomicBool::new(false));
// Flag specifically for quitting due to ^C
let interrupt_flag = Arc::new(AtomicBool::new(false));
if config.ls_colors.is_some() && config.is_printing() {
let quit_flag = Arc::clone(&quit_flag);
let interrupt_flag = Arc::clone(&interrupt_flag);
ctrlc::set_handler(move || {
quit_flag.store(true, Ordering::Relaxed);
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
// Ctrl-C has been pressed twice, exit NOW
ExitCode::KilledBySigint.exit();
}
})
.unwrap();
}
// Spawn the thread that receives all results through the channel.
let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx);
// Spawn the sender threads.
spawn_senders(&config, &quit_flag, patterns, parallel_walker, tx);
// Wait for the receiver thread to print out all results.
let exit_code = receiver_thread.join().unwrap();
if interrupt_flag.load(Ordering::Relaxed) {
Ok(ExitCode::KilledBySigint)
} else {
Ok(exit_code)
}
}
/// Wrapper for the receiver thread's buffering behavior. /// Wrapper for the receiver thread's buffering behavior.
struct ReceiverBuffer<'a, W> { struct ReceiverBuffer<W> {
/// The configuration. /// The configuration.
config: &'a Config, config: Arc<Config>,
/// For shutting down the senders. /// For shutting down the senders.
quit_flag: &'a AtomicBool, quit_flag: Arc<AtomicBool>,
/// The ^C notifier. /// The ^C notifier.
interrupt_flag: &'a AtomicBool, interrupt_flag: Arc<AtomicBool>,
/// Receiver for worker results. /// Receiver for worker results.
rx: Receiver<Batch>, rx: Receiver<WorkerResult>,
/// Standard output. /// Standard output.
stdout: W, stdout: W,
/// The current buffer mode. /// The current buffer mode.
@ -148,12 +187,15 @@ struct ReceiverBuffer<'a, W> {
num_results: usize, num_results: usize,
} }
impl<'a, W: Write> ReceiverBuffer<'a, W> { impl<W: Write> ReceiverBuffer<W> {
/// Create a new receiver buffer. /// Create a new receiver buffer.
fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self { fn new(
let config = &state.config; config: Arc<Config>,
let quit_flag = state.quit_flag.as_ref(); quit_flag: Arc<AtomicBool>,
let interrupt_flag = state.interrupt_flag.as_ref(); interrupt_flag: Arc<AtomicBool>,
rx: Receiver<WorkerResult>,
stdout: W,
) -> Self {
let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME); let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
let deadline = Instant::now() + max_buffer_time; let deadline = Instant::now() + max_buffer_time;
@ -181,7 +223,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
} }
/// Receive the next worker result. /// Receive the next worker result.
fn recv(&self) -> Result<Batch, RecvTimeoutError> { fn recv(&self) -> Result<WorkerResult, RecvTimeoutError> {
match self.mode { match self.mode {
ReceiverMode::Buffering => { ReceiverMode::Buffering => {
// Wait at most until we should switch to streaming // Wait at most until we should switch to streaming
@ -197,44 +239,34 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
/// Wait for a result or state change. /// Wait for a result or state change.
fn poll(&mut self) -> Result<(), ExitCode> { fn poll(&mut self) -> Result<(), ExitCode> {
match self.recv() { match self.recv() {
Ok(batch) => { Ok(WorkerResult::Entry(dir_entry)) => {
for result in batch { if self.config.quiet {
match result { return Err(ExitCode::HasResults(true));
WorkerResult::Entry(dir_entry) => { }
if self.config.quiet {
return Err(ExitCode::HasResults(true));
}
match self.mode { match self.mode {
ReceiverMode::Buffering => { ReceiverMode::Buffering => {
self.buffer.push(dir_entry); self.buffer.push(dir_entry);
if self.buffer.len() > MAX_BUFFER_LENGTH { if self.buffer.len() > MAX_BUFFER_LENGTH {
self.stream()?; self.stream()?;
}
}
ReceiverMode::Streaming => {
self.print(&dir_entry)?;
}
}
self.num_results += 1;
if let Some(max_results) = self.config.max_results {
if self.num_results >= max_results {
return self.stop();
}
}
}
WorkerResult::Error(err) => {
if self.config.show_filesystem_errors {
print_error(err.to_string());
}
} }
} }
ReceiverMode::Streaming => {
self.print(&dir_entry)?;
self.flush()?;
}
} }
// If we don't have another batch ready, flush before waiting self.num_results += 1;
if self.mode == ReceiverMode::Streaming && self.rx.is_empty() { if let Some(max_results) = self.config.max_results {
self.flush()?; if self.num_results >= max_results {
return self.stop();
}
}
}
Ok(WorkerResult::Error(err)) => {
if self.config.show_filesystem_errors {
print_error(err.to_string());
} }
} }
Err(RecvTimeoutError::Timeout) => { Err(RecvTimeoutError::Timeout) => {
@ -250,7 +282,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
/// Output a path. /// Output a path.
fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> { fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
output::print_entry(&mut self.stdout, entry, self.config); output::print_entry(&mut self.stdout, entry, &self.config);
if self.interrupt_flag.load(Ordering::Relaxed) { if self.interrupt_flag.load(Ordering::Relaxed) {
// Ignore any errors on flush, because we're about to exit anyway // Ignore any errors on flush, because we're about to exit anyway
@ -289,7 +321,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
/// Flush stdout if necessary. /// Flush stdout if necessary.
fn flush(&mut self) -> Result<(), ExitCode> { fn flush(&mut self) -> Result<(), ExitCode> {
if self.stdout.flush().is_err() { if self.config.interactive_terminal && self.stdout.flush().is_err() {
// Probably a broken pipe. Exit gracefully. // Probably a broken pipe. Exit gracefully.
return Err(ExitCode::GeneralError); return Err(ExitCode::GeneralError);
} }
@ -297,372 +329,229 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
} }
} }
/// State shared by the sender and receiver threads. fn spawn_receiver(
struct WorkerState { config: &Arc<Config>,
/// The search patterns. quit_flag: &Arc<AtomicBool>,
patterns: Vec<Regex>, interrupt_flag: &Arc<AtomicBool>,
/// The command line configuration. rx: Receiver<WorkerResult>,
config: Config, ) -> thread::JoinHandle<ExitCode> {
/// Flag for cleanly shutting down the parallel walk let config = Arc::clone(config);
quit_flag: Arc<AtomicBool>, let quit_flag = Arc::clone(quit_flag);
/// Flag specifically for quitting due to ^C let interrupt_flag = Arc::clone(interrupt_flag);
interrupt_flag: Arc<AtomicBool>,
}
impl WorkerState {
fn new(patterns: Vec<Regex>, config: Config) -> Self {
let quit_flag = Arc::new(AtomicBool::new(false));
let interrupt_flag = Arc::new(AtomicBool::new(false));
Self {
patterns,
config,
quit_flag,
interrupt_flag,
}
}
fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
let first_path = &paths[0];
let config = &self.config;
let mut builder = OverrideBuilder::new(first_path);
for pattern in &config.exclude_patterns {
builder
.add(pattern)
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
}
builder
.build()
.map_err(|_| anyhow!("Mismatch in exclude patterns"))
}
fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
let first_path = &paths[0];
let config = &self.config;
let overrides = self.build_overrides(paths)?;
let mut builder = WalkBuilder::new(first_path);
builder
.hidden(config.ignore_hidden)
.ignore(config.read_fdignore)
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
.git_ignore(config.read_vcsignore)
.git_global(config.read_vcsignore)
.git_exclude(config.read_vcsignore)
.require_git(config.require_git_to_read_vcsignore)
.overrides(overrides)
.follow_links(config.follow_links)
// No need to check for supported platforms, option is unavailable on unsupported ones
.same_file_system(config.one_file_system)
.max_depth(config.max_depth);
if config.read_fdignore {
builder.add_custom_ignore_filename(".fdignore");
}
if config.read_global_ignore {
if let Ok(basedirs) = etcetera::choose_base_strategy() {
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
if global_ignore_file.is_file() {
let result = builder.add_ignore(global_ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!(
"Malformed pattern in global ignore file. {}.",
err
));
}
None => (),
}
}
}
}
for ignore_file in &config.ignore_files {
let result = builder.add_ignore(ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
}
None => (),
}
}
for path in &paths[1..] {
builder.add(path);
}
let walker = builder.threads(config.threads).build_parallel();
Ok(walker)
}
/// Run the receiver work, either on this thread or a pool of background
/// threads (for --exec).
fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
let config = &self.config;
let threads = config.threads;
thread::spawn(move || {
// This will be set to `Some` if the `--exec` argument was supplied. // This will be set to `Some` if the `--exec` argument was supplied.
if let Some(ref cmd) = config.command { if let Some(ref cmd) = config.command {
if cmd.in_batch_mode() { if cmd.in_batch_mode() {
exec::batch(rx.into_iter().flatten(), cmd, config) exec::batch(rx, cmd, &config)
} else { } else {
let out_perm = Mutex::new(()); let out_perm = Arc::new(Mutex::new(()));
thread::scope(|scope| { // Each spawned job will store it's thread handle in here.
// Each spawned job will store its thread handle in here. let mut handles = Vec::with_capacity(threads);
let threads = config.threads; for _ in 0..threads {
let mut handles = Vec::with_capacity(threads); let config = Arc::clone(&config);
for _ in 0..threads { let rx = rx.clone();
let rx = rx.clone(); let cmd = Arc::clone(cmd);
let out_perm = Arc::clone(&out_perm);
// Spawn a job thread that will listen for and execute inputs. // Spawn a job thread that will listen for and execute inputs.
let handle = scope let handle = thread::spawn(move || exec::job(rx, cmd, out_perm, &config));
.spawn(|| exec::job(rx.into_iter().flatten(), cmd, &out_perm, config));
// Push the handle of the spawned thread into the vector for later joining. // Push the handle of the spawned thread into the vector for later joining.
handles.push(handle); handles.push(handle);
} }
let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());
merge_exitcodes(exit_codes) let exit_codes = handles
}) .into_iter()
.map(|handle| handle.join().unwrap())
.collect::<Vec<_>>();
merge_exitcodes(exit_codes)
} }
} else { } else {
let stdout = io::stdout().lock(); let stdout = io::stdout();
let stdout = stdout.lock();
let stdout = io::BufWriter::new(stdout); let stdout = io::BufWriter::new(stdout);
ReceiverBuffer::new(self, rx, stdout).process() let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout);
rxbuffer.process()
} }
} })
}
/// Spawn the sender threads. fn spawn_senders(
fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) { config: &Arc<Config>,
walker.run(|| { quit_flag: &Arc<AtomicBool>,
let patterns = &self.patterns; patterns: Arc<Vec<Regex>>,
let config = &self.config; parallel_walker: ignore::WalkParallel,
let quit_flag = self.quit_flag.as_ref(); tx: Sender<WorkerResult>,
) {
parallel_walker.run(|| {
let config = Arc::clone(config);
let patterns = Arc::clone(&patterns);
let tx_thread = tx.clone();
let quit_flag = Arc::clone(quit_flag);
let mut limit = 0x100; Box::new(move |entry_o| {
if let Some(cmd) = &config.command { if quit_flag.load(Ordering::Relaxed) {
if !cmd.in_batch_mode() && config.threads > 1 { return ignore::WalkState::Quit;
// Evenly distribute work between multiple receivers }
limit = 1;
let entry = match entry_o {
Ok(ref e) if e.depth() == 0 => {
// Skip the root directory entry.
return ignore::WalkState::Continue;
}
Ok(e) => DirEntry::normal(e),
Err(ignore::Error::WithPath {
path,
err: inner_err,
}) => match inner_err.as_ref() {
ignore::Error::Io(io_error)
if io_error.kind() == io::ErrorKind::NotFound
&& path
.symlink_metadata()
.ok()
.map_or(false, |m| m.file_type().is_symlink()) =>
{
DirEntry::broken_symlink(path)
}
_ => {
return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath {
path,
err: inner_err,
})) {
Ok(_) => ignore::WalkState::Continue,
Err(_) => ignore::WalkState::Quit,
}
}
},
Err(err) => {
return match tx_thread.send(WorkerResult::Error(err)) {
Ok(_) => ignore::WalkState::Continue,
Err(_) => ignore::WalkState::Quit,
}
}
};
if let Some(min_depth) = config.min_depth {
if entry.depth().map_or(true, |d| d < min_depth) {
return ignore::WalkState::Continue;
} }
} }
let mut tx = BatchSender::new(tx.clone(), limit);
Box::new(move |entry| { // Check the name first, since it doesn't require metadata
if quit_flag.load(Ordering::Relaxed) { let entry_path = entry.path();
return WalkState::Quit;
let search_str: Cow<OsStr> = if config.search_full_path {
let path_abs_buf = filesystem::path_absolute_form(entry_path)
.expect("Retrieving absolute path succeeds");
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
} else {
match entry_path.file_name() {
Some(filename) => Cow::Borrowed(filename),
None => unreachable!(
"Encountered file system entry without a file name. This should only \
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
appear in a file system traversal."
),
} }
};
let entry = match entry { if !patterns
Ok(ref e) if e.depth() == 0 => { .iter()
// Skip the root directory entry. .all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
return WalkState::Continue; {
return ignore::WalkState::Continue;
}
// Filter out unwanted extensions.
if let Some(ref exts_regex) = config.extensions {
if let Some(path_str) = entry_path.file_name() {
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
return ignore::WalkState::Continue;
} }
Ok(e) => DirEntry::normal(e),
Err(ignore::Error::WithPath {
path,
err: inner_err,
}) => match inner_err.as_ref() {
ignore::Error::Io(io_error)
if io_error.kind() == io::ErrorKind::NotFound
&& path
.symlink_metadata()
.ok()
.map_or(false, |m| m.file_type().is_symlink()) =>
{
DirEntry::broken_symlink(path)
}
_ => {
return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
path,
err: inner_err,
})) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
}
},
Err(err) => {
return match tx.send(WorkerResult::Error(err)) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
}
};
if let Some(min_depth) = config.min_depth {
if entry.depth().map_or(true, |d| d < min_depth) {
return WalkState::Continue;
}
}
// Check the name first, since it doesn't require metadata
let entry_path = entry.path();
let search_str: Cow<OsStr> = if config.search_full_path {
let path_abs_buf = filesystem::path_absolute_form(entry_path)
.expect("Retrieving absolute path succeeds");
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
} else { } else {
match entry_path.file_name() { return ignore::WalkState::Continue;
Some(filename) => Cow::Borrowed(filename),
None => unreachable!(
"Encountered file system entry without a file name. This should only \
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
appear in a file system traversal."
),
}
};
if !patterns
.iter()
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
{
return WalkState::Continue;
} }
}
// Filter out unwanted extensions. // Filter out unwanted file types.
if let Some(ref exts_regex) = config.extensions { if let Some(ref file_types) = config.file_types {
if let Some(path_str) = entry_path.file_name() { if file_types.should_ignore(&entry) {
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) { return ignore::WalkState::Continue;
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
} }
}
// Filter out unwanted file types. #[cfg(unix)]
if let Some(ref file_types) = config.file_types { {
if file_types.should_ignore(&entry) { if let Some(ref owner_constraint) = config.owner_constraint {
return WalkState::Continue;
}
}
#[cfg(unix)]
{
if let Some(ref owner_constraint) = config.owner_constraint {
if let Some(metadata) = entry.metadata() {
if !owner_constraint.matches(metadata) {
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
}
}
// Filter out unwanted sizes if it is a file and we have been given size constraints.
if !config.size_constraints.is_empty() {
if entry_path.is_file() {
if let Some(metadata) = entry.metadata() {
let file_size = metadata.len();
if config
.size_constraints
.iter()
.any(|sc| !sc.is_within(file_size))
{
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
}
// Filter out unwanted modification times
if !config.time_constraints.is_empty() {
let mut matched = false;
if let Some(metadata) = entry.metadata() { if let Some(metadata) = entry.metadata() {
if let Ok(modified) = metadata.modified() { if !owner_constraint.matches(metadata) {
matched = config return ignore::WalkState::Continue;
.time_constraints
.iter()
.all(|tf| tf.applies_to(&modified));
} }
} } else {
if !matched { return ignore::WalkState::Continue;
return WalkState::Continue;
} }
} }
}
if config.is_printing() { // Filter out unwanted sizes if it is a file and we have been given size constraints.
if let Some(ls_colors) = &config.ls_colors { if !config.size_constraints.is_empty() {
// Compute colors in parallel if entry_path.is_file() {
entry.style(ls_colors); if let Some(metadata) = entry.metadata() {
let file_size = metadata.len();
if config
.size_constraints
.iter()
.any(|sc| !sc.is_within(file_size))
{
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
}
// Filter out unwanted modification times
if !config.time_constraints.is_empty() {
let mut matched = false;
if let Some(metadata) = entry.metadata() {
if let Ok(modified) = metadata.modified() {
matched = config
.time_constraints
.iter()
.all(|tf| tf.applies_to(&modified));
} }
} }
if !matched {
let send_result = tx.send(WorkerResult::Entry(entry)); return ignore::WalkState::Continue;
if send_result.is_err() {
return WalkState::Quit;
} }
}
// Apply pruning. if config.is_printing() {
if config.prune { if let Some(ls_colors) = &config.ls_colors {
return WalkState::Skip; // Compute colors in parallel
entry.style(ls_colors);
} }
}
WalkState::Continue let send_result = tx_thread.send(WorkerResult::Entry(entry));
})
});
}
/// Perform the recursive scan. if send_result.is_err() {
fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> { return ignore::WalkState::Quit;
let config = &self.config; }
let walker = self.build_walker(paths)?;
if config.ls_colors.is_some() && config.is_printing() { // Apply pruning.
let quit_flag = Arc::clone(&self.quit_flag); if config.prune {
let interrupt_flag = Arc::clone(&self.interrupt_flag); return ignore::WalkState::Skip;
}
ctrlc::set_handler(move || { ignore::WalkState::Continue
quit_flag.store(true, Ordering::Relaxed); })
});
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
// Ctrl-C has been pressed twice, exit NOW
ExitCode::KilledBySigint.exit();
}
})
.unwrap();
}
let (tx, rx) = bounded(2 * config.threads);
let exit_code = thread::scope(|scope| {
// Spawn the receiver thread(s)
let receiver = scope.spawn(|| self.receive(rx));
// Spawn the sender threads.
self.spawn_senders(walker, tx);
receiver.join().unwrap()
});
if self.interrupt_flag.load(Ordering::Relaxed) {
Ok(ExitCode::KilledBySigint)
} else {
Ok(exit_code)
}
}
}
/// Recursively scan the given search path for files / pathnames matching the patterns.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
WorkerState::new(patterns, config).scan(paths)
} }

View File

@ -20,9 +20,6 @@ pub struct TestEnv {
/// Normalize each line by sorting the whitespace-separated words /// Normalize each line by sorting the whitespace-separated words
normalize_line: bool, normalize_line: bool,
/// Temporary directory for storing test config (global ignore file)
config_dir: Option<TempDir>,
} }
/// Create the working directory and the test files. /// Create the working directory and the test files.
@ -62,16 +59,6 @@ fn create_working_directory(
Ok(temp_dir) Ok(temp_dir)
} }
fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {
let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?;
let fd_dir = config_dir.path().join("fd");
fs::create_dir(&fd_dir)?;
let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?;
ignore_file.write_all(ignore_file_content.as_bytes())?;
Ok(config_dir)
}
/// Find the *fd* executable. /// Find the *fd* executable.
fn find_fd_exe() -> PathBuf { fn find_fd_exe() -> PathBuf {
// Tests exe is in target/debug/deps, the *fd* exe is in target/debug // Tests exe is in target/debug/deps, the *fd* exe is in target/debug
@ -129,7 +116,7 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
.lines() .lines()
.map(|line| { .map(|line| {
let line = if trim_start { line.trim_start() } else { line }; let line = if trim_start { line.trim_start() } else { line };
let line = line.replace('/', std::path::MAIN_SEPARATOR_STR); let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string());
if normalize_line { if normalize_line {
let mut words: Vec<_> = line.split_whitespace().collect(); let mut words: Vec<_> = line.split_whitespace().collect();
words.sort_unstable(); words.sort_unstable();
@ -163,7 +150,6 @@ impl TestEnv {
temp_dir, temp_dir,
fd_exe, fd_exe,
normalize_line: false, normalize_line: false,
config_dir: None,
} }
} }
@ -172,16 +158,6 @@ impl TestEnv {
temp_dir: self.temp_dir, temp_dir: self.temp_dir,
fd_exe: self.fd_exe, fd_exe: self.fd_exe,
normalize_line: normalize, normalize_line: normalize,
config_dir: self.config_dir,
}
}
pub fn global_ignore_file(self, content: &str) -> TestEnv {
let config_dir =
create_config_directory_with_global_ignore(content).expect("config directory");
TestEnv {
config_dir: Some(config_dir),
..self
} }
} }
@ -230,8 +206,13 @@ impl TestEnv {
path: P, path: P,
args: &[&str], args: &[&str],
) -> process::Output { ) -> process::Output {
// Setup *fd* command.
let mut cmd = process::Command::new(&self.fd_exe);
cmd.current_dir(self.temp_dir.path().join(path));
cmd.arg("--no-global-ignore-file").args(args);
// Run *fd*. // Run *fd*.
let output = self.run_command(path.as_ref(), args); let output = cmd.output().expect("fd output");
// Check for exit status. // Check for exit status.
if !output.status.success() { if !output.status.success() {
@ -307,21 +288,6 @@ impl TestEnv {
self.assert_error_subdirectory(".", args, Some(expected)) self.assert_error_subdirectory(".", args, Some(expected))
} }
fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
// Setup *fd* command.
let mut cmd = process::Command::new(&self.fd_exe);
cmd.current_dir(self.temp_dir.path().join(path));
if let Some(config_dir) = &self.config_dir {
cmd.env("XDG_CONFIG_HOME", config_dir.path());
} else {
cmd.arg("--no-global-ignore-file");
}
cmd.args(args);
// Run *fd*.
cmd.output().expect("fd output")
}
/// Assert that calling *fd* in the specified path under the root working directory, /// Assert that calling *fd* in the specified path under the root working directory,
/// and with the specified arguments produces an error with the expected message. /// and with the specified arguments produces an error with the expected message.
fn assert_error_subdirectory<P: AsRef<Path>>( fn assert_error_subdirectory<P: AsRef<Path>>(
@ -330,7 +296,13 @@ impl TestEnv {
args: &[&str], args: &[&str],
expected: Option<&str>, expected: Option<&str>,
) -> process::ExitStatus { ) -> process::ExitStatus {
let output = self.run_command(path.as_ref(), args); // Setup *fd* command.
let mut cmd = process::Command::new(&self.fd_exe);
cmd.current_dir(self.temp_dir.path().join(path));
cmd.arg("--no-global-ignore-file").args(args);
// Run *fd*.
let output = cmd.output().expect("fd output");
if let Some(expected) = expected { if let Some(expected) = expected {
// Normalize both expected and actual output. // Normalize both expected and actual output.

View File

@ -1,7 +1,5 @@
mod testenv; mod testenv;
#[cfg(unix)]
use nix::unistd::{Gid, Group, Uid, User};
use std::fs; use std::fs;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
@ -810,62 +808,6 @@ fn test_custom_ignore_precedence() {
te.assert_output(&["--no-ignore", "foo"], "inner/foo"); te.assert_output(&["--no-ignore", "foo"], "inner/foo");
} }
/// Don't require git to respect gitignore (--no-require-git)
#[test]
fn test_respect_ignore_files() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Not in a git repo anymore
fs::remove_dir(te.test_root().join(".git")).unwrap();
// don't respect gitignore because we're not in a git repo
te.assert_output(
&["foo"],
"a.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/",
);
// respect gitignore because we set `--no-require-git`
te.assert_output(
&["--no-require-git", "foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/",
);
// make sure overriding works
te.assert_output(
&["--no-require-git", "--require-git", "foo"],
"a.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/",
);
te.assert_output(
&["--no-require-git", "--no-ignore", "foo"],
"a.foo
gitignored.foo
fdignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/",
);
}
/// VCS ignored files (--no-ignore-vcs) /// VCS ignored files (--no-ignore-vcs)
#[test] #[test]
fn test_no_ignore_vcs() { fn test_no_ignore_vcs() {
@ -937,47 +879,6 @@ fn test_no_ignore_aliases() {
); );
} }
#[cfg(not(windows))]
#[test]
fn test_global_ignore() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
te.assert_output(
&[],
"a.foo
e1 e2
symlink",
);
}
#[cfg(not(windows))]
#[test_case("--unrestricted", ".hidden.foo
a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/"; "unrestricted")]
#[test_case("--no-ignore", "a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/"; "no-ignore")]
#[test_case("--no-global-ignore-file", "a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo/"; "no-global-ignore-file")]
fn test_no_global_ignore(flag: &str, expected_output: &str) {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
te.assert_output(&[flag, "foo"], expected_output);
}
/// Symlinks (--follow) /// Symlinks (--follow)
#[test] #[test]
fn test_follow() { fn test_follow() {
@ -1301,18 +1202,10 @@ fn test_type() {
fn test_type_executable() { fn test_type_executable() {
use std::os::unix::fs::OpenOptionsExt; use std::os::unix::fs::OpenOptionsExt;
// This test assumes the current user isn't root
// (otherwise if the executable bit is set for any level, it is executable for the current
// user)
if Uid::current().is_root() {
return;
}
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
fs::OpenOptions::new() fs::OpenOptions::new()
.create_new(true) .create(true)
.truncate(true)
.write(true) .write(true)
.mode(0o777) .mode(0o777)
.open(te.test_root().join("executable-file.sh")) .open(te.test_root().join("executable-file.sh"))
@ -1320,7 +1213,6 @@ fn test_type_executable() {
fs::OpenOptions::new() fs::OpenOptions::new()
.create(true) .create(true)
.truncate(true)
.write(true) .write(true)
.mode(0o645) .mode(0o645)
.open(te.test_root().join("not-user-executable-file.sh")) .open(te.test_root().join("not-user-executable-file.sh"))
@ -1624,66 +1516,6 @@ fn test_excludes() {
); );
} }
#[test]
fn format() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--format", "path={}", "--path-separator=/"],
"path=a.foo
path=e1 e2
path=one
path=one/b.foo
path=one/two
path=one/two/C.Foo2
path=one/two/c.foo
path=one/two/three
path=one/two/three/d.foo
path=one/two/three/directory_foo
path=symlink",
);
te.assert_output(
&["foo", "--format", "noExt={.}", "--path-separator=/"],
"noExt=a
noExt=one/b
noExt=one/two/C
noExt=one/two/c
noExt=one/two/three/d
noExt=one/two/three/directory_foo",
);
te.assert_output(
&["foo", "--format", "basename={/}", "--path-separator=/"],
"basename=a.foo
basename=b.foo
basename=C.Foo2
basename=c.foo
basename=d.foo
basename=directory_foo",
);
te.assert_output(
&["foo", "--format", "name={/.}", "--path-separator=/"],
"name=a
name=b
name=C
name=c
name=d
name=directory_foo",
);
te.assert_output(
&["foo", "--format", "parent={//}", "--path-separator=/"],
"parent=.
parent=one
parent=one/two
parent=one/two
parent=one/two/three
parent=one/two/three",
);
}
/// Shell script execution (--exec) /// Shell script execution (--exec)
#[test] #[test]
fn test_exec() { fn test_exec() {
@ -1869,26 +1701,18 @@ fn test_exec_batch() {
\n\ \n\
Usage: fd [OPTIONS] [pattern] [path]...\n\ Usage: fd [OPTIONS] [pattern] [path]...\n\
\n\ \n\
For more information, try '--help'.\n\ For more information try '--help'\n\
", ",
); );
te.assert_failure_with_error( te.assert_failure_with_error(
&["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"], &["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"],
"error: the argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'\n\ "error: The argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'",
\n\
Usage: fd --exec-batch <cmd>... <pattern> [path]...\n\
\n\
For more information, try '--help'.\n\
",
); );
te.assert_failure_with_error( te.assert_failure_with_error(
&["foo", "--exec-batch"], &["foo", "--exec-batch"],
"error: a value is required for '--exec-batch <cmd>...' but none was supplied\n\ "error: The argument '--exec-batch <cmd>...' requires a value but none was supplied",
\n\
For more information, try '--help'.\n\
",
); );
te.assert_failure_with_error( te.assert_failure_with_error(
@ -1897,7 +1721,7 @@ fn test_exec_batch() {
\n\ \n\
Usage: fd [OPTIONS] [pattern] [path]...\n\ Usage: fd [OPTIONS] [pattern] [path]...\n\
\n\ \n\
For more information, try '--help'.\n\ For more information try '--help'\n\
", ",
); );
@ -2325,10 +2149,10 @@ fn test_owner_ignore_all() {
#[test] #[test]
fn test_owner_current_user() { fn test_owner_current_user() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
let uid = Uid::current(); let uid = users::get_current_uid();
te.assert_output(&["--owner", &uid.to_string(), "a.foo"], "a.foo"); te.assert_output(&["--owner", &uid.to_string(), "a.foo"], "a.foo");
if let Ok(Some(user)) = User::from_uid(uid) { if let Some(username) = users::get_current_username().map(|u| u.into_string().unwrap()) {
te.assert_output(&["--owner", &user.name, "a.foo"], "a.foo"); te.assert_output(&["--owner", &username, "a.foo"], "a.foo");
} }
} }
@ -2336,10 +2160,10 @@ fn test_owner_current_user() {
#[test] #[test]
fn test_owner_current_group() { fn test_owner_current_group() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
let gid = Gid::current(); let gid = users::get_current_gid();
te.assert_output(&["--owner", &format!(":{}", gid), "a.foo"], "a.foo"); te.assert_output(&["--owner", &format!(":{}", gid), "a.foo"], "a.foo");
if let Ok(Some(group)) = Group::from_gid(gid) { if let Some(groupname) = users::get_current_groupname().map(|u| u.into_string().unwrap()) {
te.assert_output(&["--owner", &format!(":{}", group.name), "a.foo"], "a.foo"); te.assert_output(&["--owner", &format!(":{}", groupname), "a.foo"], "a.foo");
} }
} }
@ -2347,7 +2171,7 @@ fn test_owner_current_group() {
#[test] #[test]
fn test_owner_root() { fn test_owner_root() {
// This test assumes the current user isn't root // This test assumes the current user isn't root
if Uid::current().is_root() || Gid::current() == Gid::from_raw(0) { if users::get_current_uid() == 0 || users::get_current_gid() == 0 {
return; return;
} }
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
@ -2448,11 +2272,6 @@ fn test_max_results() {
}; };
assert_just_one_result_with_option("--max-results=1"); assert_just_one_result_with_option("--max-results=1");
assert_just_one_result_with_option("-1"); assert_just_one_result_with_option("-1");
// check that --max-results & -1 conflic with --exec
te.assert_failure(&["thing", "--max-results=0", "--exec=cat"]);
te.assert_failure(&["thing", "-1", "--exec=cat"]);
te.assert_failure(&["thing", "--max-results=1", "-1", "--exec=cat"]);
} }
/// Filenames with non-utf8 paths are passed to the executed program unchanged /// Filenames with non-utf8 paths are passed to the executed program unchanged
@ -2539,7 +2358,6 @@ fn test_number_parsing_errors() {
#[test_case("--hidden", &["--no-hidden"] ; "hidden")] #[test_case("--hidden", &["--no-hidden"] ; "hidden")]
#[test_case("--no-ignore", &["--ignore"] ; "no-ignore")] #[test_case("--no-ignore", &["--ignore"] ; "no-ignore")]
#[test_case("--no-ignore-vcs", &["--ignore-vcs"] ; "no-ignore-vcs")] #[test_case("--no-ignore-vcs", &["--ignore-vcs"] ; "no-ignore-vcs")]
#[test_case("--no-require-git", &["--require-git"] ; "no-require-git")]
#[test_case("--follow", &["--no-follow"] ; "follow")] #[test_case("--follow", &["--no-follow"] ; "follow")]
#[test_case("--absolute-path", &["--relative-path"] ; "absolute-path")] #[test_case("--absolute-path", &["--relative-path"] ; "absolute-path")]
#[test_case("-u", &["--ignore", "--no-hidden"] ; "u")] #[test_case("-u", &["--ignore", "--no-hidden"] ; "u")]
@ -2618,57 +2436,3 @@ fn test_invalid_cwd() {
panic!("{:?}", output); panic!("{:?}", output);
} }
} }
/// Test behavior of .git directory with various flags
#[test]
fn test_git_dir() {
let te = TestEnv::new(
&[".git/one", "other_dir/.git", "nested/dir/.git"],
&[
".git/one/foo.a",
".git/.foo",
".git/a.foo",
"other_dir/.git/foo1",
"nested/dir/.git/foo2",
],
);
te.assert_output(
&["--hidden", "foo"],
".git/one/foo.a
.git/.foo
.git/a.foo
other_dir/.git/foo1
nested/dir/.git/foo2",
);
te.assert_output(&["--no-ignore", "foo"], "");
te.assert_output(
&["--hidden", "--no-ignore", "foo"],
".git/one/foo.a
.git/.foo
.git/a.foo
other_dir/.git/foo1
nested/dir/.git/foo2",
);
te.assert_output(
&["--hidden", "--no-ignore-vcs", "foo"],
".git/one/foo.a
.git/.foo
.git/a.foo
other_dir/.git/foo1
nested/dir/.git/foo2",
);
}
#[test]
fn test_gitignore_parent() {
let te = TestEnv::new(&["sub"], &[".abc", "sub/.abc"]);
fs::File::create(te.test_root().join(".gitignore"))
.unwrap()
.write_all(b".abc\n")
.unwrap();
te.assert_output_subdirectory("sub", &["--hidden"], "");
te.assert_output_subdirectory("sub", &["--hidden", "--search-path", "."], "");
}