mirror of https://github.com/sharkdp/fd.git
Compare commits
No commits in common. "master" and "v8.6.0" have entirely different histories.
|
@ -4,7 +4,3 @@ updates:
|
|||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
name: CICD
|
||||
|
||||
env:
|
||||
MIN_SUPPORTED_RUST_VERSION: "1.60.0"
|
||||
CICD_INTERMEDIATES_DIR: "_cicd-intermediates"
|
||||
MSRV_FEATURES: "--all-features"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
@ -14,90 +14,68 @@ on:
|
|||
- '*'
|
||||
|
||||
jobs:
|
||||
crate_metadata:
|
||||
name: Extract crate metadata
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Extract crate information
|
||||
id: crate_metadata
|
||||
run: |
|
||||
echo "name=fd" | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT
|
||||
outputs:
|
||||
name: ${{ steps.crate_metadata.outputs.name }}
|
||||
version: ${{ steps.crate_metadata.outputs.version }}
|
||||
maintainer: ${{ steps.crate_metadata.outputs.maintainer }}
|
||||
homepage: ${{ steps.crate_metadata.outputs.homepage }}
|
||||
msrv: ${{ steps.crate_metadata.outputs.msrv }}
|
||||
|
||||
ensure_cargo_fmt:
|
||||
name: Ensure 'cargo fmt' has been run
|
||||
code_quality:
|
||||
name: Code quality
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo fmt -- --check
|
||||
|
||||
lint_check:
|
||||
name: Ensure 'cargo clippy' has no warnings
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: clippy
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo clippy --all-targets --all-features -- -Dwarnings
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
- name: Install rust toolchain
|
||||
run: |
|
||||
rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
|
||||
rustup set profile minimal
|
||||
rustup toolchain install stable -c "clippy,rustfmt"
|
||||
rustup default stable
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: Ensure `cargo fmt` has been run
|
||||
run: cargo fmt --check
|
||||
- name: Ensure MSRV is set in `clippy.toml`
|
||||
run: grep "^msrv = \"${{ env.MIN_SUPPORTED_RUST_VERSION }}\"\$" clippy.toml
|
||||
- name: Run clippy
|
||||
run: cargo clippy --locked --all-targets --all-features
|
||||
|
||||
min_version:
|
||||
name: Minimum supported rust version
|
||||
runs-on: ubuntu-20.04
|
||||
needs: crate_metadata
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }})
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ needs.crate_metadata.outputs.msrv }}
|
||||
components: clippy
|
||||
- name: Install rust toolchain (v${{ env.MIN_SUPPORTED_RUST_VERSION }})
|
||||
run: |
|
||||
rustup set profile minimal
|
||||
rustup toolchain install ${{ env.MIN_SUPPORTED_RUST_VERSION }} -c clippy
|
||||
rustup default ${{ env.MIN_SUPPORTED_RUST_VERSION }}
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)
|
||||
run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }}
|
||||
run: cargo clippy --locked --all-targets --all-features
|
||||
- name: Run tests
|
||||
run: cargo test --locked ${{ env.MSRV_FEATURES }}
|
||||
run: cargo test --locked
|
||||
|
||||
build:
|
||||
name: ${{ matrix.job.target }} (${{ matrix.job.os }})
|
||||
name: ${{ matrix.job.os }} (${{ matrix.job.target }})
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
needs: crate_metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- { target: aarch64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: aarch64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-musleabihf, os: ubuntu-22.04, use-cross: true }
|
||||
- { target: i686-pc-windows-msvc , os: windows-2022 }
|
||||
- { target: i686-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: i686-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: x86_64-apple-darwin , os: macos-12 }
|
||||
- { target: aarch64-apple-darwin , os: macos-14 }
|
||||
- { target: x86_64-pc-windows-gnu , os: windows-2022 }
|
||||
- { target: x86_64-pc-windows-msvc , os: windows-2022 }
|
||||
- { target: x86_64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: x86_64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
env:
|
||||
BUILD_CMD: cargo
|
||||
- { os: ubuntu-20.04, target: arm-unknown-linux-gnueabihf , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: arm-unknown-linux-musleabihf, use-cross: true }
|
||||
- { os: ubuntu-20.04, target: aarch64-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: i686-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: i686-unknown-linux-musl , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: x86_64-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: x86_64-unknown-linux-musl , use-cross: true }
|
||||
- { os: macos-12 , target: x86_64-apple-darwin }
|
||||
# - { os: windows-2019, target: i686-pc-windows-gnu } ## disabled; error: linker `i686-w64-mingw32-gcc` not found
|
||||
- { os: windows-2019, target: i686-pc-windows-msvc }
|
||||
- { os: windows-2019, target: x86_64-pc-windows-gnu }
|
||||
- { os: windows-2019, target: x86_64-pc-windows-msvc }
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install prerequisites
|
||||
shell: bash
|
||||
|
@ -107,24 +85,20 @@ jobs:
|
|||
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
|
||||
esac
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.job.target }}
|
||||
# On windows, for now build with 1.77.2, so that it works on windows 7.
|
||||
# When we update the MSRV again, we'll need to revisit this, and probably drop support for Win7
|
||||
toolchain: "${{ contains(matrix.job.target, 'windows-') && '1.77.2' || 'stable' }}"
|
||||
|
||||
- name: Install cross
|
||||
if: matrix.job.use-cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross
|
||||
|
||||
- name: Overwrite build command env variable
|
||||
if: matrix.job.use-cross
|
||||
- name: Extract crate information
|
||||
shell: bash
|
||||
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
|
||||
run: |
|
||||
echo "PROJECT_NAME=fd" >> $GITHUB_ENV
|
||||
echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV
|
||||
echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV
|
||||
echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: |
|
||||
rustup set profile minimal
|
||||
rustup toolchain install stable
|
||||
rustup override set stable
|
||||
rustup target add ${{ matrix.job.target }}
|
||||
|
||||
- name: Show version information (Rust, cargo, GCC)
|
||||
shell: bash
|
||||
|
@ -136,12 +110,29 @@ jobs:
|
|||
cargo -V
|
||||
rustc -V
|
||||
|
||||
- name: Build
|
||||
- name: Set cargo cmd
|
||||
shell: bash
|
||||
run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }}
|
||||
run: echo "CARGO_CMD=cargo" >> $GITHUB_ENV
|
||||
|
||||
- name: Set binary name & path
|
||||
id: bin
|
||||
- name: Set cargo cmd to cross
|
||||
shell: bash
|
||||
if: ${{ matrix.job.use-cross == true }}
|
||||
run: echo "CARGO_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
key: ${{ matrix.job.os }}-${{ matrix.job.target }}
|
||||
|
||||
- name: Install cross
|
||||
if: ${{ matrix.job.use-cross == true }}
|
||||
run: cargo install cross
|
||||
|
||||
- name: Build
|
||||
run: ${{ env.CARGO_CMD }} build --locked --release --target=${{ matrix.job.target }}
|
||||
|
||||
- name: Strip debug information from executable
|
||||
id: strip
|
||||
shell: bash
|
||||
run: |
|
||||
# Figure out suffix of binary
|
||||
|
@ -150,11 +141,29 @@ jobs:
|
|||
*-pc-windows-*) EXE_suffix=".exe" ;;
|
||||
esac;
|
||||
|
||||
# Setup paths
|
||||
BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}"
|
||||
BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}"
|
||||
# Figure out what strip tool to use if any
|
||||
STRIP="strip"
|
||||
case ${{ matrix.job.target }} in
|
||||
arm-unknown-linux-*) STRIP="arm-linux-gnueabihf-strip" ;;
|
||||
aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;;
|
||||
*-pc-windows-msvc) STRIP="" ;;
|
||||
esac;
|
||||
|
||||
# Let subsequent steps know where to find the binary
|
||||
# Setup paths
|
||||
BIN_DIR="${{ env.CICD_INTERMEDIATES_DIR }}/stripped-release-bin/"
|
||||
mkdir -p "${BIN_DIR}"
|
||||
BIN_NAME="${{ env.PROJECT_NAME }}${EXE_suffix}"
|
||||
BIN_PATH="${BIN_DIR}/${BIN_NAME}"
|
||||
|
||||
# Copy the release build binary to the result location
|
||||
cp "target/${{ matrix.job.target }}/release/${BIN_NAME}" "${BIN_DIR}"
|
||||
|
||||
# Also strip if possible
|
||||
if [ -n "${STRIP}" ]; then
|
||||
"${STRIP}" "${BIN_PATH}"
|
||||
fi
|
||||
|
||||
# Let subsequent steps know where to find the (stripped) bin
|
||||
echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
|
||||
echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
|
@ -164,12 +173,11 @@ jobs:
|
|||
run: |
|
||||
# test only library unit tests and binary for arm-type targets
|
||||
unset CARGO_TEST_OPTIONS
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac;
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
|
||||
echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Run tests
|
||||
shell: bash
|
||||
run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
|
||||
run: ${{ env.CARGO_CMD }} test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
|
||||
|
||||
- name: Generate completions
|
||||
id: completions
|
||||
|
@ -181,7 +189,7 @@ jobs:
|
|||
shell: bash
|
||||
run: |
|
||||
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
||||
PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }}
|
||||
PKG_BASENAME=${PROJECT_NAME}-v${PROJECT_VERSION}-${{ matrix.job.target }}
|
||||
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
||||
echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
|
@ -190,14 +198,14 @@ jobs:
|
|||
mkdir -p "${ARCHIVE_DIR}"
|
||||
|
||||
# Binary
|
||||
cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
|
||||
cp "${{ steps.strip.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
|
||||
|
||||
# Man page
|
||||
cp 'doc/${{ env.PROJECT_NAME }}.1' "$ARCHIVE_DIR"
|
||||
|
||||
# README, LICENSE and CHANGELOG files
|
||||
cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR"
|
||||
|
||||
# Man page
|
||||
cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR"
|
||||
|
||||
# Autocompletion files
|
||||
cp -r autocomplete "${ARCHIVE_DIR}"
|
||||
|
||||
|
@ -222,10 +230,10 @@ jobs:
|
|||
DPKG_DIR="${DPKG_STAGING}/dpkg"
|
||||
mkdir -p "${DPKG_DIR}"
|
||||
|
||||
DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}
|
||||
DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }}-musl
|
||||
case ${{ matrix.job.target }} in *-musl*) DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac;
|
||||
DPKG_VERSION=${{ needs.crate_metadata.outputs.version }}
|
||||
DPKG_BASENAME=${PROJECT_NAME}
|
||||
DPKG_CONFLICTS=${PROJECT_NAME}-musl
|
||||
case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${PROJECT_NAME}-musl ; DPKG_CONFLICTS=${PROJECT_NAME} ;; esac;
|
||||
DPKG_VERSION=${PROJECT_VERSION}
|
||||
|
||||
unset DPKG_ARCH
|
||||
case ${{ matrix.job.target }} in
|
||||
|
@ -240,16 +248,16 @@ jobs:
|
|||
echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Binary
|
||||
install -Dm755 "${{ steps.bin.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.bin.outputs.BIN_NAME }}"
|
||||
install -Dm755 "${{ steps.strip.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.strip.outputs.BIN_NAME }}"
|
||||
|
||||
# Man page
|
||||
install -Dm644 'doc/${{ needs.crate_metadata.outputs.name }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1"
|
||||
install -Dm644 'doc/${{ env.PROJECT_NAME }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
|
||||
|
||||
# Autocompletion files
|
||||
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ needs.crate_metadata.outputs.name }}"
|
||||
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ needs.crate_metadata.outputs.name }}.fish"
|
||||
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ needs.crate_metadata.outputs.name }}"
|
||||
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ env.PROJECT_NAME }}"
|
||||
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ env.PROJECT_NAME }}.fish"
|
||||
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ env.PROJECT_NAME }}"
|
||||
|
||||
# README and LICENSE
|
||||
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
|
||||
|
@ -260,12 +268,12 @@ jobs:
|
|||
|
||||
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: ${{ needs.crate_metadata.outputs.name }}
|
||||
Source: ${{ needs.crate_metadata.outputs.homepage }}
|
||||
Upstream-Name: ${{ env.PROJECT_NAME }}
|
||||
Source: ${{ env.PROJECT_HOMEPAGE }}
|
||||
|
||||
Files: *
|
||||
Copyright: ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
Copyright: $COPYRIGHT_YEARS ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
Copyright: ${{ env.PROJECT_MAINTAINER }}
|
||||
Copyright: $COPYRIGHT_YEARS ${{ env.PROJECT_MAINTAINER }}
|
||||
License: Apache-2.0 or MIT
|
||||
|
||||
License: Apache-2.0
|
||||
|
@ -306,10 +314,10 @@ jobs:
|
|||
Version: ${DPKG_VERSION}
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
Homepage: ${{ needs.crate_metadata.outputs.homepage }}
|
||||
Maintainer: ${{ env.PROJECT_MAINTAINER }}
|
||||
Homepage: ${{ env.PROJECT_HOMEPAGE }}
|
||||
Architecture: ${DPKG_ARCH}
|
||||
Provides: ${{ needs.crate_metadata.outputs.name }}
|
||||
Provides: ${{ env.PROJECT_NAME }}
|
||||
Conflicts: ${DPKG_CONFLICTS}
|
||||
Description: simple, fast and user-friendly alternative to find
|
||||
fd is a program to find entries in your filesystem.
|
||||
|
@ -345,7 +353,7 @@ jobs:
|
|||
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish archives and packages
|
||||
uses: softprops/action-gh-release@v2
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: steps.is-release.outputs.IS_RELEASE
|
||||
with:
|
||||
files: |
|
||||
|
@ -353,15 +361,3 @@ jobs:
|
|||
${{ steps.debian-package.outputs.DPKG_PATH }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
winget:
|
||||
name: Publish to Winget
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
- uses: vedantmgoyal2009/winget-releaser@v2
|
||||
with:
|
||||
identifier: sharkdp.fd
|
||||
installers-regex: '-pc-windows-msvc\.zip$'
|
||||
token: ${{ secrets.WINGET_TOKEN }}
|
||||
|
|
100
CHANGELOG.md
100
CHANGELOG.md
|
@ -1,103 +1,3 @@
|
|||
# 10.1.0
|
||||
|
||||
## Features
|
||||
|
||||
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto". to force whether the cwd prefix is stripped or not.
|
||||
- Add a `--format` option which allows using a format template for direct ouput similar to the template used for `--exec`. (#1043)
|
||||
|
||||
## Bugfixes
|
||||
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
|
||||
|
||||
# v10.0.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add `dir` as an alias to `directory` when using `-t` \ `--type`, see #1460 and #1464 (@Ato2207).
|
||||
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
|
||||
- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature
|
||||
was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. And there isn't really a good way for us to add
|
||||
a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.
|
||||
See #1457.
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
|
||||
- Fix bug that would cause hidden files to be included despite gitignore rules
|
||||
if search path is "." (#1461, BurntSushi/ripgrep#2711).
|
||||
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
|
||||
have a larger system page size than the system that the binary was built on. (#1547)
|
||||
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
|
||||
|
||||
|
||||
## Changes
|
||||
- Minimum supported rust version is now 1.77.2
|
||||
|
||||
|
||||
# v9.0.0
|
||||
|
||||
## Performance
|
||||
|
||||
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
|
||||
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
|
||||
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
|
||||
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
|
||||
|
||||
- The default number of threads is now constrained to be at most 64. This should improve startup time on
|
||||
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
|
||||
|
||||
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
|
||||
use cases, see #1452 and #1313 (@tavianator).
|
||||
|
||||
## Features
|
||||
|
||||
- Support character and block device file types, see #1213 and #1336 (@cgzones)
|
||||
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
|
||||
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
|
||||
|
||||
## Other
|
||||
|
||||
- Fixed documentation typos, see #1409 (@marcospb19)
|
||||
|
||||
## Thanks
|
||||
|
||||
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
|
||||
|
||||
|
||||
|
||||
# v8.7.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- `-1` properly conflicts with the exec family of options.
|
||||
- `--max-results` overrides `-1`
|
||||
- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive
|
||||
- `--changed-within` now accepts a space as well as a "T" as the separator between date and time (due to update of chrono dependency)
|
||||
|
||||
## Other
|
||||
- Many dependencies were updated
|
||||
- Some documentation was updated and fixed
|
||||
|
||||
# v8.7.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix logic for when to use global ignore file. There was a bug where the only case where the
|
||||
global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`
|
||||
nor `--no-global-ignore-file` is passed. See #1209
|
||||
|
||||
# v8.6.0
|
||||
|
||||
## Features
|
||||
|
|
|
@ -13,11 +13,11 @@ give us the chance to discuss any potential changes first.
|
|||
## Add an entry to the changelog
|
||||
|
||||
If your contribution changes the behavior of `fd` (as opposed to a typo-fix
|
||||
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file
|
||||
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md) file
|
||||
and describe your changes. This makes the release process much easier and
|
||||
therefore helps to get your changes into a new `fd` release faster.
|
||||
|
||||
The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few
|
||||
The top of the `CHANGELOG` contains a *"unreleased"* section with a few
|
||||
subsections (Features, Bugfixes, …). Please add your entry to the subsection
|
||||
that best describes your change.
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
45
Cargo.toml
45
Cargo.toml
|
@ -12,13 +12,12 @@ keywords = [
|
|||
"filesystem",
|
||||
"tool",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
license = "MIT/Apache-2.0"
|
||||
name = "fd-find"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/sharkdp/fd"
|
||||
version = "10.1.0"
|
||||
version = "8.6.0"
|
||||
edition= "2021"
|
||||
rust-version = "1.77.2"
|
||||
|
||||
[badges.appveyor]
|
||||
repository = "sharkdp/fd"
|
||||
|
@ -34,38 +33,41 @@ path = "src/main.rs"
|
|||
version_check = "0.9"
|
||||
|
||||
[dependencies]
|
||||
aho-corasick = "1.1"
|
||||
nu-ansi-term = "0.50"
|
||||
nu-ansi-term = "0.46"
|
||||
argmax = "0.3.1"
|
||||
ignore = "0.4.22"
|
||||
regex = "1.10.3"
|
||||
regex-syntax = "0.8"
|
||||
atty = "0.2"
|
||||
ignore = "0.4.3"
|
||||
num_cpus = "1.13"
|
||||
regex = "1.7.0"
|
||||
regex-syntax = "0.6"
|
||||
ctrlc = "3.2"
|
||||
humantime = "2.1"
|
||||
globset = "0.4"
|
||||
anyhow = "1.0"
|
||||
etcetera = "0.8"
|
||||
normpath = "1.1.1"
|
||||
crossbeam-channel = "0.5.13"
|
||||
clap_complete = {version = "4.4.9", optional = true}
|
||||
dirs-next = "2.0"
|
||||
normpath = "0.3.2"
|
||||
once_cell = "1.15.0"
|
||||
crossbeam-channel = "0.5.6"
|
||||
clap_complete = {version = "4.0.6", optional = true}
|
||||
faccess = "0.2.4"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "4.4.13"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "derive"]
|
||||
version = "4.0.22"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "unstable-grouped", "derive"]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4.38"
|
||||
version = "0.4.23"
|
||||
default-features = false
|
||||
features = ["std", "clock"]
|
||||
|
||||
[dependencies.lscolors]
|
||||
version = "0.17"
|
||||
version = "0.13"
|
||||
default-features = false
|
||||
features = ["nu-ansi-term"]
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = { version = "0.29.0", default-features = false, features = ["signal", "user"] }
|
||||
users = "0.11.0"
|
||||
nix = { version = "0.24.2", default-features = false, features = ["signal"] }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
|
||||
libc = "0.2"
|
||||
|
@ -73,18 +75,17 @@ libc = "0.2"
|
|||
# FIXME: Re-enable jemalloc on macOS
|
||||
# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS
|
||||
# Catalina. See https://github.com/sharkdp/fd/issues/498 for details.
|
||||
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
|
||||
jemallocator = {version = "0.5.4", optional = true}
|
||||
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
|
||||
jemallocator = {version = "0.5.0", optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
diff = "0.1"
|
||||
tempfile = "3.10"
|
||||
tempfile = "3.3"
|
||||
filetime = "0.2"
|
||||
test-case = "3.3"
|
||||
test-case = "2.2"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
strip = true
|
||||
codegen-units = 1
|
||||
|
||||
[features]
|
||||
|
|
|
@ -1,6 +0,0 @@
|
|||
# https://github.com/sharkdp/fd/issues/1085
|
||||
[target.aarch64-unknown-linux-gnu.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
||||
|
||||
[target.aarch64-unknown-linux-musl.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
2
Makefile
2
Makefile
|
@ -6,7 +6,7 @@ datadir=$(prefix)/share
|
|||
exe_name=fd
|
||||
|
||||
$(EXE): Cargo.toml src/**/*.rs
|
||||
cargo build --profile $(PROFILE) --locked
|
||||
cargo build --profile $(PROFILE)
|
||||
|
||||
.PHONY: completions
|
||||
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd
|
||||
|
|
192
README.md
192
README.md
|
@ -2,7 +2,7 @@
|
|||
|
||||
[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml)
|
||||
[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)
|
||||
[[中文](https://github.com/cha0ran/fd-zh)]
|
||||
[[中文](https://github.com/chinanf-boy/fd-zh)]
|
||||
[[한국어](https://github.com/spearkkk/fd-kor)]
|
||||
|
||||
`fd` is a program to find entries in your filesystem.
|
||||
|
@ -10,7 +10,10 @@ It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.o
|
|||
While it does not aim to support all of `find`'s powerful functionality, it provides sensible
|
||||
(opinionated) defaults for a majority of use cases.
|
||||
|
||||
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting)
|
||||
Quick links:
|
||||
* [How to use](#how-to-use)
|
||||
* [Installation](#installation)
|
||||
* [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Features
|
||||
|
||||
|
@ -140,7 +143,7 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
|
|||
```
|
||||
|
||||
To really search *all* files and directories, simply combine the hidden and ignore features to show
|
||||
everything (`-HI`) or use `-u`/`--unrestricted`.
|
||||
everything (`-HI`).
|
||||
|
||||
### Matching the full path
|
||||
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
|
||||
|
@ -258,17 +261,12 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
|
|||
/mnt/external-drive
|
||||
*.bak
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
|
||||
This is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\fd\ignore` in
|
||||
Windows.
|
||||
|
||||
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
|
||||
are not included in output if you use the `--hidden` option.
|
||||
|
||||
### Deleting files
|
||||
|
||||
You can use `fd` to remove all files and directories that are matched by your search pattern.
|
||||
|
@ -286,8 +284,7 @@ option:
|
|||
If you also want to remove a certain class of directories, you can use the same technique. You will
|
||||
have to use `rm`s `--recursive`/`-r` flag to remove directories.
|
||||
|
||||
> [!NOTE]
|
||||
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
|
||||
situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
|
||||
No such file or directory"* errors in the `rm` call.
|
||||
|
@ -316,76 +313,81 @@ Options:
|
|||
-p, --full-path Search full abs. path (default: filename only)
|
||||
-d, --max-depth <depth> Set maximum search depth (default: none)
|
||||
-E, --exclude <pattern> Exclude entries that match the given glob pattern
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p), char-device
|
||||
(c), block-device (b)
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p)
|
||||
-e, --extension <ext> Filter by file extension
|
||||
-S, --size <size> Limit results based on the size of files
|
||||
--changed-within <date|dur> Filter by file modification time (newer than)
|
||||
--changed-before <date|dur> Filter by file modification time (older than)
|
||||
-o, --owner <user:group> Filter by owning user and/or group
|
||||
--format <fmt> Print results according to template
|
||||
-x, --exec <cmd>... Execute a command for each search result
|
||||
-X, --exec-batch <cmd>... Execute a command with all search results at once
|
||||
-c, --color <when> When to use colors [default: auto] [possible values: auto,
|
||||
always, never]
|
||||
-h, --help Print help (see more with '--help')
|
||||
-V, --version Print version
|
||||
-h, --help Print help information (use `--help` for more detail)
|
||||
-V, --version Print version information
|
||||
```
|
||||
|
||||
## Benchmark
|
||||
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000
|
||||
subdirectories and about 4 million files. For averaging and statistical analysis, I'm using
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
|
||||
subdirectories and about a million files. For averaging and statistical analysis, I'm using
|
||||
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
|
||||
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
|
||||
|
||||
Let's start with `find`:
|
||||
```
|
||||
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
Time (mean ± σ): 19.922 s ± 0.109 s
|
||||
Range (min … max): 19.765 s … 20.065 s
|
||||
Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
|
||||
Time (mean ± σ): 7.236 s ± 0.090 s
|
||||
|
||||
Range (min … max): 7.133 s … 7.385 s
|
||||
```
|
||||
|
||||
`find` is much faster if it does not need to perform a regular-expression search:
|
||||
```
|
||||
Benchmark 2: find ~ -iname '*[0-9].jpg'
|
||||
Time (mean ± σ): 11.226 s ± 0.104 s
|
||||
Range (min … max): 11.119 s … 11.466 s
|
||||
Benchmark #2: find ~ -iname '*[0-9].jpg'
|
||||
|
||||
Time (mean ± σ): 3.914 s ± 0.027 s
|
||||
|
||||
Range (min … max): 3.876 s … 3.964 s
|
||||
```
|
||||
|
||||
Now let's try the same for `fd`. Note that `fd` performs a regular expression
|
||||
search by default. The option `-u`/`--unrestricted` is needed here for
|
||||
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and
|
||||
ignored paths (see below):
|
||||
Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
|
||||
search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
|
||||
otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
|
||||
```
|
||||
Benchmark 3: fd -u '[0-9]\.jpg$' ~
|
||||
Time (mean ± σ): 854.8 ms ± 10.0 ms
|
||||
Range (min … max): 839.2 ms … 868.9 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
|
||||
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
|
||||
same 546 files :smile:.
|
||||
Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While we have
|
||||
performed a lot of different tests (and found consistent results), things might
|
||||
be different for you! We encourage everyone to try it out on their own. See
|
||||
Time (mean ± σ): 811.6 ms ± 26.9 ms
|
||||
|
||||
Range (min … max): 786.0 ms … 870.7 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately nine times faster than `find -iregex`
|
||||
and about five times faster than `find -iname`. By the way, both tools found the exact
|
||||
same 20880 files :smile:.
|
||||
|
||||
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
|
||||
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
|
||||
folders, it is almost an order of magnitude faster:
|
||||
```
|
||||
Benchmark #4: fd '[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 123.7 ms ± 6.0 ms
|
||||
|
||||
Range (min … max): 118.8 ms … 140.0 ms
|
||||
```
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
|
||||
performed quite a lot of different tests (and found consistent results), things might
|
||||
be different for you! I encourage everyone to try it out on their own. See
|
||||
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
|
||||
|
||||
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are
|
||||
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
|
||||
in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the option `-u`/`--unrestricted` (or `-HI` to enable hidden and ignored files):
|
||||
``` bash
|
||||
> fd -u …
|
||||
```
|
||||
|
||||
### Colorized output
|
||||
|
||||
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
|
||||
|
@ -399,6 +401,15 @@ for alternative, more complete (or more colorful) variants, see [here](https://g
|
|||
|
||||
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the options `-H` and `-I` to disable these two features:
|
||||
``` bash
|
||||
> fd -HI …
|
||||
```
|
||||
|
||||
### `fd` doesn't seem to interpret my regex pattern correctly
|
||||
|
||||
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
|
||||
|
@ -477,17 +488,16 @@ In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alt
|
|||
|
||||
### Printing the output as a tree
|
||||
|
||||
To format the output of `fd` as a file-tree you can use the `tree` command with
|
||||
`--fromfile`:
|
||||
To format the output of `fd` similar to the `tree` command, install [`as-tree`] and pipe the output
|
||||
of `fd` to `as-tree`:
|
||||
```bash
|
||||
❯ fd | tree --fromfile
|
||||
fd | as-tree
|
||||
```
|
||||
|
||||
This can be more useful than running `tree` by itself because `tree` does not
|
||||
ignore any files by default, nor does it support as rich a set of options as
|
||||
`fd` does to control what to print:
|
||||
This can be more useful than running `tree` by itself because `tree` does not ignore any files by
|
||||
default, nor does it support as rich a set of options as `fd` does to control what to print:
|
||||
```bash
|
||||
❯ fd --extension rs | tree --fromfile
|
||||
❯ fd --extension rs | as-tree
|
||||
.
|
||||
├── build.rs
|
||||
└── src
|
||||
|
@ -495,10 +505,9 @@ ignore any files by default, nor does it support as rich a set of options as
|
|||
└── error.rs
|
||||
```
|
||||
|
||||
On bash and similar you can simply create an alias:
|
||||
```bash
|
||||
❯ alias as-tree='tree --fromfile'
|
||||
```
|
||||
For more information about `as-tree`, see [the `as-tree` README][`as-tree`].
|
||||
|
||||
[`as-tree`]: https://github.com/jez/as-tree
|
||||
|
||||
### Using fd with `xargs` or `parallel`
|
||||
|
||||
|
@ -521,7 +530,7 @@ newlines). In the same way, the `-0` option of `xargs` tells it to read the inpu
|
|||
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
|
||||
[officially maintained package](https://packages.ubuntu.com/fd-find):
|
||||
```
|
||||
apt install fd-find
|
||||
sudo apt install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing the command
|
||||
|
@ -531,7 +540,7 @@ Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
|||
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
|
||||
[release page](https://github.com/sharkdp/fd/releases) and install it via:
|
||||
``` bash
|
||||
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
|
||||
sudo dpkg -i fd_8.6.0_amd64.deb # adapt version number and architecture
|
||||
```
|
||||
|
||||
### On Debian
|
||||
|
@ -539,7 +548,7 @@ dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
|
|||
If you run Debian Buster or newer, you can install the
|
||||
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
|
||||
```
|
||||
apt-get install fd-find
|
||||
sudo apt-get install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing the command
|
||||
|
@ -567,8 +576,6 @@ You can install [the fd package](https://www.archlinux.org/packages/community/x8
|
|||
```
|
||||
pacman -S fd
|
||||
```
|
||||
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
|
||||
|
||||
### On Gentoo Linux
|
||||
|
||||
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
|
||||
|
@ -590,31 +597,22 @@ You can install `fd` via xbps-install:
|
|||
xbps-install -S fd
|
||||
```
|
||||
|
||||
### On ALT Linux
|
||||
### On RedHat Enterprise Linux 8 (RHEL8), Almalinux 8, EuroLinux 8 or Rocky Linux 8
|
||||
|
||||
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:
|
||||
Get the latest fd-v*-x86_64-unknown-linux-gnu.tar.gz file from [sharkdp on github](https://github.com/sharkdp/fd/releases)
|
||||
```
|
||||
apt-get install fd
|
||||
tar xf fd-v*-x86_64-unknown-linux-gnu.tar.gz
|
||||
chown -R root:root fd-v*-x86_64-unknown-linux-gnu
|
||||
cd fd-v*-x86_64-unknown-linux-gnu
|
||||
sudo cp fd /bin
|
||||
gzip fd.1
|
||||
chown root:root fd.1.gz
|
||||
sudo cp fd.1.gz /usr/share/man/man1
|
||||
sudo cp autocomplete/fd.bash /usr/share/bash-completion/completions/fd
|
||||
source /usr/share/bash-completion/completions/fd
|
||||
fd
|
||||
```
|
||||
|
||||
### On Solus
|
||||
|
||||
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
|
||||
```
|
||||
eopkg install fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
|
||||
|
||||
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
|
||||
|
||||
```bash
|
||||
dnf copr enable tkbcopr/fd
|
||||
dnf install fd
|
||||
```
|
||||
|
||||
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
|
||||
|
||||
### On macOS
|
||||
|
||||
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
|
||||
|
@ -624,7 +622,7 @@ brew install fd
|
|||
|
||||
… or with MacPorts:
|
||||
```
|
||||
port install fd
|
||||
sudo port install fd
|
||||
```
|
||||
|
||||
### On Windows
|
||||
|
@ -641,11 +639,6 @@ Or via [Chocolatey](https://chocolatey.org):
|
|||
choco install fd
|
||||
```
|
||||
|
||||
Or via [Winget](https://learn.microsoft.com/en-us/windows/package-manager/):
|
||||
```
|
||||
winget install sharkdp.fd
|
||||
```
|
||||
|
||||
### On GuixOS
|
||||
|
||||
You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:
|
||||
|
@ -660,13 +653,6 @@ You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
|
|||
nix-env -i fd
|
||||
```
|
||||
|
||||
### Via Flox
|
||||
|
||||
You can use [Flox](https://flox.dev) to install `fd` into a Flox environment:
|
||||
```
|
||||
flox install fd
|
||||
```
|
||||
|
||||
### On FreeBSD
|
||||
|
||||
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
|
||||
|
@ -676,7 +662,7 @@ pkg install fd-find
|
|||
|
||||
### From npm
|
||||
|
||||
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
On linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
|
||||
```
|
||||
npm install -g fd-find
|
||||
|
@ -688,7 +674,7 @@ With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can
|
|||
```
|
||||
cargo install fd-find
|
||||
```
|
||||
Note that rust version *1.77.2* or later is required.
|
||||
Note that rust version *1.60.0* or later is required.
|
||||
|
||||
`make` is also needed for the build.
|
||||
|
||||
|
@ -719,6 +705,8 @@ cargo install --path .
|
|||
|
||||
## License
|
||||
|
||||
Copyright (c) 2017-2021 The fd developers
|
||||
|
||||
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
|
||||
|
||||
See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.
|
||||
|
|
2
build.rs
2
build.rs
|
@ -1,5 +1,5 @@
|
|||
fn main() {
|
||||
let min_version = "1.64";
|
||||
let min_version = "1.60";
|
||||
|
||||
match version_check::is_min_version(min_version) {
|
||||
Some(true) => {}
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
msrv = "1.60.0"
|
|
@ -26,8 +26,6 @@ _fd() {
|
|||
{l,symlink}'\:"symbolic links"'
|
||||
{e,empty}'\:"empty files or directories"'
|
||||
{x,executable}'\:"executable (files)"'
|
||||
{b,block-device}'\:"block devices"'
|
||||
{c,char-device}'\:"character devices"'
|
||||
{s,socket}'\:"sockets"'
|
||||
{p,pipe}'\:"named pipes (FIFOs)"'
|
||||
)
|
||||
|
@ -38,7 +36,7 @@ _fd() {
|
|||
# for all of the potential negation options listed below!
|
||||
if
|
||||
# (--[bpsu]* => match all options marked with '$no')
|
||||
[[ $PREFIX$SUFFIX == --[bopsun]* ]] ||
|
||||
[[ $PREFIX$SUFFIX == --[bopsu]* ]] ||
|
||||
zstyle -t ":complete:$curcontext:*" complete-all
|
||||
then
|
||||
no=
|
||||
|
@ -72,9 +70,6 @@ _fd() {
|
|||
{-g,--glob}'[perform a glob-based search]'
|
||||
{-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'
|
||||
|
||||
+ '(no-require-git)'
|
||||
"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]"
|
||||
|
||||
+ '(match-full)' # match against full path
|
||||
{-p,--full-path}'[match the pattern against the full path instead of the basename]'
|
||||
|
||||
|
@ -123,7 +118,6 @@ _fd() {
|
|||
|
||||
+ '(filter-mtime-newer)' # filter by files modified after than
|
||||
'--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'
|
||||
'--changed-after=[alias for --changed-within]:date/duration'
|
||||
'!--change-newer-than=:date/duration'
|
||||
'!--newer=:date/duration'
|
||||
|
||||
|
@ -162,11 +156,7 @@ _fd() {
|
|||
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
|
||||
|
||||
+ strip-cwd-prefix
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=[When to strip ./]:when:(always never auto)'
|
||||
|
||||
+ and
|
||||
'--and=[additional required search path]:pattern'
|
||||
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]'
|
||||
|
||||
+ args # positional arguments
|
||||
'1: :_guard "^-*" pattern'
|
||||
|
|
|
@ -29,19 +29,11 @@ By default
|
|||
.B fd
|
||||
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
|
||||
with the '\-\-glob' option.
|
||||
.P
|
||||
By default
|
||||
.B fd
|
||||
will exclude hidden files and directories, as well as any files that match gitignore rules
|
||||
or ignore rules in .ignore or .fdignore files.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-H, \-\-hidden
|
||||
Include hidden files and directories in the search results
|
||||
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
|
||||
.IP
|
||||
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
|
||||
is also used.
|
||||
.TP
|
||||
.B \-I, \-\-no\-ignore
|
||||
Show search results from files and directories that would otherwise be ignored by
|
||||
|
@ -79,14 +71,6 @@ git setting, which defaults to
|
|||
.IR $HOME/.config/git/ignore ).
|
||||
The flag can be overridden with '--ignore-vcs'.
|
||||
.TP
|
||||
.B \-\-no\-require\-git
|
||||
Do not require a git repository to respect gitignores. By default, fd will only
|
||||
respect global gitignore rules, .gitignore rules and local exclude rules if fd
|
||||
detects that you are searching inside a git repository. This flag allows you to
|
||||
relax this restriction such that fd will respect all git related ignore rules
|
||||
regardless of whether you’re searching in a git repository or not. The flag can
|
||||
be overridden with '--require-git'.
|
||||
.TP
|
||||
.B \-\-no\-ignore\-parent
|
||||
Show search results from files and directories that would otherwise be ignored by gitignore files in
|
||||
parent directories.
|
||||
|
@ -110,11 +94,6 @@ Perform a regular-expression based search (default). This can be used to overrid
|
|||
Treat the pattern as a literal string instead of a regular expression. Note that this also
|
||||
performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'.
|
||||
.TP
|
||||
.BI "\-\-and " pattern
|
||||
Add additional required search patterns, all of which must be matched. Multiple additional
|
||||
patterns can be specified. The patterns are regular expressions, unless '\-\-glob'
|
||||
or '\-\-fixed\-strings' is used.
|
||||
.TP
|
||||
.B \-a, \-\-absolute\-path
|
||||
Shows the full path starting from the root as opposed to relative paths.
|
||||
The flag can be overridden with '--relative-path'.
|
||||
|
@ -156,20 +135,9 @@ can be used as an alias.
|
|||
Enable the display of filesystem errors for situations such as insufficient
|
||||
permissions or dead symlinks.
|
||||
.TP
|
||||
.B \-\-strip-cwd-prefix [when]
|
||||
By default, relative paths are prefixed with './' when -x/--exec,
|
||||
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
path starting with '-' being treated as a command line option. Use
|
||||
this flag to change this behavior. If this flag is used without a value,
|
||||
it is equivalent to passing "always". Possible values are:
|
||||
.RS
|
||||
.IP never
|
||||
Never strip the ./ at the beginning of paths
|
||||
.IP always
|
||||
Always strip the ./ at the beginning of paths
|
||||
.IP auto
|
||||
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
|
||||
.RE
|
||||
.B \-\-strip-cwd-prefix
|
||||
By default, relative paths are prefixed with './' when the output goes to a non interactive terminal
|
||||
(TTY). Use this flag to disable this behaviour.
|
||||
.TP
|
||||
.B \-\-one\-file\-system, \-\-mount, \-\-xdev
|
||||
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
|
||||
|
@ -199,14 +167,10 @@ Filter search by type:
|
|||
.RS
|
||||
.IP "f, file"
|
||||
regular files
|
||||
.IP "d, dir, directory"
|
||||
.IP "d, directory"
|
||||
directories
|
||||
.IP "l, symlink"
|
||||
symbolic links
|
||||
.IP "b, block-device"
|
||||
block devices
|
||||
.IP "c, char-device"
|
||||
character devices
|
||||
.IP "s, socket"
|
||||
sockets
|
||||
.IP "p, pipe"
|
||||
|
@ -320,9 +284,8 @@ tebibytes
|
|||
Filter results based on the file modification time.
|
||||
Files with modification times greater than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
\fB\-\-change-newer-than\fR,
|
||||
.B --newer
|
||||
or
|
||||
|
@ -333,15 +296,13 @@ Examples:
|
|||
\-\-changed-within 2weeks
|
||||
\-\-change-newer-than "2018-10-27 10:00:00"
|
||||
\-\-newer 2018-10-27
|
||||
\-\-changed-after @1704067200
|
||||
.TP
|
||||
.BI "\-\-changed-before " date|duration
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times less than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
.B --change-older-than
|
||||
or
|
||||
.B --older
|
||||
|
@ -350,7 +311,6 @@ can be used as aliases.
|
|||
Examples:
|
||||
\-\-changed-before "2018-10-27 10:00:00"
|
||||
\-\-change-older-than 2weeks
|
||||
\-\-older @1704067200
|
||||
.TP
|
||||
.BI "-o, \-\-owner " [user][:group]
|
||||
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
|
||||
|
@ -375,30 +335,6 @@ Set the path separator to use when printing file paths. The default is the OS-sp
|
|||
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
|
||||
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
|
||||
.TP
|
||||
.BI "\-\-format " fmt
|
||||
Specify a template string that is used for printing a line for each file found.
|
||||
|
||||
The following placeholders are substituted into the string for each file before printing:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.IP {{
|
||||
literal '{' (an escape sequence)
|
||||
.IP }}
|
||||
literal '}' (an escape sequence)
|
||||
.P
|
||||
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
|
||||
useful if you need to include the literal text of one of the above placeholders.
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-x, \-\-exec " command
|
||||
.RS
|
||||
Execute
|
||||
|
@ -415,13 +351,19 @@ This option can be specified multiple times, in which case all commands are run
|
|||
file found, in the order they are provided. In that case, you must supply a ';' argument for
|
||||
all but the last commands.
|
||||
|
||||
If parallelism is enabled, the order commands will be executed in is non-deterministic. And even with
|
||||
--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is
|
||||
recommended that you don't rely on any ordering of the results.
|
||||
|
||||
Before executing the command, any placeholder patterns in the command are replaced with the
|
||||
corresponding values for the current file. The same placeholders are used as in the "\-\-format"
|
||||
option.
|
||||
The following placeholders are substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
|
@ -445,12 +387,19 @@ Examples:
|
|||
Execute
|
||||
.I command
|
||||
once, with all search results as arguments.
|
||||
|
||||
The order of the arguments is non-deterministic and should not be relied upon.
|
||||
|
||||
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of expanding
|
||||
once per command invocation each argument containing a placeholder is expanding for every
|
||||
file in a batch and passed as separate arguments.
|
||||
One of the following placeholders is substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of all search results)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
|
@ -499,17 +448,6 @@ is set, use
|
|||
.IR $XDG_CONFIG_HOME/fd/ignore .
|
||||
Otherwise, use
|
||||
.IR $HOME/.config/fd/ignore .
|
||||
.SH FILES
|
||||
.TP
|
||||
.B .fdignore
|
||||
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
|
||||
that should be excluded from the search. However, this file is specific to fd, and will be used even
|
||||
if the --no-ignore-vcs option is used.
|
||||
.TP
|
||||
.B $XDG_CONFIG_HOME/fd/ignore
|
||||
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore)
|
||||
ignore entries in this file will be ignored, as if it was an .fdignore file in the
|
||||
current directory.
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
.RI "Find files and directories that match the pattern '" needle "':"
|
||||
|
@ -523,16 +461,6 @@ $ fd -e py
|
|||
.TP
|
||||
.RI "Open all search results with vim:"
|
||||
$ fd pattern -X vim
|
||||
.SH Tips and Tricks
|
||||
.IP \[bu]
|
||||
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
|
||||
".git" folders will be ignored by default, even when the --hidden option is used.
|
||||
.IP \[bu]
|
||||
You can use a shell alias or a wrapper script in order to pass desired flags to fd
|
||||
by default. For example if you do not like the default behavior of respecting gitignore,
|
||||
you can use
|
||||
`alias fd="/usr/bin/fd --no-ignore-vcs"`
|
||||
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
|
||||
.SH BUGS
|
||||
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
|
||||
.SH SEE ALSO
|
||||
|
|
|
@ -9,7 +9,7 @@ necessary changes for the upcoming release.
|
|||
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
|
||||
Make sure to `git add` the `Cargo.lock` changes as well.
|
||||
- [ ] Find the current min. supported Rust version by running
|
||||
`grep rust-version Cargo.toml`.
|
||||
`grep '^\s*MIN_SUPPORTED_RUST_VERSION' .github/workflows/CICD.yml`.
|
||||
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
|
||||
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
|
||||
to the version of this release.
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
## Sponsors
|
||||
|
||||
`fd` development is sponsored by many individuals and companies. Thank you very much!
|
||||
|
||||
Please note, that being sponsored does not affect the individuality of the `fd`
|
||||
project or affect the maintainers' actions in any way.
|
||||
We remain impartial and continue to assess pull requests solely on merit - the
|
||||
features added, bugs solved, and effect on the overall complexity of the code.
|
||||
No issue will have a different priority based on sponsorship status of the
|
||||
reporter.
|
||||
|
||||
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.
|
File diff suppressed because one or more lines are too long
Before Width: | Height: | Size: 7.2 KiB |
|
@ -1 +0,0 @@
|
|||
# Defaults are used
|
|
@ -1,22 +0,0 @@
|
|||
#!/usr/bin/bash
|
||||
|
||||
set -eu
|
||||
|
||||
# This script automates the "Version bump" section
|
||||
|
||||
version="$1"
|
||||
|
||||
if [[ -z $version ]]; then
|
||||
echo "Usage: must supply version as first argument" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git switch -C "release-$version"
|
||||
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
|
||||
|
||||
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
|
||||
|
||||
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
|
||||
|
||||
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md
|
||||
|
156
src/cli.rs
156
src/cli.rs
|
@ -1,4 +1,3 @@
|
|||
use std::num::NonZeroUsize;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Duration;
|
||||
|
||||
|
@ -27,14 +26,12 @@ use crate::filter::SizeFilter;
|
|||
max_term_width = 98,
|
||||
args_override_self = true,
|
||||
group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
|
||||
"max_results", "quiet", "max_one_result"])),
|
||||
"max_results", "has_results", "count"])),
|
||||
)]
|
||||
pub struct Opts {
|
||||
/// Include hidden directories and files in the search results (default:
|
||||
/// hidden files and directories are skipped). Files and directories are
|
||||
/// considered to be hidden if their name starts with a `.` sign (dot).
|
||||
/// Any files or directories that are ignored due to the rules described by
|
||||
/// --no-ignore are still ignored unless otherwise specified.
|
||||
/// The flag can be overridden with --no-hidden.
|
||||
#[arg(
|
||||
long,
|
||||
|
@ -49,7 +46,7 @@ pub struct Opts {
|
|||
no_hidden: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.
|
||||
/// The flag can be overridden with --ignore.
|
||||
#[arg(
|
||||
long,
|
||||
|
@ -63,9 +60,8 @@ pub struct Opts {
|
|||
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore: (),
|
||||
|
||||
///Show search results from files and directories that
|
||||
///would otherwise be ignored by '.gitignore' files.
|
||||
///The flag can be overridden with --ignore-vcs.
|
||||
///Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore' files. The flag can be overridden with --ignore-vcs.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
|
@ -78,28 +74,6 @@ pub struct Opts {
|
|||
#[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore_vcs: (),
|
||||
|
||||
/// Do not require a git repository to respect gitignores.
|
||||
/// By default, fd will only respect global gitignore rules, .gitignore rules,
|
||||
/// and local exclude rules if fd detects that you are searching inside a
|
||||
/// git repository. This flag allows you to relax this restriction such that
|
||||
/// fd will respect all git related ignore rules regardless of whether you're
|
||||
/// searching in a git repository or not.
|
||||
///
|
||||
///
|
||||
/// This flag can be disabled with --require-git.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with = "require_git",
|
||||
hide_short_help = true,
|
||||
// same description as ripgrep's flag: ripgrep/crates/core/app.rs
|
||||
long_help
|
||||
)]
|
||||
pub no_require_git: bool,
|
||||
|
||||
/// Overrides --no-require-git
|
||||
#[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
|
||||
require_git: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
|
||||
#[arg(
|
||||
|
@ -226,7 +200,7 @@ pub struct Opts {
|
|||
alias = "dereference",
|
||||
long_help = "By default, fd does not descend into symlinked directories. Using this \
|
||||
flag, symbolic links are also traversed. \
|
||||
Flag can be overridden with --no-follow."
|
||||
Flag can be overriden with --no-follow."
|
||||
)]
|
||||
pub follow: bool,
|
||||
|
||||
|
@ -313,12 +287,10 @@ pub struct Opts {
|
|||
|
||||
/// Filter the search by type:
|
||||
/// {n} 'f' or 'file': regular files
|
||||
/// {n} 'd' or 'dir' or 'directory': directories
|
||||
/// {n} 'd' or 'directory': directories
|
||||
/// {n} 'l' or 'symlink': symbolic links
|
||||
/// {n} 's' or 'socket': socket
|
||||
/// {n} 'p' or 'pipe': named pipe (FIFO)
|
||||
/// {n} 'b' or 'block-device': block device
|
||||
/// {n} 'c' or 'char-device': character device
|
||||
/// {n}{n} 'x' or 'executable': executables
|
||||
/// {n} 'e' or 'empty': empty files or directories
|
||||
///
|
||||
|
@ -351,9 +323,8 @@ pub struct Opts {
|
|||
value_name = "filetype",
|
||||
hide_possible_values = true,
|
||||
value_enum,
|
||||
help = "Filter by type: file (f), directory (d/dir), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p), \
|
||||
char-device (c), block-device (b)",
|
||||
help = "Filter by type: file (f), directory (d), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p)",
|
||||
long_help
|
||||
)]
|
||||
pub filetype: Option<Vec<FileType>>,
|
||||
|
@ -398,7 +369,7 @@ pub struct Opts {
|
|||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// greater than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// If the time is not specified, it defaults to 00:00:00.
|
||||
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
|
||||
///
|
||||
|
@ -420,7 +391,7 @@ pub struct Opts {
|
|||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// less than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// '--change-older-than' or '--older' can be used as aliases.
|
||||
///
|
||||
/// Examples:
|
||||
|
@ -452,20 +423,6 @@ pub struct Opts {
|
|||
)]
|
||||
pub owner: Option<OwnerFilter>,
|
||||
|
||||
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
|
||||
/// '{}': path (of the current search result)
|
||||
/// '{/}': basename
|
||||
/// '{//}': parent directory
|
||||
/// '{.}': path without file extension
|
||||
/// '{/.}': basename without file extension
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "fmt",
|
||||
help = "Print results according to template",
|
||||
conflicts_with = "list_details"
|
||||
)]
|
||||
pub format: Option<String>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub exec: Exec,
|
||||
|
||||
|
@ -511,8 +468,8 @@ pub struct Opts {
|
|||
|
||||
/// Set number of threads to use for searching & executing (default: number
|
||||
/// of available CPU cores)
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
|
||||
pub threads: Option<NonZeroUsize>,
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = clap::value_parser!(u32).range(1..))]
|
||||
pub threads: Option<u32>,
|
||||
|
||||
/// Milliseconds to buffer before streaming search results to console
|
||||
///
|
||||
|
@ -526,7 +483,6 @@ pub struct Opts {
|
|||
long,
|
||||
value_name = "count",
|
||||
hide_short_help = true,
|
||||
overrides_with("max_one_result"),
|
||||
help = "Limit the number of search results",
|
||||
long_help
|
||||
)]
|
||||
|
@ -631,10 +587,9 @@ pub struct Opts {
|
|||
/// By default, relative paths are prefixed with './' when -x/--exec,
|
||||
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
/// path starting with '-' being treated as a command line option. Use
|
||||
/// this flag to change this behavior. If this flag is used without a value,
|
||||
/// it is equivalent to passing "always".
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)]
|
||||
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
|
||||
/// this flag to disable this behaviour.
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), hide_short_help = true, long_help)]
|
||||
pub strip_cwd_prefix: bool,
|
||||
|
||||
/// By default, fd will traverse the file system tree as far as other options
|
||||
/// dictate. With this flag, fd ensures that it does not descend into a
|
||||
|
@ -657,7 +612,7 @@ impl Opts {
|
|||
} else if !self.search_path.is_empty() {
|
||||
&self.search_path
|
||||
} else {
|
||||
let current_directory = Path::new("./");
|
||||
let current_directory = Path::new(".");
|
||||
ensure_current_directory_exists(current_directory)?;
|
||||
return Ok(vec![self.normalize_path(current_directory)]);
|
||||
};
|
||||
|
@ -680,9 +635,6 @@ impl Opts {
|
|||
fn normalize_path(&self, path: &Path) -> PathBuf {
|
||||
if self.absolute_path {
|
||||
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
|
||||
} else if path == Path::new(".") {
|
||||
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
|
||||
PathBuf::from("./")
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
|
@ -705,24 +657,23 @@ impl Opts {
|
|||
self.min_depth.or(self.exact_depth)
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> NonZeroUsize {
|
||||
self.threads.unwrap_or_else(default_num_threads)
|
||||
pub fn threads(&self) -> usize {
|
||||
// This will panic if the number of threads passed in is more than usize::MAX in an environment
|
||||
// where usize is less than 32 bits (for example 16-bit architectures). It's pretty
|
||||
// unlikely fd will be running in such an environment, and even more unlikely someone would
|
||||
// be trying to use that many threads on such an environment, so I think panicing is an
|
||||
// appropriate way to handle that.
|
||||
std::cmp::max(
|
||||
self.threads
|
||||
.map_or_else(num_cpus::get, |n| n.try_into().expect("too many threads")),
|
||||
1,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn max_results(&self) -> Option<usize> {
|
||||
self.max_results
|
||||
.filter(|&m| m > 0)
|
||||
.or_else(|| self.max_one_result.then_some(1))
|
||||
}
|
||||
|
||||
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
|
||||
use self::StripCwdWhen::*;
|
||||
self.no_search_paths()
|
||||
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
|
||||
Auto => auto_pred(),
|
||||
Always => true,
|
||||
Never => false,
|
||||
}
|
||||
.or_else(|| self.max_one_result.then(|| 1))
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
|
@ -738,32 +689,14 @@ impl Opts {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the default number of threads to use, if not explicitly specified.
|
||||
fn default_num_threads() -> NonZeroUsize {
|
||||
// If we can't get the amount of parallelism for some reason, then
|
||||
// default to a single thread, because that is safe.
|
||||
let fallback = NonZeroUsize::MIN;
|
||||
// To limit startup overhead on massively parallel machines, don't use more
|
||||
// than 64 threads.
|
||||
let limit = NonZeroUsize::new(64).unwrap();
|
||||
|
||||
std::thread::available_parallelism()
|
||||
.unwrap_or(fallback)
|
||||
.min(limit)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
|
||||
pub enum FileType {
|
||||
#[value(alias = "f")]
|
||||
File,
|
||||
#[value(alias = "d", alias = "dir")]
|
||||
#[value(alias = "d")]
|
||||
Directory,
|
||||
#[value(alias = "l")]
|
||||
Symlink,
|
||||
#[value(alias = "b")]
|
||||
BlockDevice,
|
||||
#[value(alias = "c")]
|
||||
CharDevice,
|
||||
/// A file which is executable by the current effective user
|
||||
#[value(alias = "x")]
|
||||
Executable,
|
||||
|
@ -785,14 +718,15 @@ pub enum ColorWhen {
|
|||
Never,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
|
||||
pub enum StripCwdWhen {
|
||||
/// Use the default behavior
|
||||
Auto,
|
||||
/// Always strip the ./ at the beginning of paths
|
||||
Always,
|
||||
/// Never strip the ./
|
||||
Never,
|
||||
impl ColorWhen {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
use ColorWhen::*;
|
||||
match *self {
|
||||
Auto => "auto",
|
||||
Never => "never",
|
||||
Always => "always",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// there isn't a derive api for getting grouped values yet,
|
||||
|
@ -804,11 +738,11 @@ pub struct Exec {
|
|||
impl clap::FromArgMatches for Exec {
|
||||
fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {
|
||||
let command = matches
|
||||
.get_occurrences::<String>("exec")
|
||||
.grouped_values_of("exec")
|
||||
.map(CommandSet::new)
|
||||
.or_else(|| {
|
||||
matches
|
||||
.get_occurrences::<String>("exec_batch")
|
||||
.grouped_values_of("exec_batch")
|
||||
.map(CommandSet::new_batch)
|
||||
})
|
||||
.transpose()
|
||||
|
@ -836,7 +770,6 @@ impl clap::Args for Exec {
|
|||
.help("Execute a command for each search result")
|
||||
.long_help(
|
||||
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
|
||||
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
|
||||
All positional arguments following --exec are considered to be arguments to the command - not to fd. \
|
||||
It is therefore recommended to place the '-x'/'--exec' option last.\n\
|
||||
The following placeholders are substituted before the command is executed:\n \
|
||||
|
@ -844,9 +777,7 @@ impl clap::Args for Exec {
|
|||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
'{/.}': basename without file extension\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- find all *.zip files and unzip them:\n\n \
|
||||
|
@ -871,15 +802,12 @@ impl clap::Args for Exec {
|
|||
.help("Execute a command with all search results at once")
|
||||
.long_help(
|
||||
"Execute the given command once, with all search results as arguments.\n\
|
||||
The order of the arguments is non-deterministic, and should not be relied upon.\n\
|
||||
One of the following placeholders is substituted before the command is executed:\n \
|
||||
'{}': path (of all search results)\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
'{/.}': basename without file extension\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- Find all test_*.py files and open them in your favorite editor:\n\n \
|
||||
|
|
|
@ -8,7 +8,6 @@ use crate::filetypes::FileTypes;
|
|||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::{SizeFilter, TimeFilter};
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
/// Configuration options for *fd*.
|
||||
pub struct Config {
|
||||
|
@ -31,9 +30,6 @@ pub struct Config {
|
|||
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
|
||||
pub read_vcsignore: bool,
|
||||
|
||||
/// Whether to require a `.git` directory to respect gitignore files.
|
||||
pub require_git_to_read_vcsignore: bool,
|
||||
|
||||
/// Whether to respect the global ignore file or not.
|
||||
pub read_global_ignore: bool,
|
||||
|
||||
|
@ -86,9 +82,6 @@ pub struct Config {
|
|||
/// The value (if present) will be a lowercase string without leading dots.
|
||||
pub extensions: Option<RegexSet>,
|
||||
|
||||
/// A format string to use to format results, similarly to exec
|
||||
pub format: Option<FormatTemplate>,
|
||||
|
||||
/// If a value is supplied, each item found will be used to generate and execute commands.
|
||||
pub command: Option<Arc<CommandSet>>,
|
||||
|
||||
|
|
|
@ -1,20 +1,19 @@
|
|||
use std::cell::OnceCell;
|
||||
use std::ffi::OsString;
|
||||
use std::fs::{FileType, Metadata};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use lscolors::{Colorable, LsColors, Style};
|
||||
|
||||
use once_cell::unsync::OnceCell;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum DirEntryInner {
|
||||
Normal(ignore::DirEntry),
|
||||
BrokenSymlink(PathBuf),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DirEntry {
|
||||
inner: DirEntryInner,
|
||||
metadata: OnceCell<Option<Metadata>>,
|
||||
|
@ -113,7 +112,7 @@ impl Eq for DirEntry {}
|
|||
impl PartialOrd for DirEntry {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
self.path().partial_cmp(other.path())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,10 +34,10 @@ pub fn dirname(path: &Path) -> OsString {
|
|||
#[cfg(test)]
|
||||
mod path_tests {
|
||||
use super::*;
|
||||
use std::path::MAIN_SEPARATOR_STR;
|
||||
use std::path::MAIN_SEPARATOR;
|
||||
|
||||
fn correct(input: &str) -> String {
|
||||
input.replace('/', MAIN_SEPARATOR_STR)
|
||||
input.replace('/', &MAIN_SEPARATOR.to_string())
|
||||
}
|
||||
|
||||
macro_rules! func_tests {
|
|
@ -1,6 +1,9 @@
|
|||
use std::sync::Mutex;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use crossbeam_channel::Receiver;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::walk::WorkerResult;
|
||||
|
@ -11,47 +14,43 @@ use super::CommandSet;
|
|||
/// generate a command with the supplied command template. The generated command will then
|
||||
/// be executed, and this process will continue until the receiver's sender has closed.
|
||||
pub fn job(
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
out_perm: &Mutex<()>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
cmd: Arc<CommandSet>,
|
||||
out_perm: Arc<Mutex<()>>,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
// Output should be buffered when only running a single thread
|
||||
let buffer_output: bool = config.threads > 1;
|
||||
|
||||
let mut ret = ExitCode::Success;
|
||||
for result in results {
|
||||
let mut results: Vec<ExitCode> = Vec::new();
|
||||
loop {
|
||||
// Obtain the next result from the receiver, else if the channel
|
||||
// has closed, exit from the loop
|
||||
let dir_entry = match result {
|
||||
WorkerResult::Entry(dir_entry) => dir_entry,
|
||||
WorkerResult::Error(err) => {
|
||||
let dir_entry: DirEntry = match rx.recv() {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => dir_entry,
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
if config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Err(_) => break,
|
||||
};
|
||||
|
||||
// Generate a command, execute it and store its exit code.
|
||||
let code = cmd.execute(
|
||||
results.push(cmd.execute(
|
||||
dir_entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
out_perm,
|
||||
Arc::clone(&out_perm),
|
||||
buffer_output,
|
||||
);
|
||||
ret = merge_exitcodes([ret, code]);
|
||||
))
|
||||
}
|
||||
// Returns error in case of any error.
|
||||
ret
|
||||
merge_exitcodes(results)
|
||||
}
|
||||
|
||||
pub fn batch(
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
let paths = results
|
||||
pub fn batch(rx: Receiver<WorkerResult>, cmd: &CommandSet, config: &Config) -> ExitCode {
|
||||
let paths = rx
|
||||
.into_iter()
|
||||
.filter_map(|worker_result| match worker_result {
|
||||
WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),
|
||||
|
|
239
src/exec/mod.rs
239
src/exec/mod.rs
|
@ -1,21 +1,27 @@
|
|||
mod command;
|
||||
mod input;
|
||||
mod job;
|
||||
mod token;
|
||||
|
||||
use std::ffi::OsString;
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::io;
|
||||
use std::iter;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::path::{Component, Path, PathBuf, Prefix};
|
||||
use std::process::Stdio;
|
||||
use std::sync::Mutex;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use argmax::Command;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::fmt::{FormatTemplate, Token};
|
||||
|
||||
use self::command::{execute_commands, handle_cmd_error};
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
pub use self::job::{batch, job};
|
||||
use self::token::Token;
|
||||
|
||||
/// Execution mode of the command
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
@ -33,10 +39,9 @@ pub struct CommandSet {
|
|||
}
|
||||
|
||||
impl CommandSet {
|
||||
pub fn new<I, T, S>(input: I) -> Result<CommandSet>
|
||||
pub fn new<I, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
I: IntoIterator<Item = Vec<S>>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
|
@ -48,10 +53,9 @@ impl CommandSet {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet>
|
||||
pub fn new_batch<I, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
I: IntoIterator<Item = Vec<S>>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
|
@ -80,14 +84,14 @@ impl CommandSet {
|
|||
&self,
|
||||
input: &Path,
|
||||
path_separator: Option<&str>,
|
||||
out_perm: &Mutex<()>,
|
||||
out_perm: Arc<Mutex<()>>,
|
||||
buffer_output: bool,
|
||||
) -> ExitCode {
|
||||
let commands = self
|
||||
.commands
|
||||
.iter()
|
||||
.map(|c| c.generate(input, path_separator));
|
||||
execute_commands(commands, out_perm, buffer_output)
|
||||
execute_commands(commands, &out_perm, buffer_output)
|
||||
}
|
||||
|
||||
pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode
|
||||
|
@ -127,7 +131,7 @@ impl CommandSet {
|
|||
#[derive(Debug)]
|
||||
struct CommandBuilder {
|
||||
pre_args: Vec<OsString>,
|
||||
path_arg: FormatTemplate,
|
||||
path_arg: ArgumentTemplate,
|
||||
post_args: Vec<OsString>,
|
||||
cmd: Command,
|
||||
count: usize,
|
||||
|
@ -216,7 +220,7 @@ impl CommandBuilder {
|
|||
/// `generate_and_execute()` method will be used to generate a command and execute it.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
struct CommandTemplate {
|
||||
args: Vec<FormatTemplate>,
|
||||
args: Vec<ArgumentTemplate>,
|
||||
}
|
||||
|
||||
impl CommandTemplate {
|
||||
|
@ -225,15 +229,50 @@ impl CommandTemplate {
|
|||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
static PLACEHOLDER_PATTERN: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"\{(/?\.?|//)\}").unwrap());
|
||||
|
||||
let mut args = Vec::new();
|
||||
let mut has_placeholder = false;
|
||||
|
||||
for arg in input {
|
||||
let arg = arg.as_ref();
|
||||
|
||||
let tmpl = FormatTemplate::parse(arg);
|
||||
has_placeholder |= tmpl.has_tokens();
|
||||
args.push(tmpl);
|
||||
let mut tokens = Vec::new();
|
||||
let mut start = 0;
|
||||
|
||||
for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) {
|
||||
// Leading text before the placeholder.
|
||||
if placeholder.start() > start {
|
||||
tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
|
||||
}
|
||||
|
||||
start = placeholder.end();
|
||||
|
||||
match placeholder.as_str() {
|
||||
"{}" => tokens.push(Token::Placeholder),
|
||||
"{.}" => tokens.push(Token::NoExt),
|
||||
"{/}" => tokens.push(Token::Basename),
|
||||
"{//}" => tokens.push(Token::Parent),
|
||||
"{/.}" => tokens.push(Token::BasenameNoExt),
|
||||
_ => unreachable!("Unhandled placeholder"),
|
||||
}
|
||||
|
||||
has_placeholder = true;
|
||||
}
|
||||
|
||||
// Without a placeholder, the argument is just fixed text.
|
||||
if tokens.is_empty() {
|
||||
args.push(ArgumentTemplate::Text(arg.to_owned()));
|
||||
continue;
|
||||
}
|
||||
|
||||
if start < arg.len() {
|
||||
// Trailing text after last placeholder.
|
||||
tokens.push(Token::Text(arg[start..].to_owned()));
|
||||
}
|
||||
|
||||
args.push(ArgumentTemplate::Tokens(tokens));
|
||||
}
|
||||
|
||||
// We need to check that we have at least one argument, because if not
|
||||
|
@ -247,7 +286,7 @@ impl CommandTemplate {
|
|||
|
||||
// If a placeholder token was not supplied, append one at the end of the command.
|
||||
if !has_placeholder {
|
||||
args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));
|
||||
args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
|
||||
}
|
||||
|
||||
Ok(CommandTemplate { args })
|
||||
|
@ -270,18 +309,115 @@ impl CommandTemplate {
|
|||
}
|
||||
}
|
||||
|
||||
/// Represents a template for a single command argument.
|
||||
///
|
||||
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
|
||||
/// a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
enum ArgumentTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl ArgumentTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, ArgumentTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
/// Generate an argument from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Text arguments and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use self::Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
ArgumentTemplate::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match *token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
ArgumentTemplate::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
|
||||
template
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| arg.generate(input, None).into_string().unwrap())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_placeholder() {
|
||||
assert_eq!(
|
||||
|
@ -289,9 +425,9 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Text("${SHELL}:".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Text("${SHELL}:".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
]
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -306,8 +442,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -322,8 +458,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Basename]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Basename]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -338,8 +474,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Parent]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Parent]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -354,8 +490,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -363,21 +499,6 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces() {
|
||||
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
|
||||
assert_eq!(
|
||||
generate_str(&template, "foo"),
|
||||
vec!["{}", "{", "{.}", "foo"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces_and_placeholder() {
|
||||
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
|
||||
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_multiple() {
|
||||
assert_eq!(
|
||||
|
@ -385,9 +506,9 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("cp".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Tokens(vec![
|
||||
ArgumentTemplate::Text("cp".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
ArgumentTemplate::Tokens(vec![
|
||||
Token::BasenameNoExt,
|
||||
Token::Text(".ext".into())
|
||||
]),
|
||||
|
@ -405,8 +526,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::Batch,
|
||||
|
@ -431,7 +552,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn generate_custom_path_separator() {
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
|
@ -446,7 +567,7 @@ mod tests {
|
|||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn generate_custom_path_separator_windows() {
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
use std::fmt::{self, Display, Formatter};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -59,26 +59,6 @@ pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_block_device(ft: fs::FileType) -> bool {
|
||||
ft.is_block_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_block_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_char_device(ft: fs::FileType) -> bool {
|
||||
ft.is_char_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_char_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_socket(ft: fs::FileType) -> bool {
|
||||
ft.is_socket()
|
||||
|
@ -128,12 +108,14 @@ pub fn strip_current_dir(path: &Path) -> &Path {
|
|||
pub fn default_path_separator() -> Option<String> {
|
||||
if cfg!(windows) {
|
||||
let msystem = env::var("MSYSTEM").ok()?;
|
||||
if !msystem.is_empty() {
|
||||
return Some("/".to_owned());
|
||||
}
|
||||
match msystem.as_str() {
|
||||
"MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
|
|
@ -9,8 +9,6 @@ pub struct FileTypes {
|
|||
pub files: bool,
|
||||
pub directories: bool,
|
||||
pub symlinks: bool,
|
||||
pub block_devices: bool,
|
||||
pub char_devices: bool,
|
||||
pub sockets: bool,
|
||||
pub pipes: bool,
|
||||
pub executables_only: bool,
|
||||
|
@ -23,8 +21,6 @@ impl FileTypes {
|
|||
(!self.files && entry_type.is_file())
|
||||
|| (!self.directories && entry_type.is_dir())
|
||||
|| (!self.symlinks && entry_type.is_symlink())
|
||||
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|
||||
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|
||||
|| (!self.sockets && filesystem::is_socket(*entry_type))
|
||||
|| (!self.pipes && filesystem::is_pipe(*entry_type))
|
||||
|| (self.executables_only && !entry.path().executable())
|
||||
|
@ -32,8 +28,6 @@ impl FileTypes {
|
|||
|| !(entry_type.is_file()
|
||||
|| entry_type.is_dir()
|
||||
|| entry_type.is_symlink()
|
||||
|| filesystem::is_block_device(*entry_type)
|
||||
|| filesystem::is_char_device(*entry_type)
|
||||
|| filesystem::is_socket(*entry_type)
|
||||
|| filesystem::is_pipe(*entry_type))
|
||||
} else {
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
use nix::unistd::{Group, User};
|
||||
use std::fs;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
|
@ -36,22 +35,16 @@ impl OwnerFilter {
|
|||
}
|
||||
|
||||
let uid = Check::parse(fst, |s| {
|
||||
if let Ok(uid) = s.parse() {
|
||||
Ok(uid)
|
||||
} else {
|
||||
User::from_name(s)?
|
||||
.map(|user| user.uid.as_raw())
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_user_by_name(s).map(|user| user.uid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
}
|
||||
})?;
|
||||
let gid = Check::parse(snd, |s| {
|
||||
if let Ok(gid) = s.parse() {
|
||||
Ok(gid)
|
||||
} else {
|
||||
Group::from_name(s)?
|
||||
.map(|group| group.gid.as_raw())
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_group_by_name(s).map(|group| group.gid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(OwnerFilter { uid, gid })
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use std::sync::OnceLock;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new();
|
||||
static SIZE_CAPTURES: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum SizeFilter {
|
||||
|
@ -31,13 +31,11 @@ impl SizeFilter {
|
|||
}
|
||||
|
||||
fn parse_opt(s: &str) -> Option<Self> {
|
||||
let pattern =
|
||||
SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
|
||||
if !pattern.is_match(s) {
|
||||
if !SIZE_CAPTURES.is_match(s) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let captures = pattern.captures(s)?;
|
||||
let captures = SIZE_CAPTURES.captures(s)?;
|
||||
let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
|
||||
let quantity = captures
|
||||
.get(2)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime};
|
||||
use chrono::{offset::TimeZone, DateTime, Local, NaiveDate};
|
||||
|
||||
use std::time::SystemTime;
|
||||
|
||||
|
@ -20,21 +20,11 @@ impl TimeFilter {
|
|||
.ok()
|
||||
.or_else(|| {
|
||||
NaiveDate::parse_from_str(s, "%F")
|
||||
.ok()?
|
||||
.and_hms_opt(0, 0, 0)?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
NaiveDateTime::parse_from_str(s, "%F %T")
|
||||
.ok()?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
|
||||
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
|
||||
.ok()
|
||||
.and_then(|nd| nd.and_hms_opt(0, 0, 0))
|
||||
.and_then(|ndt| Local.from_local_datetime(&ndt).single())
|
||||
})
|
||||
.or_else(|| Local.datetime_from_str(s, "%F %T").ok())
|
||||
.map(|dt| dt.into())
|
||||
})
|
||||
}
|
||||
|
@ -62,10 +52,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn is_time_filter_applicable() {
|
||||
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
.unwrap()
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
let ref_time = Local
|
||||
.datetime_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
.unwrap()
|
||||
.into();
|
||||
|
||||
|
@ -139,32 +127,5 @@ mod tests {
|
|||
assert!(!TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
|
||||
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
|
||||
.unwrap()
|
||||
.into();
|
||||
let t1m_ago = ref_time - Duration::from_secs(60);
|
||||
let t1s_later = ref_time + Duration::from_secs(1);
|
||||
// Timestamp only supported via '@' prefix
|
||||
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
|
||||
assert!(
|
||||
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later));
|
||||
}
|
||||
}
|
||||
|
|
281
src/fmt/mod.rs
281
src/fmt/mod.rs
|
@ -1,281 +0,0 @@
|
|||
mod input;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::path::{Component, Path, Prefix};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use aho_corasick::AhoCorasick;
|
||||
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed format string
|
||||
///
|
||||
/// This is either a collection of `Token`s including at least one placeholder variant,
|
||||
/// or a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum FormatTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
|
||||
|
||||
impl FormatTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, FormatTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
pub fn parse(fmt: &str) -> Self {
|
||||
// NOTE: we assume that { and } have the same length
|
||||
const BRACE_LEN: usize = '{'.len_utf8();
|
||||
let mut tokens = Vec::new();
|
||||
let mut remaining = fmt;
|
||||
let mut buf = String::new();
|
||||
let placeholders = PLACEHOLDERS.get_or_init(|| {
|
||||
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
|
||||
});
|
||||
while let Some(m) = placeholders.find(remaining) {
|
||||
match m.pattern().as_u32() {
|
||||
0 | 1 => {
|
||||
// we found an escaped {{ or }}, so add
|
||||
// everything up to the first char to the buffer
|
||||
// then skip the second one.
|
||||
buf += &remaining[..m.start() + BRACE_LEN];
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
id if !remaining[m.end()..].starts_with('}') => {
|
||||
buf += &remaining[..m.start()];
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(std::mem::take(&mut buf)));
|
||||
}
|
||||
tokens.push(token_from_pattern_id(id));
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
_ => {
|
||||
// We got a normal pattern, but the final "}"
|
||||
// is escaped, so add up to that to the buffer, then
|
||||
// skip the final }
|
||||
buf += &remaining[..m.end()];
|
||||
remaining = &remaining[m.end() + BRACE_LEN..];
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add the rest of the string to the buffer, and add the final buffer to the tokens
|
||||
if !remaining.is_empty() {
|
||||
buf += remaining;
|
||||
}
|
||||
if tokens.is_empty() {
|
||||
// No placeholders were found, so just return the text
|
||||
return FormatTemplate::Text(buf);
|
||||
}
|
||||
// Add final text segment
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(buf));
|
||||
}
|
||||
debug_assert!(!tokens.is_empty());
|
||||
FormatTemplate::Tokens(tokens)
|
||||
}
|
||||
|
||||
/// Generate a result string from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
Self::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
Self::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the id from an aho-corasick match to the
|
||||
// appropriate token
|
||||
fn token_from_pattern_id(id: u32) -> Token {
|
||||
use Token::*;
|
||||
match id {
|
||||
2 => Placeholder,
|
||||
3 => Basename,
|
||||
4 => Parent,
|
||||
5 => NoExt,
|
||||
6 => BasenameNoExt,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod fmt_tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn parse_no_placeholders() {
|
||||
let templ = FormatTemplate::parse("This string has no placeholders");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string has no placeholders".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_only_brace_escapes() {
|
||||
let templ = FormatTemplate::parse("This string only has escapes like {{ and }}");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string only has escapes like { and }".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_placeholders() {
|
||||
use Token::*;
|
||||
|
||||
let templ = FormatTemplate::parse(
|
||||
"{{path={} \
|
||||
basename={/} \
|
||||
parent={//} \
|
||||
noExt={.} \
|
||||
basenameNoExt={/.} \
|
||||
}}",
|
||||
);
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Tokens(vec![
|
||||
Text("{path=".into()),
|
||||
Placeholder,
|
||||
Text(" basename=".into()),
|
||||
Basename,
|
||||
Text(" parent=".into()),
|
||||
Parent,
|
||||
Text(" noExt=".into()),
|
||||
NoExt,
|
||||
Text(" basenameNoExt=".into()),
|
||||
BasenameNoExt,
|
||||
Text(" }".into()),
|
||||
])
|
||||
);
|
||||
|
||||
let mut path = PathBuf::new();
|
||||
path.push("a");
|
||||
path.push("folder");
|
||||
path.push("file.txt");
|
||||
|
||||
let expanded = templ.generate(&path, Some("/")).into_string().unwrap();
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"{path=a/folder/file.txt \
|
||||
basename=file.txt \
|
||||
parent=a/folder \
|
||||
noExt=a/folder/file \
|
||||
basenameNoExt=file }"
|
||||
);
|
||||
}
|
||||
}
|
38
src/main.rs
38
src/main.rs
|
@ -7,18 +7,17 @@ mod exit_codes;
|
|||
mod filesystem;
|
||||
mod filetypes;
|
||||
mod filter;
|
||||
mod fmt;
|
||||
mod output;
|
||||
mod regex_helper;
|
||||
mod walk;
|
||||
|
||||
use std::env;
|
||||
use std::io::IsTerminal;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use atty::Stream;
|
||||
use clap::{CommandFactory, Parser};
|
||||
use globset::GlobBuilder;
|
||||
use lscolors::LsColors;
|
||||
|
@ -41,7 +40,6 @@ use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_wi
|
|||
not(target_os = "android"),
|
||||
not(target_os = "macos"),
|
||||
not(target_os = "freebsd"),
|
||||
not(target_os = "openbsd"),
|
||||
not(all(target_env = "musl", target_pointer_width = "32")),
|
||||
not(target_arch = "riscv64"),
|
||||
feature = "use-jemalloc"
|
||||
|
@ -104,7 +102,7 @@ fn run() -> Result<ExitCode> {
|
|||
.map(|pat| build_regex(pat, &config))
|
||||
.collect::<Result<Vec<Regex>>>()?;
|
||||
|
||||
walk::scan(&search_paths, regexps, config)
|
||||
walk::scan(&search_paths, Arc::new(regexps), Arc::new(config))
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
|
@ -218,14 +216,12 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
#[cfg(not(windows))]
|
||||
let ansi_colors_support = true;
|
||||
|
||||
let interactive_terminal = std::io::stdout().is_terminal();
|
||||
|
||||
let interactive_terminal = atty::is(Stream::Stdout);
|
||||
let colored_output = match opts.color {
|
||||
ColorWhen::Always => true,
|
||||
ColorWhen::Never => false,
|
||||
ColorWhen::Auto => {
|
||||
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty());
|
||||
ansi_colors_support && !no_color && interactive_terminal
|
||||
ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -243,11 +239,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),
|
||||
read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),
|
||||
read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),
|
||||
require_git_to_read_vcsignore: !opts.no_require_git,
|
||||
read_parent_ignore: !opts.no_ignore_parent,
|
||||
read_global_ignore: !(opts.no_ignore
|
||||
|| opts.rg_alias_ignore()
|
||||
|| opts.no_global_ignore_file),
|
||||
read_global_ignore: !opts.no_ignore || opts.rg_alias_ignore() || opts.no_global_ignore_file,
|
||||
follow_links: opts.follow,
|
||||
one_file_system: opts.one_file_system,
|
||||
null_separator: opts.null_separator,
|
||||
|
@ -255,7 +248,7 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
max_depth: opts.max_depth(),
|
||||
min_depth: opts.min_depth(),
|
||||
prune: opts.prune,
|
||||
threads: opts.threads().get(),
|
||||
threads: opts.threads(),
|
||||
max_buffer_time: opts.max_buffer_time,
|
||||
ls_colors,
|
||||
interactive_terminal,
|
||||
|
@ -272,8 +265,6 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
file_types.files = true;
|
||||
}
|
||||
Empty => file_types.empty_only = true,
|
||||
BlockDevice => file_types.block_devices = true,
|
||||
CharDevice => file_types.char_devices = true,
|
||||
Socket => file_types.sockets = true,
|
||||
Pipe => file_types.pipes = true,
|
||||
}
|
||||
|
@ -300,10 +291,6 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
.build()
|
||||
})
|
||||
.transpose()?,
|
||||
format: opts
|
||||
.format
|
||||
.as_deref()
|
||||
.map(crate::fmt::FormatTemplate::parse),
|
||||
command: command.map(Arc::new),
|
||||
batch_size: opts.batch_size,
|
||||
exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
|
||||
|
@ -316,7 +303,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
path_separator,
|
||||
actual_path_separator,
|
||||
max_results: opts.max_results(),
|
||||
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),
|
||||
strip_cwd_prefix: (opts.no_search_paths()
|
||||
&& (opts.strip_cwd_prefix || !(opts.null_separator || has_command))),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -329,22 +317,18 @@ fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<Comma
|
|||
if !opts.list_details {
|
||||
return None;
|
||||
}
|
||||
let color_arg = format!("--color={}", opts.color.as_str());
|
||||
|
||||
let res = determine_ls_command(colored_output)
|
||||
let res = determine_ls_command(&color_arg, colored_output)
|
||||
.map(|cmd| CommandSet::new_batch([cmd]).unwrap());
|
||||
Some(res)
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {
|
||||
fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result<Vec<&str>> {
|
||||
#[allow(unused)]
|
||||
let gnu_ls = |command_name| {
|
||||
let color_arg = if colored_output {
|
||||
"--color=always"
|
||||
} else {
|
||||
"--color=never"
|
||||
};
|
||||
// Note: we use short options here (instead of --long-options) to support more
|
||||
// platforms (like BusyBox).
|
||||
vec![
|
||||
|
|
|
@ -7,7 +7,6 @@ use crate::config::Config;
|
|||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
||||
path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
|
||||
|
@ -15,10 +14,7 @@ fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
|||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) {
|
||||
// TODO: use format if supplied
|
||||
let r = if let Some(ref format) = config.format {
|
||||
print_entry_format(stdout, entry, config, format)
|
||||
} else if let Some(ref ls_colors) = config.ls_colors {
|
||||
let r = if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, entry, config, ls_colors)
|
||||
} else {
|
||||
print_entry_uncolorized(stdout, entry, config)
|
||||
|
@ -58,22 +54,6 @@ fn print_trailing_slash<W: Write>(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_format<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
format: &FormatTemplate,
|
||||
) -> io::Result<()> {
|
||||
let separator = if config.null_separator { "\0" } else { "\n" };
|
||||
let output = format.generate(
|
||||
entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
);
|
||||
// TODO: support writing raw bytes on unix?
|
||||
write!(stdout, "{}{}", output.to_string_lossy(), separator)
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_colorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
|
|
|
@ -3,7 +3,7 @@ use regex_syntax::ParserBuilder;
|
|||
|
||||
/// Determine if a regex pattern contains a literal uppercase character.
|
||||
pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
|
@ -16,18 +16,16 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
|||
use regex_syntax::hir::*;
|
||||
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {
|
||||
Ok(s) => s.chars().any(|c| c.is_uppercase()),
|
||||
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
|
||||
},
|
||||
HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(),
|
||||
HirKind::Literal(Literal::Byte(b)) => char::from(*b).is_uppercase(),
|
||||
HirKind::Class(Class::Unicode(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
|
||||
HirKind::Class(Class::Bytes(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),
|
||||
HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => {
|
||||
hir_has_uppercase_char(sub)
|
||||
HirKind::Group(Group { hir, .. }) | HirKind::Repetition(Repetition { hir, .. }) => {
|
||||
hir_has_uppercase_char(hir)
|
||||
}
|
||||
HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {
|
||||
hirs.iter().any(hir_has_uppercase_char)
|
||||
|
@ -38,7 +36,7 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
|||
|
||||
/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)
|
||||
pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
|
@ -58,7 +56,7 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
|
|||
HirKind::Concat(hirs) => {
|
||||
let mut hirs = hirs.iter();
|
||||
if let Some(hir) = hirs.next() {
|
||||
if hir.kind() != &HirKind::Look(Look::Start) {
|
||||
if hir.kind() != &HirKind::Anchor(Anchor::StartText) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
|
@ -66,10 +64,7 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
|
|||
}
|
||||
|
||||
if let Some(hir) = hirs.next() {
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => bytes.starts_with(&[b'.']),
|
||||
_ => false,
|
||||
}
|
||||
hir.kind() == &HirKind::Literal(Literal::Unicode('.'))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
|
531
src/walk.rs
531
src/walk.rs
|
@ -1,18 +1,17 @@
|
|||
use std::borrow::Cow;
|
||||
use std::ffi::OsStr;
|
||||
use std::io::{self, Write};
|
||||
use std::io;
|
||||
use std::mem;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex, MutexGuard};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::thread;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{borrow::Cow, io::Write};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, SendError, Sender};
|
||||
use etcetera::BaseStrategy;
|
||||
use ignore::overrides::{Override, OverrideBuilder};
|
||||
use ignore::{WalkBuilder, WalkParallel, WalkState};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender};
|
||||
use ignore::overrides::OverrideBuilder;
|
||||
use ignore::{self, WalkBuilder};
|
||||
use regex::bytes::Regex;
|
||||
|
||||
use crate::config::Config;
|
||||
|
@ -36,7 +35,6 @@ enum ReceiverMode {
|
|||
|
||||
/// The Worker threads can result in a valid entry having PathBuf or an error.
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Debug)]
|
||||
pub enum WorkerResult {
|
||||
// Errors should be rare, so it's probably better to allow large_enum_variant than
|
||||
// to box the Entry variant
|
||||
|
@ -44,98 +42,139 @@ pub enum WorkerResult {
|
|||
Error(ignore::Error),
|
||||
}
|
||||
|
||||
/// A batch of WorkerResults to send over a channel.
|
||||
#[derive(Clone)]
|
||||
struct Batch {
|
||||
items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
|
||||
}
|
||||
|
||||
impl Batch {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
items: Arc::new(Mutex::new(Some(vec![]))),
|
||||
}
|
||||
}
|
||||
|
||||
fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
|
||||
self.items.lock().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Batch {
|
||||
type Item = WorkerResult;
|
||||
type IntoIter = std::vec::IntoIter<WorkerResult>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.lock().take().unwrap().into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper that sends batches of items at once over a channel.
|
||||
struct BatchSender {
|
||||
batch: Batch,
|
||||
tx: Sender<Batch>,
|
||||
limit: usize,
|
||||
}
|
||||
|
||||
impl BatchSender {
|
||||
fn new(tx: Sender<Batch>, limit: usize) -> Self {
|
||||
Self {
|
||||
batch: Batch::new(),
|
||||
tx,
|
||||
limit,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if we need to flush a batch.
|
||||
fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
|
||||
match batch {
|
||||
// Limit the batch size to provide some backpressure
|
||||
Some(vec) => vec.len() >= self.limit,
|
||||
// Batch was already taken by the receiver, so make a new one
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add an item to a batch.
|
||||
fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
|
||||
let mut batch = self.batch.lock();
|
||||
|
||||
if self.needs_flush(batch.as_ref()) {
|
||||
drop(batch);
|
||||
self.batch = Batch::new();
|
||||
batch = self.batch.lock();
|
||||
}
|
||||
|
||||
let items = batch.as_mut().unwrap();
|
||||
items.push(item);
|
||||
|
||||
if items.len() == 1 {
|
||||
// New batch, send it over the channel
|
||||
self.tx
|
||||
.send(self.batch.clone())
|
||||
.map_err(|_| SendError(()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Maximum size of the output buffer before flushing results to the console
|
||||
const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
pub const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
/// Default duration until output buffering switches to streaming.
|
||||
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Arc<Vec<Regex>>, config: Arc<Config>) -> Result<ExitCode> {
|
||||
let first_path = &paths[0];
|
||||
|
||||
// Channel capacity was chosen empircally to perform similarly to an unbounded channel
|
||||
let (tx, rx) = bounded(0x4000 * config.threads);
|
||||
|
||||
let mut override_builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
override_builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
let overrides = override_builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))?;
|
||||
|
||||
let mut walker = WalkBuilder::new(first_path);
|
||||
walker
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
walker.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
#[cfg(target_os = "macos")]
|
||||
let config_dir_op = std::env::var_os("XDG_CONFIG_HOME")
|
||||
.map(PathBuf::from)
|
||||
.filter(|p| p.is_absolute())
|
||||
.or_else(|| dirs_next::home_dir().map(|d| d.join(".config")));
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let config_dir_op = dirs_next::config_dir();
|
||||
|
||||
if let Some(global_ignore_file) = config_dir_op
|
||||
.map(|p| p.join("fd").join("ignore"))
|
||||
.filter(|p| p.is_file())
|
||||
{
|
||||
let result = walker.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in global ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = walker.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
walker.add(path);
|
||||
}
|
||||
|
||||
let parallel_walker = walker.threads(config.threads).build_parallel();
|
||||
|
||||
// Flag for cleanly shutting down the parallel walk
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
// Flag specifically for quitting due to ^C
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&quit_flag);
|
||||
let interrupt_flag = Arc::clone(&interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Spawn the thread that receives all results through the channel.
|
||||
let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx);
|
||||
|
||||
// Spawn the sender threads.
|
||||
spawn_senders(&config, &quit_flag, patterns, parallel_walker, tx);
|
||||
|
||||
// Wait for the receiver thread to print out all results.
|
||||
let exit_code = receiver_thread.join().unwrap();
|
||||
|
||||
if interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper for the receiver thread's buffering behavior.
|
||||
struct ReceiverBuffer<'a, W> {
|
||||
struct ReceiverBuffer<W> {
|
||||
/// The configuration.
|
||||
config: &'a Config,
|
||||
config: Arc<Config>,
|
||||
/// For shutting down the senders.
|
||||
quit_flag: &'a AtomicBool,
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
/// The ^C notifier.
|
||||
interrupt_flag: &'a AtomicBool,
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
/// Receiver for worker results.
|
||||
rx: Receiver<Batch>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
/// Standard output.
|
||||
stdout: W,
|
||||
/// The current buffer mode.
|
||||
|
@ -148,12 +187,15 @@ struct ReceiverBuffer<'a, W> {
|
|||
num_results: usize,
|
||||
}
|
||||
|
||||
impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
||||
impl<W: Write> ReceiverBuffer<W> {
|
||||
/// Create a new receiver buffer.
|
||||
fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {
|
||||
let config = &state.config;
|
||||
let quit_flag = state.quit_flag.as_ref();
|
||||
let interrupt_flag = state.interrupt_flag.as_ref();
|
||||
fn new(
|
||||
config: Arc<Config>,
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
stdout: W,
|
||||
) -> Self {
|
||||
let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
|
||||
let deadline = Instant::now() + max_buffer_time;
|
||||
|
||||
|
@ -181,7 +223,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
}
|
||||
|
||||
/// Receive the next worker result.
|
||||
fn recv(&self) -> Result<Batch, RecvTimeoutError> {
|
||||
fn recv(&self) -> Result<WorkerResult, RecvTimeoutError> {
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
// Wait at most until we should switch to streaming
|
||||
|
@ -197,10 +239,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
/// Wait for a result or state change.
|
||||
fn poll(&mut self) -> Result<(), ExitCode> {
|
||||
match self.recv() {
|
||||
Ok(batch) => {
|
||||
for result in batch {
|
||||
match result {
|
||||
WorkerResult::Entry(dir_entry) => {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => {
|
||||
if self.config.quiet {
|
||||
return Err(ExitCode::HasResults(true));
|
||||
}
|
||||
|
@ -214,6 +253,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
}
|
||||
ReceiverMode::Streaming => {
|
||||
self.print(&dir_entry)?;
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -224,19 +264,11 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
}
|
||||
}
|
||||
}
|
||||
WorkerResult::Error(err) => {
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
if self.config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we don't have another batch ready, flush before waiting
|
||||
if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
Err(RecvTimeoutError::Timeout) => {
|
||||
self.stream()?;
|
||||
}
|
||||
|
@ -250,7 +282,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
|
||||
/// Output a path.
|
||||
fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
|
||||
output::print_entry(&mut self.stdout, entry, self.config);
|
||||
output::print_entry(&mut self.stdout, entry, &self.config);
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
// Ignore any errors on flush, because we're about to exit anyway
|
||||
|
@ -289,7 +321,7 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
|
||||
/// Flush stdout if necessary.
|
||||
fn flush(&mut self) -> Result<(), ExitCode> {
|
||||
if self.stdout.flush().is_err() {
|
||||
if self.config.interactive_terminal && self.stdout.flush().is_err() {
|
||||
// Probably a broken pipe. Exit gracefully.
|
||||
return Err(ExitCode::GeneralError);
|
||||
}
|
||||
|
@ -297,173 +329,79 @@ impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
|||
}
|
||||
}
|
||||
|
||||
/// State shared by the sender and receiver threads.
|
||||
struct WorkerState {
|
||||
/// The search patterns.
|
||||
patterns: Vec<Regex>,
|
||||
/// The command line configuration.
|
||||
config: Config,
|
||||
/// Flag for cleanly shutting down the parallel walk
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
/// Flag specifically for quitting due to ^C
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl WorkerState {
|
||||
fn new(patterns: Vec<Regex>, config: Config) -> Self {
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
Self {
|
||||
patterns,
|
||||
config,
|
||||
quit_flag,
|
||||
interrupt_flag,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
|
||||
let mut builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
|
||||
builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))
|
||||
}
|
||||
|
||||
fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
let overrides = self.build_overrides(paths)?;
|
||||
|
||||
let mut builder = WalkBuilder::new(first_path);
|
||||
builder
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.require_git(config.require_git_to_read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
builder.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
if let Ok(basedirs) = etcetera::choose_base_strategy() {
|
||||
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
|
||||
if global_ignore_file.is_file() {
|
||||
let result = builder.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!(
|
||||
"Malformed pattern in global ignore file. {}.",
|
||||
err
|
||||
));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = builder.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
builder.add(path);
|
||||
}
|
||||
|
||||
let walker = builder.threads(config.threads).build_parallel();
|
||||
Ok(walker)
|
||||
}
|
||||
|
||||
/// Run the receiver work, either on this thread or a pool of background
|
||||
/// threads (for --exec).
|
||||
fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
|
||||
let config = &self.config;
|
||||
fn spawn_receiver(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
interrupt_flag: &Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
) -> thread::JoinHandle<ExitCode> {
|
||||
let config = Arc::clone(config);
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
let interrupt_flag = Arc::clone(interrupt_flag);
|
||||
|
||||
let threads = config.threads;
|
||||
thread::spawn(move || {
|
||||
// This will be set to `Some` if the `--exec` argument was supplied.
|
||||
if let Some(ref cmd) = config.command {
|
||||
if cmd.in_batch_mode() {
|
||||
exec::batch(rx.into_iter().flatten(), cmd, config)
|
||||
exec::batch(rx, cmd, &config)
|
||||
} else {
|
||||
let out_perm = Mutex::new(());
|
||||
let out_perm = Arc::new(Mutex::new(()));
|
||||
|
||||
thread::scope(|scope| {
|
||||
// Each spawned job will store its thread handle in here.
|
||||
let threads = config.threads;
|
||||
// Each spawned job will store it's thread handle in here.
|
||||
let mut handles = Vec::with_capacity(threads);
|
||||
for _ in 0..threads {
|
||||
let config = Arc::clone(&config);
|
||||
let rx = rx.clone();
|
||||
let cmd = Arc::clone(cmd);
|
||||
let out_perm = Arc::clone(&out_perm);
|
||||
|
||||
// Spawn a job thread that will listen for and execute inputs.
|
||||
let handle = scope
|
||||
.spawn(|| exec::job(rx.into_iter().flatten(), cmd, &out_perm, config));
|
||||
let handle = thread::spawn(move || exec::job(rx, cmd, out_perm, &config));
|
||||
|
||||
// Push the handle of the spawned thread into the vector for later joining.
|
||||
handles.push(handle);
|
||||
}
|
||||
let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());
|
||||
|
||||
let exit_codes = handles
|
||||
.into_iter()
|
||||
.map(|handle| handle.join().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
merge_exitcodes(exit_codes)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let stdout = io::stdout().lock();
|
||||
let stdout = io::stdout();
|
||||
let stdout = stdout.lock();
|
||||
let stdout = io::BufWriter::new(stdout);
|
||||
|
||||
ReceiverBuffer::new(self, rx, stdout).process()
|
||||
let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout);
|
||||
rxbuffer.process()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Spawn the sender threads.
|
||||
fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {
|
||||
walker.run(|| {
|
||||
let patterns = &self.patterns;
|
||||
let config = &self.config;
|
||||
let quit_flag = self.quit_flag.as_ref();
|
||||
fn spawn_senders(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
patterns: Arc<Vec<Regex>>,
|
||||
parallel_walker: ignore::WalkParallel,
|
||||
tx: Sender<WorkerResult>,
|
||||
) {
|
||||
parallel_walker.run(|| {
|
||||
let config = Arc::clone(config);
|
||||
let patterns = Arc::clone(&patterns);
|
||||
let tx_thread = tx.clone();
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
|
||||
let mut limit = 0x100;
|
||||
if let Some(cmd) = &config.command {
|
||||
if !cmd.in_batch_mode() && config.threads > 1 {
|
||||
// Evenly distribute work between multiple receivers
|
||||
limit = 1;
|
||||
}
|
||||
}
|
||||
let mut tx = BatchSender::new(tx.clone(), limit);
|
||||
|
||||
Box::new(move |entry| {
|
||||
Box::new(move |entry_o| {
|
||||
if quit_flag.load(Ordering::Relaxed) {
|
||||
return WalkState::Quit;
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
|
||||
let entry = match entry {
|
||||
let entry = match entry_o {
|
||||
Ok(ref e) if e.depth() == 0 => {
|
||||
// Skip the root directory entry.
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
Ok(e) => DirEntry::normal(e),
|
||||
Err(ignore::Error::WithPath {
|
||||
|
@ -480,26 +418,26 @@ impl WorkerState {
|
|||
DirEntry::broken_symlink(path)
|
||||
}
|
||||
_ => {
|
||||
return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
})) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return match tx.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
return match tx_thread.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(min_depth) = config.min_depth {
|
||||
if entry.depth().map_or(true, |d| d < min_depth) {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -525,24 +463,24 @@ impl WorkerState {
|
|||
.iter()
|
||||
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
|
||||
// Filter out unwanted extensions.
|
||||
if let Some(ref exts_regex) = config.extensions {
|
||||
if let Some(path_str) = entry_path.file_name() {
|
||||
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted file types.
|
||||
if let Some(ref file_types) = config.file_types {
|
||||
if file_types.should_ignore(&entry) {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -551,10 +489,10 @@ impl WorkerState {
|
|||
if let Some(ref owner_constraint) = config.owner_constraint {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if !owner_constraint.matches(metadata) {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -569,13 +507,13 @@ impl WorkerState {
|
|||
.iter()
|
||||
.any(|sc| !sc.is_within(file_size))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -591,7 +529,7 @@ impl WorkerState {
|
|||
}
|
||||
}
|
||||
if !matched {
|
||||
return WalkState::Continue;
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -602,67 +540,18 @@ impl WorkerState {
|
|||
}
|
||||
}
|
||||
|
||||
let send_result = tx.send(WorkerResult::Entry(entry));
|
||||
let send_result = tx_thread.send(WorkerResult::Entry(entry));
|
||||
|
||||
if send_result.is_err() {
|
||||
return WalkState::Quit;
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
|
||||
// Apply pruning.
|
||||
if config.prune {
|
||||
return WalkState::Skip;
|
||||
return ignore::WalkState::Skip;
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
ignore::WalkState::Continue
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
/// Perform the recursive scan.
|
||||
fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {
|
||||
let config = &self.config;
|
||||
let walker = self.build_walker(paths)?;
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&self.quit_flag);
|
||||
let interrupt_flag = Arc::clone(&self.interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let (tx, rx) = bounded(2 * config.threads);
|
||||
|
||||
let exit_code = thread::scope(|scope| {
|
||||
// Spawn the receiver thread(s)
|
||||
let receiver = scope.spawn(|| self.receive(rx));
|
||||
|
||||
// Spawn the sender threads.
|
||||
self.spawn_senders(walker, tx);
|
||||
|
||||
receiver.join().unwrap()
|
||||
});
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
|
||||
WorkerState::new(patterns, config).scan(paths)
|
||||
}
|
||||
|
|
|
@ -20,9 +20,6 @@ pub struct TestEnv {
|
|||
|
||||
/// Normalize each line by sorting the whitespace-separated words
|
||||
normalize_line: bool,
|
||||
|
||||
/// Temporary directory for storing test config (global ignore file)
|
||||
config_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
/// Create the working directory and the test files.
|
||||
|
@ -62,16 +59,6 @@ fn create_working_directory(
|
|||
Ok(temp_dir)
|
||||
}
|
||||
|
||||
fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {
|
||||
let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?;
|
||||
let fd_dir = config_dir.path().join("fd");
|
||||
fs::create_dir(&fd_dir)?;
|
||||
let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?;
|
||||
ignore_file.write_all(ignore_file_content.as_bytes())?;
|
||||
|
||||
Ok(config_dir)
|
||||
}
|
||||
|
||||
/// Find the *fd* executable.
|
||||
fn find_fd_exe() -> PathBuf {
|
||||
// Tests exe is in target/debug/deps, the *fd* exe is in target/debug
|
||||
|
@ -129,7 +116,7 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
|
|||
.lines()
|
||||
.map(|line| {
|
||||
let line = if trim_start { line.trim_start() } else { line };
|
||||
let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);
|
||||
let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string());
|
||||
if normalize_line {
|
||||
let mut words: Vec<_> = line.split_whitespace().collect();
|
||||
words.sort_unstable();
|
||||
|
@ -163,7 +150,6 @@ impl TestEnv {
|
|||
temp_dir,
|
||||
fd_exe,
|
||||
normalize_line: false,
|
||||
config_dir: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -172,16 +158,6 @@ impl TestEnv {
|
|||
temp_dir: self.temp_dir,
|
||||
fd_exe: self.fd_exe,
|
||||
normalize_line: normalize,
|
||||
config_dir: self.config_dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn global_ignore_file(self, content: &str) -> TestEnv {
|
||||
let config_dir =
|
||||
create_config_directory_with_global_ignore(content).expect("config directory");
|
||||
TestEnv {
|
||||
config_dir: Some(config_dir),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -230,8 +206,13 @@ impl TestEnv {
|
|||
path: P,
|
||||
args: &[&str],
|
||||
) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
let output = cmd.output().expect("fd output");
|
||||
|
||||
// Check for exit status.
|
||||
if !output.status.success() {
|
||||
|
@ -307,21 +288,6 @@ impl TestEnv {
|
|||
self.assert_error_subdirectory(".", args, Some(expected))
|
||||
}
|
||||
|
||||
fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
if let Some(config_dir) = &self.config_dir {
|
||||
cmd.env("XDG_CONFIG_HOME", config_dir.path());
|
||||
} else {
|
||||
cmd.arg("--no-global-ignore-file");
|
||||
}
|
||||
cmd.args(args);
|
||||
|
||||
// Run *fd*.
|
||||
cmd.output().expect("fd output")
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* in the specified path under the root working directory,
|
||||
/// and with the specified arguments produces an error with the expected message.
|
||||
fn assert_error_subdirectory<P: AsRef<Path>>(
|
||||
|
@ -330,7 +296,13 @@ impl TestEnv {
|
|||
args: &[&str],
|
||||
expected: Option<&str>,
|
||||
) -> process::ExitStatus {
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = cmd.output().expect("fd output");
|
||||
|
||||
if let Some(expected) = expected {
|
||||
// Normalize both expected and actual output.
|
||||
|
|
260
tests/tests.rs
260
tests/tests.rs
|
@ -1,7 +1,5 @@
|
|||
mod testenv;
|
||||
|
||||
#[cfg(unix)]
|
||||
use nix::unistd::{Gid, Group, Uid, User};
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
|
@ -810,62 +808,6 @@ fn test_custom_ignore_precedence() {
|
|||
te.assert_output(&["--no-ignore", "foo"], "inner/foo");
|
||||
}
|
||||
|
||||
/// Don't require git to respect gitignore (--no-require-git)
|
||||
#[test]
|
||||
fn test_respect_ignore_files() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
// Not in a git repo anymore
|
||||
fs::remove_dir(te.test_root().join(".git")).unwrap();
|
||||
|
||||
// don't respect gitignore because we're not in a git repo
|
||||
te.assert_output(
|
||||
&["foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
// respect gitignore because we set `--no-require-git`
|
||||
te.assert_output(
|
||||
&["--no-require-git", "foo"],
|
||||
"a.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
// make sure overriding works
|
||||
te.assert_output(
|
||||
&["--no-require-git", "--require-git", "foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["--no-require-git", "--no-ignore", "foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
fdignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
}
|
||||
|
||||
/// VCS ignored files (--no-ignore-vcs)
|
||||
#[test]
|
||||
fn test_no_ignore_vcs() {
|
||||
|
@ -937,47 +879,6 @@ fn test_no_ignore_aliases() {
|
|||
);
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test]
|
||||
fn test_global_ignore() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
|
||||
te.assert_output(
|
||||
&[],
|
||||
"a.foo
|
||||
e1 e2
|
||||
symlink",
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test_case("--unrestricted", ".hidden.foo
|
||||
a.foo
|
||||
fdignored.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "unrestricted")]
|
||||
#[test_case("--no-ignore", "a.foo
|
||||
fdignored.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "no-ignore")]
|
||||
#[test_case("--no-global-ignore-file", "a.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "no-global-ignore-file")]
|
||||
fn test_no_global_ignore(flag: &str, expected_output: &str) {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
|
||||
te.assert_output(&[flag, "foo"], expected_output);
|
||||
}
|
||||
|
||||
/// Symlinks (--follow)
|
||||
#[test]
|
||||
fn test_follow() {
|
||||
|
@ -1301,18 +1202,10 @@ fn test_type() {
|
|||
fn test_type_executable() {
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
|
||||
// This test assumes the current user isn't root
|
||||
// (otherwise if the executable bit is set for any level, it is executable for the current
|
||||
// user)
|
||||
if Uid::current().is_root() {
|
||||
return;
|
||||
}
|
||||
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
fs::OpenOptions::new()
|
||||
.create_new(true)
|
||||
.truncate(true)
|
||||
.create(true)
|
||||
.write(true)
|
||||
.mode(0o777)
|
||||
.open(te.test_root().join("executable-file.sh"))
|
||||
|
@ -1320,7 +1213,6 @@ fn test_type_executable() {
|
|||
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o645)
|
||||
.open(te.test_root().join("not-user-executable-file.sh"))
|
||||
|
@ -1624,66 +1516,6 @@ fn test_excludes() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
te.assert_output(
|
||||
&["--format", "path={}", "--path-separator=/"],
|
||||
"path=a.foo
|
||||
path=e1 e2
|
||||
path=one
|
||||
path=one/b.foo
|
||||
path=one/two
|
||||
path=one/two/C.Foo2
|
||||
path=one/two/c.foo
|
||||
path=one/two/three
|
||||
path=one/two/three/d.foo
|
||||
path=one/two/three/directory_foo
|
||||
path=symlink",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "noExt={.}", "--path-separator=/"],
|
||||
"noExt=a
|
||||
noExt=one/b
|
||||
noExt=one/two/C
|
||||
noExt=one/two/c
|
||||
noExt=one/two/three/d
|
||||
noExt=one/two/three/directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "basename={/}", "--path-separator=/"],
|
||||
"basename=a.foo
|
||||
basename=b.foo
|
||||
basename=C.Foo2
|
||||
basename=c.foo
|
||||
basename=d.foo
|
||||
basename=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "name={/.}", "--path-separator=/"],
|
||||
"name=a
|
||||
name=b
|
||||
name=C
|
||||
name=c
|
||||
name=d
|
||||
name=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "parent={//}", "--path-separator=/"],
|
||||
"parent=.
|
||||
parent=one
|
||||
parent=one/two
|
||||
parent=one/two
|
||||
parent=one/two/three
|
||||
parent=one/two/three",
|
||||
);
|
||||
}
|
||||
|
||||
/// Shell script execution (--exec)
|
||||
#[test]
|
||||
fn test_exec() {
|
||||
|
@ -1869,26 +1701,18 @@ fn test_exec_batch() {
|
|||
\n\
|
||||
Usage: fd [OPTIONS] [pattern] [path]...\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
For more information try '--help'\n\
|
||||
",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
&["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"],
|
||||
"error: the argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'\n\
|
||||
\n\
|
||||
Usage: fd --exec-batch <cmd>... <pattern> [path]...\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
"error: The argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
&["foo", "--exec-batch"],
|
||||
"error: a value is required for '--exec-batch <cmd>...' but none was supplied\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
"error: The argument '--exec-batch <cmd>...' requires a value but none was supplied",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
|
@ -1897,7 +1721,7 @@ fn test_exec_batch() {
|
|||
\n\
|
||||
Usage: fd [OPTIONS] [pattern] [path]...\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
For more information try '--help'\n\
|
||||
",
|
||||
);
|
||||
|
||||
|
@ -2325,10 +2149,10 @@ fn test_owner_ignore_all() {
|
|||
#[test]
|
||||
fn test_owner_current_user() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let uid = Uid::current();
|
||||
let uid = users::get_current_uid();
|
||||
te.assert_output(&["--owner", &uid.to_string(), "a.foo"], "a.foo");
|
||||
if let Ok(Some(user)) = User::from_uid(uid) {
|
||||
te.assert_output(&["--owner", &user.name, "a.foo"], "a.foo");
|
||||
if let Some(username) = users::get_current_username().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &username, "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2336,10 +2160,10 @@ fn test_owner_current_user() {
|
|||
#[test]
|
||||
fn test_owner_current_group() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let gid = Gid::current();
|
||||
let gid = users::get_current_gid();
|
||||
te.assert_output(&["--owner", &format!(":{}", gid), "a.foo"], "a.foo");
|
||||
if let Ok(Some(group)) = Group::from_gid(gid) {
|
||||
te.assert_output(&["--owner", &format!(":{}", group.name), "a.foo"], "a.foo");
|
||||
if let Some(groupname) = users::get_current_groupname().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &format!(":{}", groupname), "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2347,7 +2171,7 @@ fn test_owner_current_group() {
|
|||
#[test]
|
||||
fn test_owner_root() {
|
||||
// This test assumes the current user isn't root
|
||||
if Uid::current().is_root() || Gid::current() == Gid::from_raw(0) {
|
||||
if users::get_current_uid() == 0 || users::get_current_gid() == 0 {
|
||||
return;
|
||||
}
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
@ -2448,11 +2272,6 @@ fn test_max_results() {
|
|||
};
|
||||
assert_just_one_result_with_option("--max-results=1");
|
||||
assert_just_one_result_with_option("-1");
|
||||
|
||||
// check that --max-results & -1 conflic with --exec
|
||||
te.assert_failure(&["thing", "--max-results=0", "--exec=cat"]);
|
||||
te.assert_failure(&["thing", "-1", "--exec=cat"]);
|
||||
te.assert_failure(&["thing", "--max-results=1", "-1", "--exec=cat"]);
|
||||
}
|
||||
|
||||
/// Filenames with non-utf8 paths are passed to the executed program unchanged
|
||||
|
@ -2539,7 +2358,6 @@ fn test_number_parsing_errors() {
|
|||
#[test_case("--hidden", &["--no-hidden"] ; "hidden")]
|
||||
#[test_case("--no-ignore", &["--ignore"] ; "no-ignore")]
|
||||
#[test_case("--no-ignore-vcs", &["--ignore-vcs"] ; "no-ignore-vcs")]
|
||||
#[test_case("--no-require-git", &["--require-git"] ; "no-require-git")]
|
||||
#[test_case("--follow", &["--no-follow"] ; "follow")]
|
||||
#[test_case("--absolute-path", &["--relative-path"] ; "absolute-path")]
|
||||
#[test_case("-u", &["--ignore", "--no-hidden"] ; "u")]
|
||||
|
@ -2618,57 +2436,3 @@ fn test_invalid_cwd() {
|
|||
panic!("{:?}", output);
|
||||
}
|
||||
}
|
||||
|
||||
/// Test behavior of .git directory with various flags
|
||||
#[test]
|
||||
fn test_git_dir() {
|
||||
let te = TestEnv::new(
|
||||
&[".git/one", "other_dir/.git", "nested/dir/.git"],
|
||||
&[
|
||||
".git/one/foo.a",
|
||||
".git/.foo",
|
||||
".git/a.foo",
|
||||
"other_dir/.git/foo1",
|
||||
"nested/dir/.git/foo2",
|
||||
],
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["--hidden", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(&["--no-ignore", "foo"], "");
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore-vcs", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_gitignore_parent() {
|
||||
let te = TestEnv::new(&["sub"], &[".abc", "sub/.abc"]);
|
||||
|
||||
fs::File::create(te.test_root().join(".gitignore"))
|
||||
.unwrap()
|
||||
.write_all(b".abc\n")
|
||||
.unwrap();
|
||||
|
||||
te.assert_output_subdirectory("sub", &["--hidden"], "");
|
||||
te.assert_output_subdirectory("sub", &["--hidden", "--search-path", "."], "");
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue