mirror of
https://github.com/sharkdp/fd.git
synced 2024-09-27 20:41:30 +02:00
Compare commits
370 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
27c6b50919 | ||
|
8e0bd33af7 | ||
|
b674b9b820 | ||
|
96ac5fa0a0 | ||
|
9b1017a4d5 | ||
|
b191368713 | ||
|
e3d4f57e75 | ||
|
1ce6906808 | ||
|
9cf415ccc7 | ||
|
e9fe337921 | ||
|
8958a7bd74 | ||
|
c26c45dba3 | ||
|
7decd53c70 | ||
|
d5253c5372 | ||
|
3e7691ee6e | ||
|
385131765d | ||
|
adbae9cf54 | ||
|
b862c11550 | ||
|
e7b93e6c96 | ||
|
2ea5d2a90e | ||
|
80a73fda49 | ||
|
db9d2fa299 | ||
|
830fed4f82 | ||
|
e3b7dcbb7e | ||
|
55d2e78cc1 | ||
|
9f0fea6e21 | ||
|
3480df88ef | ||
|
cfced9787e | ||
|
9cefd79ef4 | ||
|
d5525e171b | ||
|
724275f3ef | ||
|
79d1435c68 | ||
|
5147fd6aec | ||
|
f55708e31d | ||
|
b6b16ce959 | ||
|
e65104e762 | ||
|
da65108efc | ||
|
c74fefabb6 | ||
|
45d6fbb9e2 | ||
|
4e9672250b | ||
|
f8270a6a44 | ||
|
7042dff969 | ||
|
f477c4f2c9 | ||
|
b1f7aef00b | ||
|
d8d2c37ec0 | ||
|
609f1adf90 | ||
|
bd649e2fd7 | ||
|
be815c261a | ||
|
d8a808c0e3 | ||
|
c92290c3d7 | ||
|
0649c2b379 | ||
|
755970deba | ||
|
f897b82d76 | ||
|
dbbabed606 | ||
|
ac178e20af | ||
|
29936f0fba | ||
|
bfc16a1dee | ||
|
289a68bac3 | ||
|
fcaff0f385 | ||
|
36163f9c3a | ||
|
d90ec1758e | ||
|
ea22cbd712 | ||
|
d44badc190 | ||
|
6becb66185 | ||
|
1a1f057e5d | ||
|
10a269bd3f | ||
|
90d3381814 | ||
|
b1f83a0bb0 | ||
|
3bc70925a9 | ||
|
f287f08b9f | ||
|
0e4488e9dc | ||
|
d7d63eddbe | ||
|
8acd7722f0 | ||
|
92fab6e058 | ||
|
a0ee0856db | ||
|
b8df500a70 | ||
|
cd96ca071d | ||
|
216472ff9f | ||
|
3680d10e5c | ||
|
abe3b9cd78 | ||
|
7aad6c9edf | ||
|
ddd3aae249 | ||
|
6d3bb68faf | ||
|
21d50dae8c | ||
|
9279b1f0af | ||
|
6647085015 | ||
|
6af8f092ee | ||
|
c4094c7a05 | ||
|
6d58df5f0c | ||
|
ffecccf209 | ||
|
31f2839751 | ||
|
e10a4eab2b | ||
|
8eb047945e | ||
|
1031325cca | ||
|
9fc2167cf9 | ||
|
ae1de4de24 | ||
|
7e5d14b733 | ||
|
85cbea8dcb | ||
|
bc6782624e | ||
|
cf6ff87c7d | ||
|
3cd73d7927 | ||
|
7794c4aae5 | ||
|
8c7a84ea30 | ||
|
e262ade74e | ||
|
11069e284a | ||
|
6e2e86decb | ||
|
15d3b63ccc | ||
|
453577651e | ||
|
39c07b7b4c | ||
|
5910285db0 | ||
|
68fe31da3f | ||
|
f875ea9a52 | ||
|
138919907b | ||
|
b8744626e7 | ||
|
b08d78f6fc | ||
|
4efc05ef27 | ||
|
0788c43c3f | ||
|
3b2fd158b5 | ||
|
c38dbacbd0 | ||
|
728b3200c0 | ||
|
7f74cd9e56 | ||
|
6ae8da6a39 | ||
|
f699c8bb6a | ||
|
ffde94c10e | ||
|
b0a8848f68 | ||
|
d651a595d4 | ||
|
969316cc0e | ||
|
5b46867507 | ||
|
e117a373a7 | ||
|
a4aed14337 | ||
|
9cde3c12a2 | ||
|
906e7a933e | ||
|
077d28d13a | ||
|
b55bb1e9be | ||
|
7a6cc92d6d | ||
|
b694c6e673 | ||
|
17895538a0 | ||
|
72ff1f9a87 | ||
|
ef3194a510 | ||
|
8773402246 | ||
|
ff3fc81db4 | ||
|
0dc3342c33 | ||
|
c66fc812ac | ||
|
14ed023875 | ||
|
58284b8dbe | ||
|
60889d0b99 | ||
|
7e19bad0a4 | ||
|
4b1d73d39d | ||
|
03e19a1ad2 | ||
|
8fb9499c20 | ||
|
38fb6a5958 | ||
|
49cd62d65e | ||
|
24bb5216bb | ||
|
7f8760fd1f | ||
|
3cb6b9d93a | ||
|
c591106b86 | ||
|
9f096737db | ||
|
1bda165b25 | ||
|
f48372624d | ||
|
5cd15536b6 | ||
|
aeb4a5fdad | ||
|
9529f30129 | ||
|
266311ca33 | ||
|
954a3900b9 | ||
|
07343b5baf | ||
|
a03ed8b300 | ||
|
13a93e5cbe | ||
|
d9c4e6239f | ||
|
61ebd9be6a | ||
|
e3b40208d5 | ||
|
16c2d1e1d0 | ||
|
fea1622724 | ||
|
00b64f3ccb | ||
|
74b850a642 | ||
|
4202f3939e | ||
|
e1ecba2ce4 | ||
|
0853e35e1f | ||
|
4b4a74c988 | ||
|
84f032eba8 | ||
|
b8a5f95cf2 | ||
|
73260c0e35 | ||
|
5903dec289 | ||
|
571ebb349b | ||
|
d62bbbbcd1 | ||
|
ad5fb44ddc | ||
|
8bbbd7679b | ||
|
cd32a3827d | ||
|
66c0637c90 | ||
|
c9df4296f9 | ||
|
7c5cf28ace | ||
|
51002c842d | ||
|
8e582971fa | ||
|
6daa72f929 | ||
|
8355d78359 | ||
|
dbc1818073 | ||
|
e57ce7f2a4 | ||
|
d8f89fa59e | ||
|
350003d8da | ||
|
15329f9cfa | ||
|
95b4dff379 | ||
|
c96b1af3be | ||
|
5ee6365510 | ||
|
1d57b3a064 | ||
|
325d419e39 | ||
|
8b5532d8dd | ||
|
7263b5e01d | ||
|
c6fcdbe000 | ||
|
306dacd0b4 | ||
|
08910e4e3f | ||
|
8897659607 | ||
|
53fd416c47 | ||
|
5e0018fb1f | ||
|
054bae01ef | ||
|
8f32a758a4 | ||
|
0fc8facfb7 | ||
|
069b181625 | ||
|
d9b69c8405 | ||
|
a11f8426d4 | ||
|
e6aa8e82f6 | ||
|
978866d983 | ||
|
36bc84041b | ||
|
3ed4ea7538 | ||
|
6b5fe1c634 | ||
|
7c39fff969 | ||
|
b922ca18f0 | ||
|
b8e7cbd5e3 | ||
|
9df9a489f0 | ||
|
fa01a280ed | ||
|
e6b5a4ef9d | ||
|
19832fcbd3 | ||
|
d371b10039 | ||
|
8c50bc733d | ||
|
3f9794cd1a | ||
|
fc240f7b2a | ||
|
dea9110b90 | ||
|
93cdb2628e | ||
|
817c0bc512 | ||
|
e97dec777c | ||
|
5f494b0925 | ||
|
59feb7d6ab | ||
|
97f5326393 | ||
|
e2a298a84f | ||
|
3317362e78 | ||
|
39d0a3ff3c | ||
|
d36c59920d | ||
|
995d2f5e44 | ||
|
3884f054f1 | ||
|
32504fa3d5 | ||
|
afd0efa291 | ||
|
737b5bc42e | ||
|
601d2bb13e | ||
|
917c56b120 | ||
|
9ffd57f4ef | ||
|
08a8723ee7 | ||
|
efdba804ac | ||
|
6f0632273b | ||
|
c848af33d5 | ||
|
f33de6544f | ||
|
d7e5dcf9d2 | ||
|
b38ba68ccc | ||
|
e55907dc8b | ||
|
a248607bee | ||
|
ed23fb9054 | ||
|
7d357a6cec | ||
|
1feed8816a | ||
|
9ce43b2d7b | ||
|
a6a78e1c65 | ||
|
cd14bb8a2c | ||
|
7162f28a5b | ||
|
2328e9cd17 | ||
|
2a6026b25d | ||
|
c62224d2c3 | ||
|
9a40d21ceb | ||
|
d019b02829 | ||
|
2f813601aa | ||
|
aae8519a1d | ||
|
4bfb903b22 | ||
|
d91b2a202e | ||
|
a74a43987a | ||
|
2a588a0171 | ||
|
3ae04546ea | ||
|
a0370aaf25 | ||
|
740edeb73f | ||
|
91e3c3cba5 | ||
|
d6e9cbfff3 | ||
|
8d3172f987 | ||
|
5be58f0f76 | ||
|
8d30d6a4fe | ||
|
5ff866aa26 | ||
|
4ecf013527 | ||
|
1c3a38b423 | ||
|
a3a4912ced | ||
|
0d32bebcc2 | ||
|
0884b837b2 | ||
|
11199079c3 | ||
|
69521a1057 | ||
|
59a487b524 | ||
|
0e2a4bac72 | ||
|
35aa52538c | ||
|
b680a9de9f | ||
|
42244e5f32 | ||
|
072c9e56e1 | ||
|
f7bb60aba5 | ||
|
b019d8f1bf | ||
|
15c795d2e1 | ||
|
a428f7eb13 | ||
|
02c9efba28 | ||
|
aebe7537c3 | ||
|
4356ba3c43 | ||
|
c9afbc5b70 | ||
|
e38e3078ac | ||
|
5439326aa4 | ||
|
35bc1f95fb | ||
|
161ee64399 | ||
|
399bf3a931 | ||
|
bae0a1bfa6 | ||
|
e4bca1033c | ||
|
da40e76aae | ||
|
31ac4a3f5c | ||
|
424d6efcc0 | ||
|
ccf8e69650 | ||
|
ee44c1ed90 | ||
|
3ac2e13a25 | ||
|
06a6a118a1 | ||
|
c095867154 | ||
|
324005fb3a | ||
|
d8166907e6 | ||
|
7cbfb8e29c | ||
|
1c5ce0a661 | ||
|
f98496abcd | ||
|
82aa17f9fb | ||
|
9b8457aeb3 | ||
|
535b34e48a | ||
|
0909d413d0 | ||
|
284ee3d0c6 | ||
|
f3e6536d59 | ||
|
002645d7ac | ||
|
9f6abded0e | ||
|
840a565d3a | ||
|
3cf5ac0b9a | ||
|
a217823510 | ||
|
f867c28a2c | ||
|
73a693ef28 | ||
|
9955e20d01 | ||
|
03052757a7 | ||
|
bdcc24ed04 | ||
|
8478a2c7eb | ||
|
c34bfa30fe | ||
|
af9daff4ee | ||
|
10ba34f78b | ||
|
503ede7535 | ||
|
08c0d427bf | ||
|
ab7d5eff87 | ||
|
686318c005 | ||
|
c04ab74744 | ||
|
8fdfc6c2ef | ||
|
71393fa1be | ||
|
5e50825af2 | ||
|
8fed650de9 | ||
|
4d8569ad6b | ||
|
2f0677b556 | ||
|
0a8a72d4f3 | ||
|
de611c8835 | ||
|
a36f2cf61c | ||
|
b6c7ebc4f1 | ||
|
fd707b42c2 | ||
|
7c86c7d585 | ||
|
27013537c9 | ||
|
addf00cb16 | ||
|
1964e434e6 |
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@ -4,3 +4,7 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
328
.github/workflows/CICD.yml
vendored
328
.github/workflows/CICD.yml
vendored
@ -1,8 +1,8 @@
|
||||
name: CICD
|
||||
|
||||
env:
|
||||
MIN_SUPPORTED_RUST_VERSION: "1.60.0"
|
||||
CICD_INTERMEDIATES_DIR: "_cicd-intermediates"
|
||||
MSRV_FEATURES: "--all-features"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
@ -14,68 +14,90 @@ on:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
code_quality:
|
||||
name: Code quality
|
||||
crate_metadata:
|
||||
name: Extract crate metadata
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Extract crate information
|
||||
id: crate_metadata
|
||||
run: |
|
||||
echo "name=fd" | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT
|
||||
outputs:
|
||||
name: ${{ steps.crate_metadata.outputs.name }}
|
||||
version: ${{ steps.crate_metadata.outputs.version }}
|
||||
maintainer: ${{ steps.crate_metadata.outputs.maintainer }}
|
||||
homepage: ${{ steps.crate_metadata.outputs.homepage }}
|
||||
msrv: ${{ steps.crate_metadata.outputs.msrv }}
|
||||
|
||||
ensure_cargo_fmt:
|
||||
name: Ensure 'cargo fmt' has been run
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
- name: Install rust toolchain
|
||||
run: |
|
||||
rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
|
||||
rustup set profile minimal
|
||||
rustup toolchain install stable -c "clippy,rustfmt"
|
||||
rustup default stable
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: Ensure `cargo fmt` has been run
|
||||
run: cargo fmt --check
|
||||
- name: Ensure MSRV is set in `clippy.toml`
|
||||
run: grep "^msrv = \"${{ env.MIN_SUPPORTED_RUST_VERSION }}\"\$" clippy.toml
|
||||
- name: Run clippy
|
||||
run: cargo clippy --locked --all-targets --all-features
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo fmt -- --check
|
||||
|
||||
lint_check:
|
||||
name: Ensure 'cargo clippy' has no warnings
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: clippy
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo clippy --all-targets --all-features -- -Dwarnings
|
||||
|
||||
min_version:
|
||||
name: Minimum supported rust version
|
||||
runs-on: ubuntu-20.04
|
||||
needs: crate_metadata
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install rust toolchain (v${{ env.MIN_SUPPORTED_RUST_VERSION }})
|
||||
run: |
|
||||
rustup set profile minimal
|
||||
rustup toolchain install ${{ env.MIN_SUPPORTED_RUST_VERSION }} -c clippy
|
||||
rustup default ${{ env.MIN_SUPPORTED_RUST_VERSION }}
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }})
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ needs.crate_metadata.outputs.msrv }}
|
||||
components: clippy
|
||||
- name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)
|
||||
run: cargo clippy --locked --all-targets --all-features
|
||||
run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }}
|
||||
- name: Run tests
|
||||
run: cargo test --locked
|
||||
run: cargo test --locked ${{ env.MSRV_FEATURES }}
|
||||
|
||||
build:
|
||||
name: ${{ matrix.job.os }} (${{ matrix.job.target }})
|
||||
name: ${{ matrix.job.target }} (${{ matrix.job.os }})
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
needs: crate_metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- { os: ubuntu-20.04, target: arm-unknown-linux-gnueabihf , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: arm-unknown-linux-musleabihf, use-cross: true }
|
||||
- { os: ubuntu-20.04, target: aarch64-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: i686-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: i686-unknown-linux-musl , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: x86_64-unknown-linux-gnu , use-cross: true }
|
||||
- { os: ubuntu-20.04, target: x86_64-unknown-linux-musl , use-cross: true }
|
||||
- { os: macos-12 , target: x86_64-apple-darwin }
|
||||
# - { os: windows-2019, target: i686-pc-windows-gnu } ## disabled; error: linker `i686-w64-mingw32-gcc` not found
|
||||
- { os: windows-2019, target: i686-pc-windows-msvc }
|
||||
- { os: windows-2019, target: x86_64-pc-windows-gnu }
|
||||
- { os: windows-2019, target: x86_64-pc-windows-msvc }
|
||||
- { target: aarch64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: aarch64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-musleabihf, os: ubuntu-22.04, use-cross: true }
|
||||
- { target: i686-pc-windows-msvc , os: windows-2022 }
|
||||
- { target: i686-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: i686-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: x86_64-apple-darwin , os: macos-12 }
|
||||
- { target: aarch64-apple-darwin , os: macos-14 }
|
||||
- { target: x86_64-pc-windows-gnu , os: windows-2022 }
|
||||
- { target: x86_64-pc-windows-msvc , os: windows-2022 }
|
||||
- { target: x86_64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
|
||||
- { target: x86_64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
|
||||
env:
|
||||
BUILD_CMD: cargo
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install prerequisites
|
||||
shell: bash
|
||||
@ -85,20 +107,24 @@ jobs:
|
||||
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
|
||||
esac
|
||||
|
||||
- name: Extract crate information
|
||||
shell: bash
|
||||
run: |
|
||||
echo "PROJECT_NAME=fd" >> $GITHUB_ENV
|
||||
echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV
|
||||
echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV
|
||||
echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Rust toolchain
|
||||
run: |
|
||||
rustup set profile minimal
|
||||
rustup toolchain install stable
|
||||
rustup override set stable
|
||||
rustup target add ${{ matrix.job.target }}
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.job.target }}
|
||||
# On windows, for now build with 1.77.2, so that it works on windows 7.
|
||||
# When we update the MSRV again, we'll need to revisit this, and probably drop support for Win7
|
||||
toolchain: "${{ contains(matrix.job.target, 'windows-') && '1.77.2' || 'stable' }}"
|
||||
|
||||
- name: Install cross
|
||||
if: matrix.job.use-cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross
|
||||
|
||||
- name: Overwrite build command env variable
|
||||
if: matrix.job.use-cross
|
||||
shell: bash
|
||||
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Show version information (Rust, cargo, GCC)
|
||||
shell: bash
|
||||
@ -110,29 +136,12 @@ jobs:
|
||||
cargo -V
|
||||
rustc -V
|
||||
|
||||
- name: Set cargo cmd
|
||||
shell: bash
|
||||
run: echo "CARGO_CMD=cargo" >> $GITHUB_ENV
|
||||
|
||||
- name: Set cargo cmd to cross
|
||||
shell: bash
|
||||
if: ${{ matrix.job.use-cross == true }}
|
||||
run: echo "CARGO_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
key: ${{ matrix.job.os }}-${{ matrix.job.target }}
|
||||
|
||||
- name: Install cross
|
||||
if: ${{ matrix.job.use-cross == true }}
|
||||
run: cargo install cross
|
||||
|
||||
- name: Build
|
||||
run: ${{ env.CARGO_CMD }} build --locked --release --target=${{ matrix.job.target }}
|
||||
shell: bash
|
||||
run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }}
|
||||
|
||||
- name: Strip debug information from executable
|
||||
id: strip
|
||||
- name: Set binary name & path
|
||||
id: bin
|
||||
shell: bash
|
||||
run: |
|
||||
# Figure out suffix of binary
|
||||
@ -141,29 +150,11 @@ jobs:
|
||||
*-pc-windows-*) EXE_suffix=".exe" ;;
|
||||
esac;
|
||||
|
||||
# Figure out what strip tool to use if any
|
||||
STRIP="strip"
|
||||
case ${{ matrix.job.target }} in
|
||||
arm-unknown-linux-*) STRIP="arm-linux-gnueabihf-strip" ;;
|
||||
aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;;
|
||||
*-pc-windows-msvc) STRIP="" ;;
|
||||
esac;
|
||||
|
||||
# Setup paths
|
||||
BIN_DIR="${{ env.CICD_INTERMEDIATES_DIR }}/stripped-release-bin/"
|
||||
mkdir -p "${BIN_DIR}"
|
||||
BIN_NAME="${{ env.PROJECT_NAME }}${EXE_suffix}"
|
||||
BIN_PATH="${BIN_DIR}/${BIN_NAME}"
|
||||
BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}"
|
||||
BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}"
|
||||
|
||||
# Copy the release build binary to the result location
|
||||
cp "target/${{ matrix.job.target }}/release/${BIN_NAME}" "${BIN_DIR}"
|
||||
|
||||
# Also strip if possible
|
||||
if [ -n "${STRIP}" ]; then
|
||||
"${STRIP}" "${BIN_PATH}"
|
||||
fi
|
||||
|
||||
# Let subsequent steps know where to find the (stripped) bin
|
||||
# Let subsequent steps know where to find the binary
|
||||
echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
|
||||
echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
@ -173,11 +164,12 @@ jobs:
|
||||
run: |
|
||||
# test only library unit tests and binary for arm-type targets
|
||||
unset CARGO_TEST_OPTIONS
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac;
|
||||
echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Run tests
|
||||
run: ${{ env.CARGO_CMD }} test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
|
||||
shell: bash
|
||||
run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
|
||||
|
||||
- name: Generate completions
|
||||
id: completions
|
||||
@ -189,7 +181,7 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
||||
PKG_BASENAME=${PROJECT_NAME}-v${PROJECT_VERSION}-${{ matrix.job.target }}
|
||||
PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }}
|
||||
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
||||
echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
@ -198,14 +190,14 @@ jobs:
|
||||
mkdir -p "${ARCHIVE_DIR}"
|
||||
|
||||
# Binary
|
||||
cp "${{ steps.strip.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
|
||||
|
||||
# Man page
|
||||
cp 'doc/${{ env.PROJECT_NAME }}.1' "$ARCHIVE_DIR"
|
||||
cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
|
||||
|
||||
# README, LICENSE and CHANGELOG files
|
||||
cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR"
|
||||
|
||||
# Man page
|
||||
cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR"
|
||||
|
||||
# Autocompletion files
|
||||
cp -r autocomplete "${ARCHIVE_DIR}"
|
||||
|
||||
@ -224,113 +216,11 @@ jobs:
|
||||
id: debian-package
|
||||
shell: bash
|
||||
if: startsWith(matrix.job.os, 'ubuntu')
|
||||
run: |
|
||||
COPYRIGHT_YEARS="2018 - "$(date "+%Y")
|
||||
DPKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/debian-package"
|
||||
DPKG_DIR="${DPKG_STAGING}/dpkg"
|
||||
mkdir -p "${DPKG_DIR}"
|
||||
|
||||
DPKG_BASENAME=${PROJECT_NAME}
|
||||
DPKG_CONFLICTS=${PROJECT_NAME}-musl
|
||||
case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${PROJECT_NAME}-musl ; DPKG_CONFLICTS=${PROJECT_NAME} ;; esac;
|
||||
DPKG_VERSION=${PROJECT_VERSION}
|
||||
|
||||
unset DPKG_ARCH
|
||||
case ${{ matrix.job.target }} in
|
||||
aarch64-*-linux-*) DPKG_ARCH=arm64 ;;
|
||||
arm-*-linux-*hf) DPKG_ARCH=armhf ;;
|
||||
i686-*-linux-*) DPKG_ARCH=i686 ;;
|
||||
x86_64-*-linux-*) DPKG_ARCH=amd64 ;;
|
||||
*) DPKG_ARCH=notset ;;
|
||||
esac;
|
||||
|
||||
DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb"
|
||||
echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Binary
|
||||
install -Dm755 "${{ steps.strip.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.strip.outputs.BIN_NAME }}"
|
||||
|
||||
# Man page
|
||||
install -Dm644 'doc/${{ env.PROJECT_NAME }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1"
|
||||
|
||||
# Autocompletion files
|
||||
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ env.PROJECT_NAME }}"
|
||||
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ env.PROJECT_NAME }}.fish"
|
||||
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ env.PROJECT_NAME }}"
|
||||
|
||||
# README and LICENSE
|
||||
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
|
||||
install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT"
|
||||
install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE"
|
||||
install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
|
||||
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: ${{ env.PROJECT_NAME }}
|
||||
Source: ${{ env.PROJECT_HOMEPAGE }}
|
||||
|
||||
Files: *
|
||||
Copyright: ${{ env.PROJECT_MAINTAINER }}
|
||||
Copyright: $COPYRIGHT_YEARS ${{ env.PROJECT_MAINTAINER }}
|
||||
License: Apache-2.0 or MIT
|
||||
|
||||
License: Apache-2.0
|
||||
On Debian systems, the complete text of the Apache-2.0 can be found in the
|
||||
file /usr/share/common-licenses/Apache-2.0.
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
EOF
|
||||
chmod 644 "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright"
|
||||
|
||||
# control file
|
||||
mkdir -p "${DPKG_DIR}/DEBIAN"
|
||||
cat > "${DPKG_DIR}/DEBIAN/control" <<EOF
|
||||
Package: ${DPKG_BASENAME}
|
||||
Version: ${DPKG_VERSION}
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: ${{ env.PROJECT_MAINTAINER }}
|
||||
Homepage: ${{ env.PROJECT_HOMEPAGE }}
|
||||
Architecture: ${DPKG_ARCH}
|
||||
Provides: ${{ env.PROJECT_NAME }}
|
||||
Conflicts: ${DPKG_CONFLICTS}
|
||||
Description: simple, fast and user-friendly alternative to find
|
||||
fd is a program to find entries in your filesystem.
|
||||
It is a simple, fast and user-friendly alternative to find.
|
||||
While it does not aim to support all of finds powerful functionality, it provides
|
||||
sensible (opinionated) defaults for a majority of use cases.
|
||||
EOF
|
||||
|
||||
DPKG_PATH="${DPKG_STAGING}/${DPKG_NAME}"
|
||||
echo "DPKG_PATH=${DPKG_PATH}" >> $GITHUB_OUTPUT
|
||||
|
||||
# build dpkg
|
||||
fakeroot dpkg-deb --build "${DPKG_DIR}" "${DPKG_PATH}"
|
||||
run: bash scripts/create-deb.sh
|
||||
env:
|
||||
TARGET: ${{ matrix.job.target }}
|
||||
DPKG_VERSION: ${{ needs.crate_metadata.version }}
|
||||
BIN_PATH: ${{ steps.bin.outputs.BIN_PATH }}
|
||||
|
||||
- name: "Artifact upload: tarball"
|
||||
uses: actions/upload-artifact@master
|
||||
@ -353,7 +243,7 @@ jobs:
|
||||
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish archives and packages
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: steps.is-release.outputs.IS_RELEASE
|
||||
with:
|
||||
files: |
|
||||
@ -361,3 +251,15 @@ jobs:
|
||||
${{ steps.debian-package.outputs.DPKG_PATH }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
winget:
|
||||
name: Publish to Winget
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
- uses: vedantmgoyal2009/winget-releaser@v2
|
||||
with:
|
||||
identifier: sharkdp.fd
|
||||
installers-regex: '-pc-windows-msvc\.zip$'
|
||||
token: ${{ secrets.WINGET_TOKEN }}
|
||||
|
118
CHANGELOG.md
118
CHANGELOG.md
@ -1,3 +1,121 @@
|
||||
# 10.2.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add --hyperlink option to add OSC 8 hyperlinks to output
|
||||
|
||||
|
||||
## Bugfixes
|
||||
|
||||
|
||||
## Changes
|
||||
|
||||
- Build windows releases with rust 1.77 so windows 7 is still supported
|
||||
- Deb packages now include symlink for fdfind to be more consistent with official packages
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
# 10.1.0
|
||||
|
||||
## Features
|
||||
|
||||
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto". to force whether the cwd prefix is stripped or not.
|
||||
- Add a `--format` option which allows using a format template for direct ouput similar to the template used for `--exec`. (#1043)
|
||||
|
||||
## Bugfixes
|
||||
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
|
||||
|
||||
# v10.0.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add `dir` as an alias to `directory` when using `-t` \ `--type`, see #1460 and #1464 (@Ato2207).
|
||||
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
|
||||
- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature
|
||||
was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. And there isn't really a good way for us to add
|
||||
a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.
|
||||
See #1457.
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
|
||||
- Fix bug that would cause hidden files to be included despite gitignore rules
|
||||
if search path is "." (#1461, BurntSushi/ripgrep#2711).
|
||||
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
|
||||
have a larger system page size than the system that the binary was built on. (#1547)
|
||||
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
|
||||
|
||||
|
||||
## Changes
|
||||
- Minimum supported rust version is now 1.77.2
|
||||
|
||||
|
||||
# v9.0.0
|
||||
|
||||
## Performance
|
||||
|
||||
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
|
||||
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
|
||||
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
|
||||
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
|
||||
|
||||
- The default number of threads is now constrained to be at most 64. This should improve startup time on
|
||||
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
|
||||
|
||||
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
|
||||
use cases, see #1452 and #1313 (@tavianator).
|
||||
|
||||
## Features
|
||||
|
||||
- Support character and block device file types, see #1213 and #1336 (@cgzones)
|
||||
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
|
||||
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
|
||||
|
||||
## Other
|
||||
|
||||
- Fixed documentation typos, see #1409 (@marcospb19)
|
||||
|
||||
## Thanks
|
||||
|
||||
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
|
||||
|
||||
|
||||
|
||||
# v8.7.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- `-1` properly conflicts with the exec family of options.
|
||||
- `--max-results` overrides `-1`
|
||||
- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive
|
||||
- `--changed-within` now accepts a space as well as a "T" as the separator between date and time (due to update of chrono dependency)
|
||||
|
||||
## Other
|
||||
- Many dependencies were updated
|
||||
- Some documentation was updated and fixed
|
||||
|
||||
# v8.7.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix logic for when to use global ignore file. There was a bug where the only case where the
|
||||
global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`
|
||||
nor `--no-global-ignore-file` is passed. See #1209
|
||||
|
||||
# v8.6.0
|
||||
|
||||
## Features
|
||||
|
@ -13,11 +13,11 @@ give us the chance to discuss any potential changes first.
|
||||
## Add an entry to the changelog
|
||||
|
||||
If your contribution changes the behavior of `fd` (as opposed to a typo-fix
|
||||
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md) file
|
||||
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file
|
||||
and describe your changes. This makes the release process much easier and
|
||||
therefore helps to get your changes into a new `fd` release faster.
|
||||
|
||||
The top of the `CHANGELOG` contains a *"unreleased"* section with a few
|
||||
The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few
|
||||
subsections (Features, Bugfixes, …). Please add your entry to the subsection
|
||||
that best describes your change.
|
||||
|
||||
|
916
Cargo.lock
generated
916
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
45
Cargo.toml
45
Cargo.toml
@ -12,12 +12,13 @@ keywords = [
|
||||
"filesystem",
|
||||
"tool",
|
||||
]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
name = "fd-find"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/sharkdp/fd"
|
||||
version = "8.6.0"
|
||||
version = "10.2.0"
|
||||
edition= "2021"
|
||||
rust-version = "1.77.2"
|
||||
|
||||
[badges.appveyor]
|
||||
repository = "sharkdp/fd"
|
||||
@ -33,41 +34,38 @@ path = "src/main.rs"
|
||||
version_check = "0.9"
|
||||
|
||||
[dependencies]
|
||||
nu-ansi-term = "0.46"
|
||||
aho-corasick = "1.1"
|
||||
nu-ansi-term = "0.50"
|
||||
argmax = "0.3.1"
|
||||
atty = "0.2"
|
||||
ignore = "0.4.3"
|
||||
num_cpus = "1.13"
|
||||
regex = "1.7.0"
|
||||
regex-syntax = "0.6"
|
||||
ignore = "0.4.22"
|
||||
regex = "1.10.5"
|
||||
regex-syntax = "0.8"
|
||||
ctrlc = "3.2"
|
||||
humantime = "2.1"
|
||||
globset = "0.4"
|
||||
anyhow = "1.0"
|
||||
dirs-next = "2.0"
|
||||
normpath = "0.3.2"
|
||||
once_cell = "1.15.0"
|
||||
crossbeam-channel = "0.5.6"
|
||||
clap_complete = {version = "4.0.6", optional = true}
|
||||
etcetera = "0.8"
|
||||
normpath = "1.1.1"
|
||||
crossbeam-channel = "0.5.13"
|
||||
clap_complete = {version = "4.5.24", optional = true}
|
||||
faccess = "0.2.4"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "4.0.22"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "unstable-grouped", "derive"]
|
||||
version = "4.5.13"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "derive"]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4.23"
|
||||
version = "0.4.38"
|
||||
default-features = false
|
||||
features = ["std", "clock"]
|
||||
|
||||
[dependencies.lscolors]
|
||||
version = "0.13"
|
||||
version = "0.19"
|
||||
default-features = false
|
||||
features = ["nu-ansi-term"]
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
users = "0.11.0"
|
||||
nix = { version = "0.24.2", default-features = false, features = ["signal"] }
|
||||
nix = { version = "0.29.0", default-features = false, features = ["signal", "user", "hostname"] }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
|
||||
libc = "0.2"
|
||||
@ -75,17 +73,18 @@ libc = "0.2"
|
||||
# FIXME: Re-enable jemalloc on macOS
|
||||
# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS
|
||||
# Catalina. See https://github.com/sharkdp/fd/issues/498 for details.
|
||||
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
|
||||
jemallocator = {version = "0.5.0", optional = true}
|
||||
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
|
||||
jemallocator = {version = "0.5.4", optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
diff = "0.1"
|
||||
tempfile = "3.3"
|
||||
tempfile = "3.10"
|
||||
filetime = "0.2"
|
||||
test-case = "2.2"
|
||||
test-case = "3.3"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
strip = true
|
||||
codegen-units = 1
|
||||
|
||||
[features]
|
||||
|
6
Cross.toml
Normal file
6
Cross.toml
Normal file
@ -0,0 +1,6 @@
|
||||
# https://github.com/sharkdp/fd/issues/1085
|
||||
[target.aarch64-unknown-linux-gnu.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
||||
|
||||
[target.aarch64-unknown-linux-musl.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
2
Makefile
2
Makefile
@ -6,7 +6,7 @@ datadir=$(prefix)/share
|
||||
exe_name=fd
|
||||
|
||||
$(EXE): Cargo.toml src/**/*.rs
|
||||
cargo build --profile $(PROFILE)
|
||||
cargo build --profile $(PROFILE) --locked
|
||||
|
||||
.PHONY: completions
|
||||
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd
|
||||
|
198
README.md
198
README.md
@ -2,7 +2,7 @@
|
||||
|
||||
[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml)
|
||||
[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)
|
||||
[[中文](https://github.com/chinanf-boy/fd-zh)]
|
||||
[[中文](https://github.com/cha0ran/fd-zh)]
|
||||
[[한국어](https://github.com/spearkkk/fd-kor)]
|
||||
|
||||
`fd` is a program to find entries in your filesystem.
|
||||
@ -10,10 +10,7 @@ It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.o
|
||||
While it does not aim to support all of `find`'s powerful functionality, it provides sensible
|
||||
(opinionated) defaults for a majority of use cases.
|
||||
|
||||
Quick links:
|
||||
* [How to use](#how-to-use)
|
||||
* [Installation](#installation)
|
||||
* [Troubleshooting](#troubleshooting)
|
||||
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Features
|
||||
|
||||
@ -63,7 +60,7 @@ X11/xinit/xinitrc
|
||||
X11/xinit/xserverrc
|
||||
```
|
||||
|
||||
The regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/1.0.0/regex/#syntax).
|
||||
The regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/latest/regex/#syntax).
|
||||
|
||||
### Specifying the root directory
|
||||
|
||||
@ -143,7 +140,7 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
|
||||
```
|
||||
|
||||
To really search *all* files and directories, simply combine the hidden and ignore features to show
|
||||
everything (`-HI`).
|
||||
everything (`-HI`) or use `-u`/`--unrestricted`.
|
||||
|
||||
### Matching the full path
|
||||
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
|
||||
@ -261,12 +258,17 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
|
||||
/mnt/external-drive
|
||||
*.bak
|
||||
```
|
||||
Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
> [!NOTE]
|
||||
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
|
||||
This is usually located in `~/.config/fd/ignore` on macOS or Linux, and `%APPDATA%\fd\ignore` on
|
||||
Windows.
|
||||
|
||||
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
|
||||
are not included in output if you use the `--hidden` option.
|
||||
|
||||
### Deleting files
|
||||
|
||||
You can use `fd` to remove all files and directories that are matched by your search pattern.
|
||||
@ -284,7 +286,8 @@ option:
|
||||
If you also want to remove a certain class of directories, you can use the same technique. You will
|
||||
have to use `rm`s `--recursive`/`-r` flag to remove directories.
|
||||
|
||||
Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
> [!NOTE]
|
||||
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
|
||||
situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
|
||||
No such file or directory"* errors in the `rm` call.
|
||||
@ -313,81 +316,78 @@ Options:
|
||||
-p, --full-path Search full abs. path (default: filename only)
|
||||
-d, --max-depth <depth> Set maximum search depth (default: none)
|
||||
-E, --exclude <pattern> Exclude entries that match the given glob pattern
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p)
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p), char-device
|
||||
(c), block-device (b)
|
||||
-e, --extension <ext> Filter by file extension
|
||||
-S, --size <size> Limit results based on the size of files
|
||||
--changed-within <date|dur> Filter by file modification time (newer than)
|
||||
--changed-before <date|dur> Filter by file modification time (older than)
|
||||
-o, --owner <user:group> Filter by owning user and/or group
|
||||
--format <fmt> Print results according to template
|
||||
-x, --exec <cmd>... Execute a command for each search result
|
||||
-X, --exec-batch <cmd>... Execute a command with all search results at once
|
||||
-c, --color <when> When to use colors [default: auto] [possible values: auto,
|
||||
always, never]
|
||||
-h, --help Print help information (use `--help` for more detail)
|
||||
-V, --version Print version information
|
||||
--hyperlink[=<when>] Add hyperlinks to output paths [default: never] [possible
|
||||
values: auto, always, never]
|
||||
-h, --help Print help (see more with '--help')
|
||||
-V, --version Print version
|
||||
```
|
||||
|
||||
## Benchmark
|
||||
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
|
||||
subdirectories and about a million files. For averaging and statistical analysis, I'm using
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000
|
||||
subdirectories and about 4 million files. For averaging and statistical analysis, I'm using
|
||||
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
|
||||
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
|
||||
|
||||
Let's start with `find`:
|
||||
```
|
||||
Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
|
||||
Time (mean ± σ): 7.236 s ± 0.090 s
|
||||
|
||||
Range (min … max): 7.133 s … 7.385 s
|
||||
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
Time (mean ± σ): 19.922 s ± 0.109 s
|
||||
Range (min … max): 19.765 s … 20.065 s
|
||||
```
|
||||
|
||||
`find` is much faster if it does not need to perform a regular-expression search:
|
||||
```
|
||||
Benchmark #2: find ~ -iname '*[0-9].jpg'
|
||||
|
||||
Time (mean ± σ): 3.914 s ± 0.027 s
|
||||
|
||||
Range (min … max): 3.876 s … 3.964 s
|
||||
Benchmark 2: find ~ -iname '*[0-9].jpg'
|
||||
Time (mean ± σ): 11.226 s ± 0.104 s
|
||||
Range (min … max): 11.119 s … 11.466 s
|
||||
```
|
||||
|
||||
Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
|
||||
search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
|
||||
otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
|
||||
Now let's try the same for `fd`. Note that `fd` performs a regular expression
|
||||
search by default. The `-u`/`--unrestricted` option is needed here for
|
||||
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and
|
||||
ignored paths (see below):
|
||||
```
|
||||
Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 811.6 ms ± 26.9 ms
|
||||
|
||||
Range (min … max): 786.0 ms … 870.7 ms
|
||||
Benchmark 3: fd -u '[0-9]\.jpg$' ~
|
||||
Time (mean ± σ): 854.8 ms ± 10.0 ms
|
||||
Range (min … max): 839.2 ms … 868.9 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately nine times faster than `find -iregex`
|
||||
and about five times faster than `find -iname`. By the way, both tools found the exact
|
||||
same 20880 files :smile:.
|
||||
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
|
||||
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
|
||||
same 546 files :smile:.
|
||||
|
||||
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
|
||||
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
|
||||
folders, it is almost an order of magnitude faster:
|
||||
```
|
||||
Benchmark #4: fd '[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 123.7 ms ± 6.0 ms
|
||||
|
||||
Range (min … max): 118.8 ms … 140.0 ms
|
||||
```
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
|
||||
performed quite a lot of different tests (and found consistent results), things might
|
||||
be different for you! I encourage everyone to try it out on their own. See
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While we have
|
||||
performed a lot of different tests (and found consistent results), things might
|
||||
be different for you! We encourage everyone to try it out on their own. See
|
||||
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
|
||||
|
||||
Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
|
||||
in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are
|
||||
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the options `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):
|
||||
``` bash
|
||||
> fd -u …
|
||||
```
|
||||
|
||||
### Colorized output
|
||||
|
||||
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
|
||||
@ -401,15 +401,6 @@ for alternative, more complete (or more colorful) variants, see [here](https://g
|
||||
|
||||
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the options `-H` and `-I` to disable these two features:
|
||||
``` bash
|
||||
> fd -HI …
|
||||
```
|
||||
|
||||
### `fd` doesn't seem to interpret my regex pattern correctly
|
||||
|
||||
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
|
||||
@ -488,16 +479,17 @@ In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alt
|
||||
|
||||
### Printing the output as a tree
|
||||
|
||||
To format the output of `fd` similar to the `tree` command, install [`as-tree`] and pipe the output
|
||||
of `fd` to `as-tree`:
|
||||
To format the output of `fd` as a file-tree you can use the `tree` command with
|
||||
`--fromfile`:
|
||||
```bash
|
||||
fd | as-tree
|
||||
❯ fd | tree --fromfile
|
||||
```
|
||||
|
||||
This can be more useful than running `tree` by itself because `tree` does not ignore any files by
|
||||
default, nor does it support as rich a set of options as `fd` does to control what to print:
|
||||
This can be more useful than running `tree` by itself because `tree` does not
|
||||
ignore any files by default, nor does it support as rich a set of options as
|
||||
`fd` does to control what to print:
|
||||
```bash
|
||||
❯ fd --extension rs | as-tree
|
||||
❯ fd --extension rs | tree --fromfile
|
||||
.
|
||||
├── build.rs
|
||||
└── src
|
||||
@ -505,9 +497,10 @@ default, nor does it support as rich a set of options as `fd` does to control wh
|
||||
└── error.rs
|
||||
```
|
||||
|
||||
For more information about `as-tree`, see [the `as-tree` README][`as-tree`].
|
||||
|
||||
[`as-tree`]: https://github.com/jez/as-tree
|
||||
On bash and similar you can simply create an alias:
|
||||
```bash
|
||||
❯ alias as-tree='tree --fromfile'
|
||||
```
|
||||
|
||||
### Using fd with `xargs` or `parallel`
|
||||
|
||||
@ -530,7 +523,7 @@ newlines). In the same way, the `-0` option of `xargs` tells it to read the inpu
|
||||
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
|
||||
[officially maintained package](https://packages.ubuntu.com/fd-find):
|
||||
```
|
||||
sudo apt install fd-find
|
||||
apt install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
@ -540,21 +533,25 @@ Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
||||
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
|
||||
[release page](https://github.com/sharkdp/fd/releases) and install it via:
|
||||
``` bash
|
||||
sudo dpkg -i fd_8.6.0_amd64.deb # adapt version number and architecture
|
||||
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
|
||||
```
|
||||
|
||||
Note that the .deb packages on the release page for this project still name the executable `fd`.
|
||||
|
||||
### On Debian
|
||||
|
||||
If you run Debian Buster or newer, you can install the
|
||||
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
|
||||
```
|
||||
sudo apt-get install fd-find
|
||||
apt-get install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.
|
||||
Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
||||
|
||||
Note that the .deb packages on the release page for this project still name the executable `fd`.
|
||||
|
||||
### On Fedora
|
||||
|
||||
Starting with Fedora 28, you can install `fd` from the official package sources:
|
||||
@ -576,6 +573,8 @@ You can install [the fd package](https://www.archlinux.org/packages/community/x8
|
||||
```
|
||||
pacman -S fd
|
||||
```
|
||||
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
|
||||
|
||||
### On Gentoo Linux
|
||||
|
||||
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
|
||||
@ -597,22 +596,31 @@ You can install `fd` via xbps-install:
|
||||
xbps-install -S fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8 (RHEL8), Almalinux 8, EuroLinux 8 or Rocky Linux 8
|
||||
### On ALT Linux
|
||||
|
||||
Get the latest fd-v*-x86_64-unknown-linux-gnu.tar.gz file from [sharkdp on github](https://github.com/sharkdp/fd/releases)
|
||||
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:
|
||||
```
|
||||
tar xf fd-v*-x86_64-unknown-linux-gnu.tar.gz
|
||||
chown -R root:root fd-v*-x86_64-unknown-linux-gnu
|
||||
cd fd-v*-x86_64-unknown-linux-gnu
|
||||
sudo cp fd /bin
|
||||
gzip fd.1
|
||||
chown root:root fd.1.gz
|
||||
sudo cp fd.1.gz /usr/share/man/man1
|
||||
sudo cp autocomplete/fd.bash /usr/share/bash-completion/completions/fd
|
||||
source /usr/share/bash-completion/completions/fd
|
||||
fd
|
||||
apt-get install fd
|
||||
```
|
||||
|
||||
### On Solus
|
||||
|
||||
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
|
||||
```
|
||||
eopkg install fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
|
||||
|
||||
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
|
||||
|
||||
```bash
|
||||
dnf copr enable tkbcopr/fd
|
||||
dnf install fd
|
||||
```
|
||||
|
||||
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
|
||||
|
||||
### On macOS
|
||||
|
||||
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
|
||||
@ -622,7 +630,7 @@ brew install fd
|
||||
|
||||
… or with MacPorts:
|
||||
```
|
||||
sudo port install fd
|
||||
port install fd
|
||||
```
|
||||
|
||||
### On Windows
|
||||
@ -639,6 +647,11 @@ Or via [Chocolatey](https://chocolatey.org):
|
||||
choco install fd
|
||||
```
|
||||
|
||||
Or via [Winget](https://learn.microsoft.com/en-us/windows/package-manager/):
|
||||
```
|
||||
winget install sharkdp.fd
|
||||
```
|
||||
|
||||
### On GuixOS
|
||||
|
||||
You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:
|
||||
@ -653,6 +666,13 @@ You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
|
||||
nix-env -i fd
|
||||
```
|
||||
|
||||
### Via Flox
|
||||
|
||||
You can use [Flox](https://flox.dev) to install `fd` into a Flox environment:
|
||||
```
|
||||
flox install fd
|
||||
```
|
||||
|
||||
### On FreeBSD
|
||||
|
||||
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
|
||||
@ -662,7 +682,7 @@ pkg install fd-find
|
||||
|
||||
### From npm
|
||||
|
||||
On linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
|
||||
```
|
||||
npm install -g fd-find
|
||||
@ -674,7 +694,7 @@ With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can
|
||||
```
|
||||
cargo install fd-find
|
||||
```
|
||||
Note that rust version *1.60.0* or later is required.
|
||||
Note that rust version *1.77.2* or later is required.
|
||||
|
||||
`make` is also needed for the build.
|
||||
|
||||
@ -705,8 +725,6 @@ cargo install --path .
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2017-2021 The fd developers
|
||||
|
||||
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
|
||||
|
||||
See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.
|
||||
|
2
build.rs
2
build.rs
@ -1,5 +1,5 @@
|
||||
fn main() {
|
||||
let min_version = "1.60";
|
||||
let min_version = "1.64";
|
||||
|
||||
match version_check::is_min_version(min_version) {
|
||||
Some(true) => {}
|
||||
|
@ -1 +0,0 @@
|
||||
msrv = "1.60.0"
|
@ -26,6 +26,8 @@ _fd() {
|
||||
{l,symlink}'\:"symbolic links"'
|
||||
{e,empty}'\:"empty files or directories"'
|
||||
{x,executable}'\:"executable (files)"'
|
||||
{b,block-device}'\:"block devices"'
|
||||
{c,char-device}'\:"character devices"'
|
||||
{s,socket}'\:"sockets"'
|
||||
{p,pipe}'\:"named pipes (FIFOs)"'
|
||||
)
|
||||
@ -36,7 +38,7 @@ _fd() {
|
||||
# for all of the potential negation options listed below!
|
||||
if
|
||||
# (--[bpsu]* => match all options marked with '$no')
|
||||
[[ $PREFIX$SUFFIX == --[bopsu]* ]] ||
|
||||
[[ $PREFIX$SUFFIX == --[bopsun]* ]] ||
|
||||
zstyle -t ":complete:$curcontext:*" complete-all
|
||||
then
|
||||
no=
|
||||
@ -70,6 +72,9 @@ _fd() {
|
||||
{-g,--glob}'[perform a glob-based search]'
|
||||
{-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'
|
||||
|
||||
+ '(no-require-git)'
|
||||
"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]"
|
||||
|
||||
+ '(match-full)' # match against full path
|
||||
{-p,--full-path}'[match the pattern against the full path instead of the basename]'
|
||||
|
||||
@ -118,6 +123,7 @@ _fd() {
|
||||
|
||||
+ '(filter-mtime-newer)' # filter by files modified after than
|
||||
'--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'
|
||||
'--changed-after=[alias for --changed-within]:date/duration'
|
||||
'!--change-newer-than=:date/duration'
|
||||
'!--newer=:date/duration'
|
||||
|
||||
@ -133,6 +139,8 @@ _fd() {
|
||||
always\:"always use colorized output"
|
||||
))'
|
||||
|
||||
'--hyperlink=-[add hyperlinks to output paths]::when:(auto never always)'
|
||||
|
||||
+ '(threads)'
|
||||
{-j+,--threads=}'[set the number of threads for searching and executing]:number of threads'
|
||||
|
||||
@ -156,7 +164,11 @@ _fd() {
|
||||
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
|
||||
|
||||
+ strip-cwd-prefix
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]'
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=-[When to strip ./]::when:(always never auto)'
|
||||
|
||||
+ and
|
||||
'--and=[additional required search path]:pattern'
|
||||
|
||||
|
||||
+ args # positional arguments
|
||||
'1: :_guard "^-*" pattern'
|
||||
|
158
doc/fd.1
vendored
158
doc/fd.1
vendored
@ -29,11 +29,19 @@ By default
|
||||
.B fd
|
||||
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
|
||||
with the '\-\-glob' option.
|
||||
.P
|
||||
By default
|
||||
.B fd
|
||||
will exclude hidden files and directories, as well as any files that match gitignore rules
|
||||
or ignore rules in .ignore or .fdignore files.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-H, \-\-hidden
|
||||
Include hidden files and directories in the search results
|
||||
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
|
||||
.IP
|
||||
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
|
||||
is also used.
|
||||
.TP
|
||||
.B \-I, \-\-no\-ignore
|
||||
Show search results from files and directories that would otherwise be ignored by
|
||||
@ -71,6 +79,14 @@ git setting, which defaults to
|
||||
.IR $HOME/.config/git/ignore ).
|
||||
The flag can be overridden with '--ignore-vcs'.
|
||||
.TP
|
||||
.B \-\-no\-require\-git
|
||||
Do not require a git repository to respect gitignores. By default, fd will only
|
||||
respect global gitignore rules, .gitignore rules and local exclude rules if fd
|
||||
detects that you are searching inside a git repository. This flag allows you to
|
||||
relax this restriction such that fd will respect all git related ignore rules
|
||||
regardless of whether you’re searching in a git repository or not. The flag can
|
||||
be overridden with '--require-git'.
|
||||
.TP
|
||||
.B \-\-no\-ignore\-parent
|
||||
Show search results from files and directories that would otherwise be ignored by gitignore files in
|
||||
parent directories.
|
||||
@ -94,6 +110,11 @@ Perform a regular-expression based search (default). This can be used to overrid
|
||||
Treat the pattern as a literal string instead of a regular expression. Note that this also
|
||||
performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'.
|
||||
.TP
|
||||
.BI "\-\-and " pattern
|
||||
Add additional required search patterns, all of which must be matched. Multiple additional
|
||||
patterns can be specified. The patterns are regular expressions, unless '\-\-glob'
|
||||
or '\-\-fixed\-strings' is used.
|
||||
.TP
|
||||
.B \-a, \-\-absolute\-path
|
||||
Shows the full path starting from the root as opposed to relative paths.
|
||||
The flag can be overridden with '--relative-path'.
|
||||
@ -135,9 +156,20 @@ can be used as an alias.
|
||||
Enable the display of filesystem errors for situations such as insufficient
|
||||
permissions or dead symlinks.
|
||||
.TP
|
||||
.B \-\-strip-cwd-prefix
|
||||
By default, relative paths are prefixed with './' when the output goes to a non interactive terminal
|
||||
(TTY). Use this flag to disable this behaviour.
|
||||
.B \-\-strip-cwd-prefix [when]
|
||||
By default, relative paths are prefixed with './' when -x/--exec,
|
||||
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
path starting with '-' being treated as a command line option. Use
|
||||
this flag to change this behavior. If this flag is used without a value,
|
||||
it is equivalent to passing "always". Possible values are:
|
||||
.RS
|
||||
.IP never
|
||||
Never strip the ./ at the beginning of paths
|
||||
.IP always
|
||||
Always strip the ./ at the beginning of paths
|
||||
.IP auto
|
||||
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-one\-file\-system, \-\-mount, \-\-xdev
|
||||
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
|
||||
@ -167,10 +199,14 @@ Filter search by type:
|
||||
.RS
|
||||
.IP "f, file"
|
||||
regular files
|
||||
.IP "d, directory"
|
||||
.IP "d, dir, directory"
|
||||
directories
|
||||
.IP "l, symlink"
|
||||
symbolic links
|
||||
.IP "b, block-device"
|
||||
block devices
|
||||
.IP "c, char-device"
|
||||
character devices
|
||||
.IP "s, socket"
|
||||
sockets
|
||||
.IP "p, pipe"
|
||||
@ -240,6 +276,24 @@ Do not colorize output.
|
||||
Always colorize output.
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-hyperlink
|
||||
Specify whether the output should use terminal escape codes to indicate a hyperlink to a
|
||||
file url pointing to the path.
|
||||
|
||||
The value can be auto, always, or never.
|
||||
|
||||
Currently, the default is "never", and if the option is used without an argument "auto" is
|
||||
used. In the future this may be changed to "auto" and "always".
|
||||
.RS
|
||||
.IP auto
|
||||
Only output hyperlinks if color is also enabled, as a proxy for whether terminal escape
|
||||
codes are acceptable.
|
||||
.IP never
|
||||
Never output hyperlink escapes.
|
||||
.IP always
|
||||
Always output hyperlink escapes, regardless of color settings.
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-j, \-\-threads " num
|
||||
Set number of threads to use for searching & executing (default: number of available CPU cores).
|
||||
.TP
|
||||
@ -284,8 +338,9 @@ tebibytes
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times greater than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
\fB\-\-change-newer-than\fR,
|
||||
.B --newer
|
||||
or
|
||||
@ -296,13 +351,15 @@ Examples:
|
||||
\-\-changed-within 2weeks
|
||||
\-\-change-newer-than "2018-10-27 10:00:00"
|
||||
\-\-newer 2018-10-27
|
||||
\-\-changed-after @1704067200
|
||||
.TP
|
||||
.BI "\-\-changed-before " date|duration
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times less than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
.B --change-older-than
|
||||
or
|
||||
.B --older
|
||||
@ -311,6 +368,7 @@ can be used as aliases.
|
||||
Examples:
|
||||
\-\-changed-before "2018-10-27 10:00:00"
|
||||
\-\-change-older-than 2weeks
|
||||
\-\-older @1704067200
|
||||
.TP
|
||||
.BI "-o, \-\-owner " [user][:group]
|
||||
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
|
||||
@ -335,6 +393,30 @@ Set the path separator to use when printing file paths. The default is the OS-sp
|
||||
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
|
||||
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
|
||||
.TP
|
||||
.BI "\-\-format " fmt
|
||||
Specify a template string that is used for printing a line for each file found.
|
||||
|
||||
The following placeholders are substituted into the string for each file before printing:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.IP {{
|
||||
literal '{' (an escape sequence)
|
||||
.IP }}
|
||||
literal '}' (an escape sequence)
|
||||
.P
|
||||
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
|
||||
useful if you need to include the literal text of one of the above placeholders.
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-x, \-\-exec " command
|
||||
.RS
|
||||
Execute
|
||||
@ -351,19 +433,13 @@ This option can be specified multiple times, in which case all commands are run
|
||||
file found, in the order they are provided. In that case, you must supply a ';' argument for
|
||||
all but the last commands.
|
||||
|
||||
The following placeholders are substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
If parallelism is enabled, the order commands will be executed in is non-deterministic. And even with
|
||||
--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is
|
||||
recommended that you don't rely on any ordering of the results.
|
||||
|
||||
Before executing the command, any placeholder patterns in the command are replaced with the
|
||||
corresponding values for the current file. The same placeholders are used as in the "\-\-format"
|
||||
option.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
@ -387,19 +463,12 @@ Examples:
|
||||
Execute
|
||||
.I command
|
||||
once, with all search results as arguments.
|
||||
One of the following placeholders is substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of all search results)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
|
||||
The order of the arguments is non-deterministic and should not be relied upon.
|
||||
|
||||
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of expanding
|
||||
once per command invocation each argument containing a placeholder is expanding for every
|
||||
file in a batch and passed as separate arguments.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
@ -448,6 +517,17 @@ is set, use
|
||||
.IR $XDG_CONFIG_HOME/fd/ignore .
|
||||
Otherwise, use
|
||||
.IR $HOME/.config/fd/ignore .
|
||||
.SH FILES
|
||||
.TP
|
||||
.B .fdignore
|
||||
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
|
||||
that should be excluded from the search. However, this file is specific to fd, and will be used even
|
||||
if the --no-ignore-vcs option is used.
|
||||
.TP
|
||||
.B $XDG_CONFIG_HOME/fd/ignore
|
||||
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore)
|
||||
ignore entries in this file will be ignored, as if it was an .fdignore file in the
|
||||
current directory.
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
.RI "Find files and directories that match the pattern '" needle "':"
|
||||
@ -461,6 +541,16 @@ $ fd -e py
|
||||
.TP
|
||||
.RI "Open all search results with vim:"
|
||||
$ fd pattern -X vim
|
||||
.SH Tips and Tricks
|
||||
.IP \[bu]
|
||||
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
|
||||
".git" folders will be ignored by default, even when the --hidden option is used.
|
||||
.IP \[bu]
|
||||
You can use a shell alias or a wrapper script in order to pass desired flags to fd
|
||||
by default. For example if you do not like the default behavior of respecting gitignore,
|
||||
you can use
|
||||
`alias fd="/usr/bin/fd --no-ignore-vcs"`
|
||||
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
|
||||
.SH BUGS
|
||||
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
|
||||
.SH SEE ALSO
|
||||
|
2
doc/release-checklist.md
vendored
2
doc/release-checklist.md
vendored
@ -9,7 +9,7 @@ necessary changes for the upcoming release.
|
||||
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
|
||||
Make sure to `git add` the `Cargo.lock` changes as well.
|
||||
- [ ] Find the current min. supported Rust version by running
|
||||
`grep '^\s*MIN_SUPPORTED_RUST_VERSION' .github/workflows/CICD.yml`.
|
||||
`grep rust-version Cargo.toml`.
|
||||
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
|
||||
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
|
||||
to the version of this release.
|
||||
|
12
doc/sponsors.md
vendored
Normal file
12
doc/sponsors.md
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
## Sponsors
|
||||
|
||||
`fd` development is sponsored by many individuals and companies. Thank you very much!
|
||||
|
||||
Please note, that being sponsored does not affect the individuality of the `fd`
|
||||
project or affect the maintainers' actions in any way.
|
||||
We remain impartial and continue to assess pull requests solely on merit - the
|
||||
features added, bugs solved, and effect on the overall complexity of the code.
|
||||
No issue will have a different priority based on sponsorship status of the
|
||||
reporter.
|
||||
|
||||
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.
|
5
doc/sponsors/terminal_trove_green.svg
vendored
Normal file
5
doc/sponsors/terminal_trove_green.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 7.2 KiB |
1
rustfmt.toml
Normal file
1
rustfmt.toml
Normal file
@ -0,0 +1 @@
|
||||
# Defaults are used
|
134
scripts/create-deb.sh
Executable file
134
scripts/create-deb.sh
Executable file
@ -0,0 +1,134 @@
|
||||
#!/bin/bash
|
||||
COPYRIGHT_YEARS="2018 - "$(date "+%Y")
|
||||
MAINTAINER="David Peter <mail@david-peter.de>"
|
||||
REPO="https://github.com/sharkdp/fd"
|
||||
DPKG_STAGING="${CICD_INTERMEDIATES_DIR:-.}/debian-package"
|
||||
DPKG_DIR="${DPKG_STAGING}/dpkg"
|
||||
mkdir -p "${DPKG_DIR}"
|
||||
|
||||
if [[ -z "$TARGET" ]]; then
|
||||
TARGET="$(rustc -vV | sed -n 's|host: \(.*\)|\1|p')"
|
||||
fi
|
||||
|
||||
case "$TARGET" in
|
||||
*-musl*)
|
||||
DPKG_BASENAME=fd-musl
|
||||
DPKG_CONFLICTS="fd, fd-find"
|
||||
;;
|
||||
*)
|
||||
DPKG_BASENAME=fd
|
||||
DPKG_CONFLICTS="fd-musl, fd-find"
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ -z "$DPKG_VERSION" ]]; then
|
||||
DPKG_VERSION=$(cargo metadata --no-deps --format-version 1 | jq -r .packages[0].version)
|
||||
fi
|
||||
|
||||
unset DPKG_ARCH
|
||||
case "${TARGET}" in
|
||||
aarch64-*-linux-*) DPKG_ARCH=arm64 ;;
|
||||
arm-*-linux-*hf) DPKG_ARCH=armhf ;;
|
||||
i686-*-linux-*) DPKG_ARCH=i686 ;;
|
||||
x86_64-*-linux-*) DPKG_ARCH=amd64 ;;
|
||||
*) DPKG_ARCH=notset ;;
|
||||
esac;
|
||||
|
||||
DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb"
|
||||
|
||||
BIN_PATH=${BIN_PATH:-target/${TARGET}/release/fd}
|
||||
|
||||
# Binary
|
||||
install -Dm755 "${BIN_PATH}" "${DPKG_DIR}/usr/bin/fd"
|
||||
|
||||
# Man page
|
||||
install -Dm644 'doc/fd.1' "${DPKG_DIR}/usr/share/man/man1/fd.1"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/fd.1"
|
||||
|
||||
# Autocompletion files
|
||||
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/fd"
|
||||
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/fd.fish"
|
||||
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_fd"
|
||||
|
||||
# README and LICENSE
|
||||
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
|
||||
install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT"
|
||||
install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE"
|
||||
install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
|
||||
# Create symlinks so fdfind can be used as well:
|
||||
ln -s "/usr/bin/fd" "${DPKG_DIR}/usr/bin/fdfind"
|
||||
ln -s './fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/fdfind"
|
||||
ln -s './fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/fdfind.fish"
|
||||
ln -s './_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_fdfind"
|
||||
|
||||
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: fd
|
||||
Source: ${REPO}
|
||||
|
||||
Files: *
|
||||
Copyright: ${MAINTAINER}
|
||||
Copyright: $COPYRIGHT_YEARS ${MAINTAINER}
|
||||
License: Apache-2.0 or MIT
|
||||
|
||||
License: Apache-2.0
|
||||
On Debian systems, the complete text of the Apache-2.0 can be found in the
|
||||
file /usr/share/common-licenses/Apache-2.0.
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
EOF
|
||||
chmod 644 "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright"
|
||||
|
||||
# control file
|
||||
mkdir -p "${DPKG_DIR}/DEBIAN"
|
||||
cat > "${DPKG_DIR}/DEBIAN/control" <<EOF
|
||||
Package: ${DPKG_BASENAME}
|
||||
Version: ${DPKG_VERSION}
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: ${MAINTAINER}
|
||||
Homepage: ${REPO}
|
||||
Architecture: ${DPKG_ARCH}
|
||||
Provides: fd
|
||||
Conflicts: ${DPKG_CONFLICTS}
|
||||
Description: simple, fast and user-friendly alternative to find
|
||||
fd is a program to find entries in your filesystem.
|
||||
It is a simple, fast and user-friendly alternative to find.
|
||||
While it does not aim to support all of finds powerful functionality, it provides
|
||||
sensible (opinionated) defaults for a majority of use cases.
|
||||
EOF
|
||||
|
||||
DPKG_PATH="${DPKG_STAGING}/${DPKG_NAME}"
|
||||
|
||||
if [[ -n $GITHUB_OUTPUT ]]; then
|
||||
echo "DPKG_NAME=${DPKG_NAME}" >> "$GITHUB_OUTPUT"
|
||||
echo "DPKG_PATH=${DPKG_PATH}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
# build dpkg
|
||||
fakeroot dpkg-deb --build "${DPKG_DIR}" "${DPKG_PATH}"
|
22
scripts/version-bump.sh
Executable file
22
scripts/version-bump.sh
Executable file
@ -0,0 +1,22 @@
|
||||
#!/usr/bin/bash
|
||||
|
||||
set -eu
|
||||
|
||||
# This script automates the "Version bump" section
|
||||
|
||||
version="$1"
|
||||
|
||||
if [[ -z $version ]]; then
|
||||
echo "Usage: must supply version as first argument" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git switch -C "release-$version"
|
||||
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
|
||||
|
||||
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
|
||||
|
||||
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
|
||||
|
||||
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md
|
||||
|
184
src/cli.rs
184
src/cli.rs
@ -1,3 +1,4 @@
|
||||
use std::num::NonZeroUsize;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Duration;
|
||||
|
||||
@ -26,12 +27,14 @@ use crate::filter::SizeFilter;
|
||||
max_term_width = 98,
|
||||
args_override_self = true,
|
||||
group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
|
||||
"max_results", "has_results", "count"])),
|
||||
"max_results", "quiet", "max_one_result"])),
|
||||
)]
|
||||
pub struct Opts {
|
||||
/// Include hidden directories and files in the search results (default:
|
||||
/// hidden files and directories are skipped). Files and directories are
|
||||
/// considered to be hidden if their name starts with a `.` sign (dot).
|
||||
/// Any files or directories that are ignored due to the rules described by
|
||||
/// --no-ignore are still ignored unless otherwise specified.
|
||||
/// The flag can be overridden with --no-hidden.
|
||||
#[arg(
|
||||
long,
|
||||
@ -46,7 +49,7 @@ pub struct Opts {
|
||||
no_hidden: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,
|
||||
/// The flag can be overridden with --ignore.
|
||||
#[arg(
|
||||
long,
|
||||
@ -60,8 +63,9 @@ pub struct Opts {
|
||||
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore: (),
|
||||
|
||||
///Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore' files. The flag can be overridden with --ignore-vcs.
|
||||
///Show search results from files and directories that
|
||||
///would otherwise be ignored by '.gitignore' files.
|
||||
///The flag can be overridden with --ignore-vcs.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
@ -74,6 +78,28 @@ pub struct Opts {
|
||||
#[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore_vcs: (),
|
||||
|
||||
/// Do not require a git repository to respect gitignores.
|
||||
/// By default, fd will only respect global gitignore rules, .gitignore rules,
|
||||
/// and local exclude rules if fd detects that you are searching inside a
|
||||
/// git repository. This flag allows you to relax this restriction such that
|
||||
/// fd will respect all git related ignore rules regardless of whether you're
|
||||
/// searching in a git repository or not.
|
||||
///
|
||||
///
|
||||
/// This flag can be disabled with --require-git.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with = "require_git",
|
||||
hide_short_help = true,
|
||||
// same description as ripgrep's flag: ripgrep/crates/core/app.rs
|
||||
long_help
|
||||
)]
|
||||
pub no_require_git: bool,
|
||||
|
||||
/// Overrides --no-require-git
|
||||
#[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
|
||||
require_git: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
|
||||
#[arg(
|
||||
@ -200,7 +226,7 @@ pub struct Opts {
|
||||
alias = "dereference",
|
||||
long_help = "By default, fd does not descend into symlinked directories. Using this \
|
||||
flag, symbolic links are also traversed. \
|
||||
Flag can be overriden with --no-follow."
|
||||
Flag can be overridden with --no-follow."
|
||||
)]
|
||||
pub follow: bool,
|
||||
|
||||
@ -287,10 +313,12 @@ pub struct Opts {
|
||||
|
||||
/// Filter the search by type:
|
||||
/// {n} 'f' or 'file': regular files
|
||||
/// {n} 'd' or 'directory': directories
|
||||
/// {n} 'd' or 'dir' or 'directory': directories
|
||||
/// {n} 'l' or 'symlink': symbolic links
|
||||
/// {n} 's' or 'socket': socket
|
||||
/// {n} 'p' or 'pipe': named pipe (FIFO)
|
||||
/// {n} 'b' or 'block-device': block device
|
||||
/// {n} 'c' or 'char-device': character device
|
||||
/// {n}{n} 'x' or 'executable': executables
|
||||
/// {n} 'e' or 'empty': empty files or directories
|
||||
///
|
||||
@ -323,8 +351,9 @@ pub struct Opts {
|
||||
value_name = "filetype",
|
||||
hide_possible_values = true,
|
||||
value_enum,
|
||||
help = "Filter by type: file (f), directory (d), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p)",
|
||||
help = "Filter by type: file (f), directory (d/dir), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p), \
|
||||
char-device (c), block-device (b)",
|
||||
long_help
|
||||
)]
|
||||
pub filetype: Option<Vec<FileType>>,
|
||||
@ -369,7 +398,7 @@ pub struct Opts {
|
||||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// greater than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// If the time is not specified, it defaults to 00:00:00.
|
||||
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
|
||||
///
|
||||
@ -391,7 +420,7 @@ pub struct Opts {
|
||||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// less than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// '--change-older-than' or '--older' can be used as aliases.
|
||||
///
|
||||
/// Examples:
|
||||
@ -423,6 +452,20 @@ pub struct Opts {
|
||||
)]
|
||||
pub owner: Option<OwnerFilter>,
|
||||
|
||||
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
|
||||
/// '{}': path (of the current search result)
|
||||
/// '{/}': basename
|
||||
/// '{//}': parent directory
|
||||
/// '{.}': path without file extension
|
||||
/// '{/.}': basename without file extension
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "fmt",
|
||||
help = "Print results according to template",
|
||||
conflicts_with = "list_details"
|
||||
)]
|
||||
pub format: Option<String>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub exec: Exec,
|
||||
|
||||
@ -466,10 +509,28 @@ pub struct Opts {
|
||||
)]
|
||||
pub color: ColorWhen,
|
||||
|
||||
/// Add a terminal hyperlink to a file:// url for each path in the output.
|
||||
///
|
||||
/// Auto mode is used if no argument is given to this option.
|
||||
///
|
||||
/// This doesn't do anything for --exec and --exec-batch.
|
||||
#[arg(
|
||||
long,
|
||||
alias = "hyper",
|
||||
value_name = "when",
|
||||
require_equals = true,
|
||||
value_enum,
|
||||
default_value_t = HyperlinkWhen::Never,
|
||||
default_missing_value = "auto",
|
||||
num_args = 0..=1,
|
||||
help = "Add hyperlinks to output paths"
|
||||
)]
|
||||
pub hyperlink: HyperlinkWhen,
|
||||
|
||||
/// Set number of threads to use for searching & executing (default: number
|
||||
/// of available CPU cores)
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = clap::value_parser!(u32).range(1..))]
|
||||
pub threads: Option<u32>,
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
|
||||
pub threads: Option<NonZeroUsize>,
|
||||
|
||||
/// Milliseconds to buffer before streaming search results to console
|
||||
///
|
||||
@ -483,6 +544,7 @@ pub struct Opts {
|
||||
long,
|
||||
value_name = "count",
|
||||
hide_short_help = true,
|
||||
overrides_with("max_one_result"),
|
||||
help = "Limit the number of search results",
|
||||
long_help
|
||||
)]
|
||||
@ -587,9 +649,10 @@ pub struct Opts {
|
||||
/// By default, relative paths are prefixed with './' when -x/--exec,
|
||||
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
/// path starting with '-' being treated as a command line option. Use
|
||||
/// this flag to disable this behaviour.
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), hide_short_help = true, long_help)]
|
||||
pub strip_cwd_prefix: bool,
|
||||
/// this flag to change this behavior. If this flag is used without a value,
|
||||
/// it is equivalent to passing "always".
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)]
|
||||
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
|
||||
|
||||
/// By default, fd will traverse the file system tree as far as other options
|
||||
/// dictate. With this flag, fd ensures that it does not descend into a
|
||||
@ -612,7 +675,7 @@ impl Opts {
|
||||
} else if !self.search_path.is_empty() {
|
||||
&self.search_path
|
||||
} else {
|
||||
let current_directory = Path::new(".");
|
||||
let current_directory = Path::new("./");
|
||||
ensure_current_directory_exists(current_directory)?;
|
||||
return Ok(vec![self.normalize_path(current_directory)]);
|
||||
};
|
||||
@ -635,6 +698,9 @@ impl Opts {
|
||||
fn normalize_path(&self, path: &Path) -> PathBuf {
|
||||
if self.absolute_path {
|
||||
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
|
||||
} else if path == Path::new(".") {
|
||||
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
|
||||
PathBuf::from("./")
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
@ -657,23 +723,24 @@ impl Opts {
|
||||
self.min_depth.or(self.exact_depth)
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> usize {
|
||||
// This will panic if the number of threads passed in is more than usize::MAX in an environment
|
||||
// where usize is less than 32 bits (for example 16-bit architectures). It's pretty
|
||||
// unlikely fd will be running in such an environment, and even more unlikely someone would
|
||||
// be trying to use that many threads on such an environment, so I think panicing is an
|
||||
// appropriate way to handle that.
|
||||
std::cmp::max(
|
||||
self.threads
|
||||
.map_or_else(num_cpus::get, |n| n.try_into().expect("too many threads")),
|
||||
1,
|
||||
)
|
||||
pub fn threads(&self) -> NonZeroUsize {
|
||||
self.threads.unwrap_or_else(default_num_threads)
|
||||
}
|
||||
|
||||
pub fn max_results(&self) -> Option<usize> {
|
||||
self.max_results
|
||||
.filter(|&m| m > 0)
|
||||
.or_else(|| self.max_one_result.then(|| 1))
|
||||
.or_else(|| self.max_one_result.then_some(1))
|
||||
}
|
||||
|
||||
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
|
||||
use self::StripCwdWhen::*;
|
||||
self.no_search_paths()
|
||||
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
|
||||
Auto => auto_pred(),
|
||||
Always => true,
|
||||
Never => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
@ -689,14 +756,32 @@ impl Opts {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the default number of threads to use, if not explicitly specified.
|
||||
fn default_num_threads() -> NonZeroUsize {
|
||||
// If we can't get the amount of parallelism for some reason, then
|
||||
// default to a single thread, because that is safe.
|
||||
let fallback = NonZeroUsize::MIN;
|
||||
// To limit startup overhead on massively parallel machines, don't use more
|
||||
// than 64 threads.
|
||||
let limit = NonZeroUsize::new(64).unwrap();
|
||||
|
||||
std::thread::available_parallelism()
|
||||
.unwrap_or(fallback)
|
||||
.min(limit)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
|
||||
pub enum FileType {
|
||||
#[value(alias = "f")]
|
||||
File,
|
||||
#[value(alias = "d")]
|
||||
#[value(alias = "d", alias = "dir")]
|
||||
Directory,
|
||||
#[value(alias = "l")]
|
||||
Symlink,
|
||||
#[value(alias = "b")]
|
||||
BlockDevice,
|
||||
#[value(alias = "c")]
|
||||
CharDevice,
|
||||
/// A file which is executable by the current effective user
|
||||
#[value(alias = "x")]
|
||||
Executable,
|
||||
@ -718,15 +803,24 @@ pub enum ColorWhen {
|
||||
Never,
|
||||
}
|
||||
|
||||
impl ColorWhen {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
use ColorWhen::*;
|
||||
match *self {
|
||||
Auto => "auto",
|
||||
Never => "never",
|
||||
Always => "always",
|
||||
}
|
||||
}
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
|
||||
pub enum StripCwdWhen {
|
||||
/// Use the default behavior
|
||||
Auto,
|
||||
/// Always strip the ./ at the beginning of paths
|
||||
Always,
|
||||
/// Never strip the ./
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
|
||||
pub enum HyperlinkWhen {
|
||||
/// Use hyperlinks only if color is enabled
|
||||
Auto,
|
||||
/// Always use hyperlinks when printing file paths
|
||||
Always,
|
||||
/// Never use hyperlinks
|
||||
Never,
|
||||
}
|
||||
|
||||
// there isn't a derive api for getting grouped values yet,
|
||||
@ -738,11 +832,11 @@ pub struct Exec {
|
||||
impl clap::FromArgMatches for Exec {
|
||||
fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {
|
||||
let command = matches
|
||||
.grouped_values_of("exec")
|
||||
.get_occurrences::<String>("exec")
|
||||
.map(CommandSet::new)
|
||||
.or_else(|| {
|
||||
matches
|
||||
.grouped_values_of("exec_batch")
|
||||
.get_occurrences::<String>("exec_batch")
|
||||
.map(CommandSet::new_batch)
|
||||
})
|
||||
.transpose()
|
||||
@ -770,6 +864,7 @@ impl clap::Args for Exec {
|
||||
.help("Execute a command for each search result")
|
||||
.long_help(
|
||||
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
|
||||
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
|
||||
All positional arguments following --exec are considered to be arguments to the command - not to fd. \
|
||||
It is therefore recommended to place the '-x'/'--exec' option last.\n\
|
||||
The following placeholders are substituted before the command is executed:\n \
|
||||
@ -777,7 +872,9 @@ impl clap::Args for Exec {
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n\n\
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- find all *.zip files and unzip them:\n\n \
|
||||
@ -802,12 +899,15 @@ impl clap::Args for Exec {
|
||||
.help("Execute a command with all search results at once")
|
||||
.long_help(
|
||||
"Execute the given command once, with all search results as arguments.\n\
|
||||
The order of the arguments is non-deterministic, and should not be relied upon.\n\
|
||||
One of the following placeholders is substituted before the command is executed:\n \
|
||||
'{}': path (of all search results)\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n\n\
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- Find all test_*.py files and open them in your favorite editor:\n\n \
|
||||
|
@ -8,6 +8,7 @@ use crate::filetypes::FileTypes;
|
||||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::{SizeFilter, TimeFilter};
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
/// Configuration options for *fd*.
|
||||
pub struct Config {
|
||||
@ -30,6 +31,9 @@ pub struct Config {
|
||||
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
|
||||
pub read_vcsignore: bool,
|
||||
|
||||
/// Whether to require a `.git` directory to respect gitignore files.
|
||||
pub require_git_to_read_vcsignore: bool,
|
||||
|
||||
/// Whether to respect the global ignore file or not.
|
||||
pub read_global_ignore: bool,
|
||||
|
||||
@ -71,6 +75,7 @@ pub struct Config {
|
||||
pub ls_colors: Option<LsColors>,
|
||||
|
||||
/// Whether or not we are writing to an interactive terminal
|
||||
#[cfg_attr(not(unix), allow(unused))]
|
||||
pub interactive_terminal: bool,
|
||||
|
||||
/// The type of file to search for. If set to `None`, all file types are displayed. If
|
||||
@ -82,6 +87,9 @@ pub struct Config {
|
||||
/// The value (if present) will be a lowercase string without leading dots.
|
||||
pub extensions: Option<RegexSet>,
|
||||
|
||||
/// A format string to use to format results, similarly to exec
|
||||
pub format: Option<FormatTemplate>,
|
||||
|
||||
/// If a value is supplied, each item found will be used to generate and execute commands.
|
||||
pub command: Option<Arc<CommandSet>>,
|
||||
|
||||
@ -119,6 +127,9 @@ pub struct Config {
|
||||
|
||||
/// Whether or not to strip the './' prefix for search results
|
||||
pub strip_cwd_prefix: bool,
|
||||
|
||||
/// Whether or not to use hyperlinks on paths
|
||||
pub hyperlink: bool,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
|
@ -1,19 +1,20 @@
|
||||
use std::cell::OnceCell;
|
||||
use std::ffi::OsString;
|
||||
use std::fs::{FileType, Metadata};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use lscolors::{Colorable, LsColors, Style};
|
||||
|
||||
use once_cell::unsync::OnceCell;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum DirEntryInner {
|
||||
Normal(ignore::DirEntry),
|
||||
BrokenSymlink(PathBuf),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DirEntry {
|
||||
inner: DirEntryInner,
|
||||
metadata: OnceCell<Option<Metadata>>,
|
||||
@ -112,7 +113,7 @@ impl Eq for DirEntry {}
|
||||
impl PartialOrd for DirEntry {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
self.path().partial_cmp(other.path())
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,9 +1,6 @@
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use crossbeam_channel::Receiver;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::walk::WorkerResult;
|
||||
@ -14,43 +11,47 @@ use super::CommandSet;
|
||||
/// generate a command with the supplied command template. The generated command will then
|
||||
/// be executed, and this process will continue until the receiver's sender has closed.
|
||||
pub fn job(
|
||||
rx: Receiver<WorkerResult>,
|
||||
cmd: Arc<CommandSet>,
|
||||
out_perm: Arc<Mutex<()>>,
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
out_perm: &Mutex<()>,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
// Output should be buffered when only running a single thread
|
||||
let buffer_output: bool = config.threads > 1;
|
||||
|
||||
let mut results: Vec<ExitCode> = Vec::new();
|
||||
loop {
|
||||
let mut ret = ExitCode::Success;
|
||||
for result in results {
|
||||
// Obtain the next result from the receiver, else if the channel
|
||||
// has closed, exit from the loop
|
||||
let dir_entry: DirEntry = match rx.recv() {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => dir_entry,
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
let dir_entry = match result {
|
||||
WorkerResult::Entry(dir_entry) => dir_entry,
|
||||
WorkerResult::Error(err) => {
|
||||
if config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Err(_) => break,
|
||||
};
|
||||
|
||||
// Generate a command, execute it and store its exit code.
|
||||
results.push(cmd.execute(
|
||||
let code = cmd.execute(
|
||||
dir_entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
Arc::clone(&out_perm),
|
||||
out_perm,
|
||||
buffer_output,
|
||||
))
|
||||
);
|
||||
ret = merge_exitcodes([ret, code]);
|
||||
}
|
||||
// Returns error in case of any error.
|
||||
merge_exitcodes(results)
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn batch(rx: Receiver<WorkerResult>, cmd: &CommandSet, config: &Config) -> ExitCode {
|
||||
let paths = rx
|
||||
pub fn batch(
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
let paths = results
|
||||
.into_iter()
|
||||
.filter_map(|worker_result| match worker_result {
|
||||
WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),
|
||||
|
239
src/exec/mod.rs
239
src/exec/mod.rs
@ -1,27 +1,21 @@
|
||||
mod command;
|
||||
mod input;
|
||||
mod job;
|
||||
mod token;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::ffi::OsString;
|
||||
use std::io;
|
||||
use std::iter;
|
||||
use std::path::{Component, Path, PathBuf, Prefix};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use argmax::Command;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::fmt::{FormatTemplate, Token};
|
||||
|
||||
use self::command::{execute_commands, handle_cmd_error};
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
pub use self::job::{batch, job};
|
||||
use self::token::Token;
|
||||
|
||||
/// Execution mode of the command
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
@ -39,9 +33,10 @@ pub struct CommandSet {
|
||||
}
|
||||
|
||||
impl CommandSet {
|
||||
pub fn new<I, S>(input: I) -> Result<CommandSet>
|
||||
pub fn new<I, T, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = Vec<S>>,
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
@ -53,9 +48,10 @@ impl CommandSet {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_batch<I, S>(input: I) -> Result<CommandSet>
|
||||
pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = Vec<S>>,
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
@ -84,14 +80,14 @@ impl CommandSet {
|
||||
&self,
|
||||
input: &Path,
|
||||
path_separator: Option<&str>,
|
||||
out_perm: Arc<Mutex<()>>,
|
||||
out_perm: &Mutex<()>,
|
||||
buffer_output: bool,
|
||||
) -> ExitCode {
|
||||
let commands = self
|
||||
.commands
|
||||
.iter()
|
||||
.map(|c| c.generate(input, path_separator));
|
||||
execute_commands(commands, &out_perm, buffer_output)
|
||||
execute_commands(commands, out_perm, buffer_output)
|
||||
}
|
||||
|
||||
pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode
|
||||
@ -131,7 +127,7 @@ impl CommandSet {
|
||||
#[derive(Debug)]
|
||||
struct CommandBuilder {
|
||||
pre_args: Vec<OsString>,
|
||||
path_arg: ArgumentTemplate,
|
||||
path_arg: FormatTemplate,
|
||||
post_args: Vec<OsString>,
|
||||
cmd: Command,
|
||||
count: usize,
|
||||
@ -220,7 +216,7 @@ impl CommandBuilder {
|
||||
/// `generate_and_execute()` method will be used to generate a command and execute it.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
struct CommandTemplate {
|
||||
args: Vec<ArgumentTemplate>,
|
||||
args: Vec<FormatTemplate>,
|
||||
}
|
||||
|
||||
impl CommandTemplate {
|
||||
@ -229,50 +225,15 @@ impl CommandTemplate {
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
static PLACEHOLDER_PATTERN: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"\{(/?\.?|//)\}").unwrap());
|
||||
|
||||
let mut args = Vec::new();
|
||||
let mut has_placeholder = false;
|
||||
|
||||
for arg in input {
|
||||
let arg = arg.as_ref();
|
||||
|
||||
let mut tokens = Vec::new();
|
||||
let mut start = 0;
|
||||
|
||||
for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) {
|
||||
// Leading text before the placeholder.
|
||||
if placeholder.start() > start {
|
||||
tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
|
||||
}
|
||||
|
||||
start = placeholder.end();
|
||||
|
||||
match placeholder.as_str() {
|
||||
"{}" => tokens.push(Token::Placeholder),
|
||||
"{.}" => tokens.push(Token::NoExt),
|
||||
"{/}" => tokens.push(Token::Basename),
|
||||
"{//}" => tokens.push(Token::Parent),
|
||||
"{/.}" => tokens.push(Token::BasenameNoExt),
|
||||
_ => unreachable!("Unhandled placeholder"),
|
||||
}
|
||||
|
||||
has_placeholder = true;
|
||||
}
|
||||
|
||||
// Without a placeholder, the argument is just fixed text.
|
||||
if tokens.is_empty() {
|
||||
args.push(ArgumentTemplate::Text(arg.to_owned()));
|
||||
continue;
|
||||
}
|
||||
|
||||
if start < arg.len() {
|
||||
// Trailing text after last placeholder.
|
||||
tokens.push(Token::Text(arg[start..].to_owned()));
|
||||
}
|
||||
|
||||
args.push(ArgumentTemplate::Tokens(tokens));
|
||||
let tmpl = FormatTemplate::parse(arg);
|
||||
has_placeholder |= tmpl.has_tokens();
|
||||
args.push(tmpl);
|
||||
}
|
||||
|
||||
// We need to check that we have at least one argument, because if not
|
||||
@ -286,7 +247,7 @@ impl CommandTemplate {
|
||||
|
||||
// If a placeholder token was not supplied, append one at the end of the command.
|
||||
if !has_placeholder {
|
||||
args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
|
||||
args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));
|
||||
}
|
||||
|
||||
Ok(CommandTemplate { args })
|
||||
@ -309,115 +270,18 @@ impl CommandTemplate {
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a template for a single command argument.
|
||||
///
|
||||
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
|
||||
/// a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
enum ArgumentTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl ArgumentTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, ArgumentTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
/// Generate an argument from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Text arguments and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use self::Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
ArgumentTemplate::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match *token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
ArgumentTemplate::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
|
||||
template
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| arg.generate(input, None).into_string().unwrap())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_placeholder() {
|
||||
assert_eq!(
|
||||
@ -425,9 +289,9 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Text("${SHELL}:".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Text("${SHELL}:".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
]
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
@ -442,8 +306,8 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
@ -458,8 +322,8 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Basename]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Basename]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
@ -474,8 +338,8 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Parent]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Parent]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
@ -490,8 +354,8 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
@ -499,6 +363,21 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces() {
|
||||
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
|
||||
assert_eq!(
|
||||
generate_str(&template, "foo"),
|
||||
vec!["{}", "{", "{.}", "foo"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces_and_placeholder() {
|
||||
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
|
||||
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_multiple() {
|
||||
assert_eq!(
|
||||
@ -506,9 +385,9 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("cp".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
ArgumentTemplate::Tokens(vec![
|
||||
FormatTemplate::Text("cp".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Tokens(vec![
|
||||
Token::BasenameNoExt,
|
||||
Token::Text(".ext".into())
|
||||
]),
|
||||
@ -526,8 +405,8 @@ mod tests {
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::Batch,
|
||||
@ -552,7 +431,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn generate_custom_path_separator() {
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
@ -567,7 +446,7 @@ mod tests {
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn generate_custom_path_separator_windows() {
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
|
@ -1,29 +0,0 @@
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
@ -59,6 +59,26 @@ pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_block_device(ft: fs::FileType) -> bool {
|
||||
ft.is_block_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_block_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_char_device(ft: fs::FileType) -> bool {
|
||||
ft.is_char_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_char_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_socket(ft: fs::FileType) -> bool {
|
||||
ft.is_socket()
|
||||
@ -108,13 +128,11 @@ pub fn strip_current_dir(path: &Path) -> &Path {
|
||||
pub fn default_path_separator() -> Option<String> {
|
||||
if cfg!(windows) {
|
||||
let msystem = env::var("MSYSTEM").ok()?;
|
||||
match msystem.as_str() {
|
||||
"MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()),
|
||||
_ => None,
|
||||
if !msystem.is_empty() {
|
||||
return Some("/".to_owned());
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -9,6 +9,8 @@ pub struct FileTypes {
|
||||
pub files: bool,
|
||||
pub directories: bool,
|
||||
pub symlinks: bool,
|
||||
pub block_devices: bool,
|
||||
pub char_devices: bool,
|
||||
pub sockets: bool,
|
||||
pub pipes: bool,
|
||||
pub executables_only: bool,
|
||||
@ -21,6 +23,8 @@ impl FileTypes {
|
||||
(!self.files && entry_type.is_file())
|
||||
|| (!self.directories && entry_type.is_dir())
|
||||
|| (!self.symlinks && entry_type.is_symlink())
|
||||
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|
||||
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|
||||
|| (!self.sockets && filesystem::is_socket(*entry_type))
|
||||
|| (!self.pipes && filesystem::is_pipe(*entry_type))
|
||||
|| (self.executables_only && !entry.path().executable())
|
||||
@ -28,6 +32,8 @@ impl FileTypes {
|
||||
|| !(entry_type.is_file()
|
||||
|| entry_type.is_dir()
|
||||
|| entry_type.is_symlink()
|
||||
|| filesystem::is_block_device(*entry_type)
|
||||
|| filesystem::is_char_device(*entry_type)
|
||||
|| filesystem::is_socket(*entry_type)
|
||||
|| filesystem::is_pipe(*entry_type))
|
||||
} else {
|
||||
|
@ -1,4 +1,5 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use nix::unistd::{Group, User};
|
||||
use std::fs;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
@ -35,16 +36,22 @@ impl OwnerFilter {
|
||||
}
|
||||
|
||||
let uid = Check::parse(fst, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_user_by_name(s).map(|user| user.uid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
if let Ok(uid) = s.parse() {
|
||||
Ok(uid)
|
||||
} else {
|
||||
User::from_name(s)?
|
||||
.map(|user| user.uid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
}
|
||||
})?;
|
||||
let gid = Check::parse(snd, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_group_by_name(s).map(|group| group.gid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
if let Ok(gid) = s.parse() {
|
||||
Ok(gid)
|
||||
} else {
|
||||
Group::from_name(s)?
|
||||
.map(|group| group.gid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(OwnerFilter { uid, gid })
|
||||
|
@ -1,9 +1,9 @@
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
static SIZE_CAPTURES: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
|
||||
static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new();
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum SizeFilter {
|
||||
@ -31,11 +31,13 @@ impl SizeFilter {
|
||||
}
|
||||
|
||||
fn parse_opt(s: &str) -> Option<Self> {
|
||||
if !SIZE_CAPTURES.is_match(s) {
|
||||
let pattern =
|
||||
SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
|
||||
if !pattern.is_match(s) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let captures = SIZE_CAPTURES.captures(s)?;
|
||||
let captures = pattern.captures(s)?;
|
||||
let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
|
||||
let quantity = captures
|
||||
.get(2)
|
||||
|
@ -1,4 +1,4 @@
|
||||
use chrono::{offset::TimeZone, DateTime, Local, NaiveDate};
|
||||
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime};
|
||||
|
||||
use std::time::SystemTime;
|
||||
|
||||
@ -20,11 +20,21 @@ impl TimeFilter {
|
||||
.ok()
|
||||
.or_else(|| {
|
||||
NaiveDate::parse_from_str(s, "%F")
|
||||
.ok()
|
||||
.and_then(|nd| nd.and_hms_opt(0, 0, 0))
|
||||
.and_then(|ndt| Local.from_local_datetime(&ndt).single())
|
||||
.ok()?
|
||||
.and_hms_opt(0, 0, 0)?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
NaiveDateTime::parse_from_str(s, "%F %T")
|
||||
.ok()?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
|
||||
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
|
||||
})
|
||||
.or_else(|| Local.datetime_from_str(s, "%F %T").ok())
|
||||
.map(|dt| dt.into())
|
||||
})
|
||||
}
|
||||
@ -52,8 +62,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn is_time_filter_applicable() {
|
||||
let ref_time = Local
|
||||
.datetime_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
.unwrap()
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
.unwrap()
|
||||
.into();
|
||||
|
||||
@ -127,5 +139,32 @@ mod tests {
|
||||
assert!(!TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
|
||||
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
|
||||
.unwrap()
|
||||
.into();
|
||||
let t1m_ago = ref_time - Duration::from_secs(60);
|
||||
let t1s_later = ref_time + Duration::from_secs(1);
|
||||
// Timestamp only supported via '@' prefix
|
||||
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
|
||||
assert!(
|
||||
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later));
|
||||
}
|
||||
}
|
||||
|
@ -34,10 +34,10 @@ pub fn dirname(path: &Path) -> OsString {
|
||||
#[cfg(test)]
|
||||
mod path_tests {
|
||||
use super::*;
|
||||
use std::path::MAIN_SEPARATOR;
|
||||
use std::path::MAIN_SEPARATOR_STR;
|
||||
|
||||
fn correct(input: &str) -> String {
|
||||
input.replace('/', &MAIN_SEPARATOR.to_string())
|
||||
input.replace('/', MAIN_SEPARATOR_STR)
|
||||
}
|
||||
|
||||
macro_rules! func_tests {
|
281
src/fmt/mod.rs
Normal file
281
src/fmt/mod.rs
Normal file
@ -0,0 +1,281 @@
|
||||
mod input;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::path::{Component, Path, Prefix};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use aho_corasick::AhoCorasick;
|
||||
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed format string
|
||||
///
|
||||
/// This is either a collection of `Token`s including at least one placeholder variant,
|
||||
/// or a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum FormatTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
|
||||
|
||||
impl FormatTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, FormatTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
pub fn parse(fmt: &str) -> Self {
|
||||
// NOTE: we assume that { and } have the same length
|
||||
const BRACE_LEN: usize = '{'.len_utf8();
|
||||
let mut tokens = Vec::new();
|
||||
let mut remaining = fmt;
|
||||
let mut buf = String::new();
|
||||
let placeholders = PLACEHOLDERS.get_or_init(|| {
|
||||
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
|
||||
});
|
||||
while let Some(m) = placeholders.find(remaining) {
|
||||
match m.pattern().as_u32() {
|
||||
0 | 1 => {
|
||||
// we found an escaped {{ or }}, so add
|
||||
// everything up to the first char to the buffer
|
||||
// then skip the second one.
|
||||
buf += &remaining[..m.start() + BRACE_LEN];
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
id if !remaining[m.end()..].starts_with('}') => {
|
||||
buf += &remaining[..m.start()];
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(std::mem::take(&mut buf)));
|
||||
}
|
||||
tokens.push(token_from_pattern_id(id));
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
_ => {
|
||||
// We got a normal pattern, but the final "}"
|
||||
// is escaped, so add up to that to the buffer, then
|
||||
// skip the final }
|
||||
buf += &remaining[..m.end()];
|
||||
remaining = &remaining[m.end() + BRACE_LEN..];
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add the rest of the string to the buffer, and add the final buffer to the tokens
|
||||
if !remaining.is_empty() {
|
||||
buf += remaining;
|
||||
}
|
||||
if tokens.is_empty() {
|
||||
// No placeholders were found, so just return the text
|
||||
return FormatTemplate::Text(buf);
|
||||
}
|
||||
// Add final text segment
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(buf));
|
||||
}
|
||||
debug_assert!(!tokens.is_empty());
|
||||
FormatTemplate::Tokens(tokens)
|
||||
}
|
||||
|
||||
/// Generate a result string from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
Self::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
Self::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the id from an aho-corasick match to the
|
||||
// appropriate token
|
||||
fn token_from_pattern_id(id: u32) -> Token {
|
||||
use Token::*;
|
||||
match id {
|
||||
2 => Placeholder,
|
||||
3 => Basename,
|
||||
4 => Parent,
|
||||
5 => NoExt,
|
||||
6 => BasenameNoExt,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod fmt_tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn parse_no_placeholders() {
        // A template without any `{...}` groups collapses to a single Text node.
        let template = FormatTemplate::parse("This string has no placeholders");
        assert_eq!(
            template,
            FormatTemplate::Text("This string has no placeholders".into())
        );
    }

    #[test]
    fn parse_only_brace_escapes() {
        // `{{` and `}}` escape literal braces and likewise collapse to Text.
        let template = FormatTemplate::parse("This string only has escapes like {{ and }}");
        assert_eq!(
            template,
            FormatTemplate::Text("This string only has escapes like { and }".into())
        );
    }

    #[test]
    fn all_placeholders() {
        use Token::*;

        // Exercise every supported placeholder in a single template.
        let template = FormatTemplate::parse(
            "{{path={} basename={/} parent={//} noExt={.} basenameNoExt={/.} }}",
        );
        let expected_tokens = vec![
            Text("{path=".into()),
            Placeholder,
            Text(" basename=".into()),
            Basename,
            Text(" parent=".into()),
            Parent,
            Text(" noExt=".into()),
            NoExt,
            Text(" basenameNoExt=".into()),
            BasenameNoExt,
            Text(" }".into()),
        ];
        assert_eq!(template, FormatTemplate::Tokens(expected_tokens));

        // Expand the template against a concrete path, forcing "/" as the
        // separator so the expected output is platform independent.
        let path: PathBuf = ["a", "folder", "file.txt"].iter().collect();
        let expanded = template.generate(&path, Some("/")).into_string().unwrap();
        assert_eq!(
            expanded,
            "{path=a/folder/file.txt basename=file.txt parent=a/folder noExt=a/folder/file basenameNoExt=file }"
        );
    }
}
|
87
src/hyperlink.rs
Normal file
87
src/hyperlink.rs
Normal file
@ -0,0 +1,87 @@
|
||||
use crate::filesystem::absolute_path;
|
||||
use std::fmt::{self, Formatter, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub(crate) struct PathUrl(PathBuf);
|
||||
|
||||
impl PathUrl {
|
||||
pub(crate) fn new(path: &Path) -> Option<PathUrl> {
|
||||
Some(PathUrl(absolute_path(path).ok()?))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PathUrl {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "file://{}", host())?;
|
||||
let bytes = self.0.as_os_str().as_encoded_bytes();
|
||||
for &byte in bytes.iter() {
|
||||
encode(f, byte)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a single path byte into `f`, percent-encoding it when necessary.
///
/// ASCII alphanumerics and a small set of URL-safe punctuation characters
/// (`/ : - . _ ~`) are written verbatim; every other byte is emitted as
/// `%XX`. Bytes >= 128 are always percent-encoded: the path's bytes may not
/// be valid UTF-8 (notably on Windows), and since we cannot tell whether a
/// high byte belongs to a valid UTF-8 sequence, encoding each raw byte is
/// the safe choice. On Windows, backslashes are rewritten as forward
/// slashes so the URL uses the conventional separator.
fn encode(f: &mut Formatter, byte: u8) -> fmt::Result {
    #[cfg(windows)]
    {
        if byte == b'\\' {
            return f.write_char('/');
        }
    }

    let passthrough = byte.is_ascii_alphanumeric()
        || matches!(byte, b'/' | b':' | b'-' | b'.' | b'_' | b'~');

    if passthrough {
        f.write_char(char::from(byte))
    } else {
        write!(f, "%{:02X}", byte)
    }
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn host() -> &'static str {
|
||||
use std::sync::OnceLock;
|
||||
|
||||
static HOSTNAME: OnceLock<String> = OnceLock::new();
|
||||
|
||||
HOSTNAME
|
||||
.get_or_init(|| {
|
||||
nix::unistd::gethostname()
|
||||
.ok()
|
||||
.and_then(|h| h.into_string().ok())
|
||||
.unwrap_or_default()
|
||||
})
|
||||
.as_ref()
|
||||
}
|
||||
|
||||
/// The hostname used as the authority component of `file://` URLs.
///
/// On non-Unix platforms no hostname lookup is attempted and the authority
/// component is left empty.
#[cfg(not(unix))]
const fn host() -> &'static str {
    ""
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use super::*;

    /// Wraps a string so that `Display` feeds each of its bytes through
    /// `encode`, letting us test the encoding in isolation from the host
    /// name and absolute-path resolution.
    struct Encoded(&'static str);

    impl fmt::Display for Encoded {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            self.0.bytes().try_for_each(|byte| encode(f, byte))
        }
    }

    #[test]
    fn test_unicode_encoding() {
        // Mix of reserved ASCII, control bytes, multi-byte UTF-8, and a
        // passthrough '/' — everything but '/' must be percent-encoded.
        let encoded = Encoded("$*\x1bßé/∫😃\x07").to_string();
        assert_eq!(encoded, "%24%2A%1B%C3%9F%C3%A9/%E2%88%AB%F0%9F%98%83%07");
    }
}
|
47
src/main.rs
47
src/main.rs
@ -7,23 +7,25 @@ mod exit_codes;
|
||||
mod filesystem;
|
||||
mod filetypes;
|
||||
mod filter;
|
||||
mod fmt;
|
||||
mod hyperlink;
|
||||
mod output;
|
||||
mod regex_helper;
|
||||
mod walk;
|
||||
|
||||
use std::env;
|
||||
use std::io::IsTerminal;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use atty::Stream;
|
||||
use clap::{CommandFactory, Parser};
|
||||
use globset::GlobBuilder;
|
||||
use lscolors::LsColors;
|
||||
use regex::bytes::{Regex, RegexBuilder, RegexSetBuilder};
|
||||
|
||||
use crate::cli::{ColorWhen, Opts};
|
||||
use crate::cli::{ColorWhen, HyperlinkWhen, Opts};
|
||||
use crate::config::Config;
|
||||
use crate::exec::CommandSet;
|
||||
use crate::exit_codes::ExitCode;
|
||||
@ -40,6 +42,7 @@ use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_wi
|
||||
not(target_os = "android"),
|
||||
not(target_os = "macos"),
|
||||
not(target_os = "freebsd"),
|
||||
not(target_os = "openbsd"),
|
||||
not(all(target_env = "musl", target_pointer_width = "32")),
|
||||
not(target_arch = "riscv64"),
|
||||
feature = "use-jemalloc"
|
||||
@ -102,7 +105,7 @@ fn run() -> Result<ExitCode> {
|
||||
.map(|pat| build_regex(pat, &config))
|
||||
.collect::<Result<Vec<Regex>>>()?;
|
||||
|
||||
walk::scan(&search_paths, Arc::new(regexps), Arc::new(config))
|
||||
walk::scan(&search_paths, regexps, config)
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
@ -216,12 +219,14 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
#[cfg(not(windows))]
|
||||
let ansi_colors_support = true;
|
||||
|
||||
let interactive_terminal = atty::is(Stream::Stdout);
|
||||
let interactive_terminal = std::io::stdout().is_terminal();
|
||||
|
||||
let colored_output = match opts.color {
|
||||
ColorWhen::Always => true,
|
||||
ColorWhen::Never => false,
|
||||
ColorWhen::Auto => {
|
||||
ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal
|
||||
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty());
|
||||
ansi_colors_support && !no_color && interactive_terminal
|
||||
}
|
||||
};
|
||||
|
||||
@ -230,6 +235,11 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let hyperlink = match opts.hyperlink {
|
||||
HyperlinkWhen::Always => true,
|
||||
HyperlinkWhen::Never => false,
|
||||
HyperlinkWhen::Auto => colored_output,
|
||||
};
|
||||
let command = extract_command(&mut opts, colored_output)?;
|
||||
let has_command = command.is_some();
|
||||
|
||||
@ -239,8 +249,11 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),
|
||||
read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),
|
||||
read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),
|
||||
require_git_to_read_vcsignore: !opts.no_require_git,
|
||||
read_parent_ignore: !opts.no_ignore_parent,
|
||||
read_global_ignore: !opts.no_ignore || opts.rg_alias_ignore() || opts.no_global_ignore_file,
|
||||
read_global_ignore: !(opts.no_ignore
|
||||
|| opts.rg_alias_ignore()
|
||||
|| opts.no_global_ignore_file),
|
||||
follow_links: opts.follow,
|
||||
one_file_system: opts.one_file_system,
|
||||
null_separator: opts.null_separator,
|
||||
@ -248,9 +261,10 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
max_depth: opts.max_depth(),
|
||||
min_depth: opts.min_depth(),
|
||||
prune: opts.prune,
|
||||
threads: opts.threads(),
|
||||
threads: opts.threads().get(),
|
||||
max_buffer_time: opts.max_buffer_time,
|
||||
ls_colors,
|
||||
hyperlink,
|
||||
interactive_terminal,
|
||||
file_types: opts.filetype.as_ref().map(|values| {
|
||||
use crate::cli::FileType::*;
|
||||
@ -265,6 +279,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
file_types.files = true;
|
||||
}
|
||||
Empty => file_types.empty_only = true,
|
||||
BlockDevice => file_types.block_devices = true,
|
||||
CharDevice => file_types.char_devices = true,
|
||||
Socket => file_types.sockets = true,
|
||||
Pipe => file_types.pipes = true,
|
||||
}
|
||||
@ -291,6 +307,10 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
.build()
|
||||
})
|
||||
.transpose()?,
|
||||
format: opts
|
||||
.format
|
||||
.as_deref()
|
||||
.map(crate::fmt::FormatTemplate::parse),
|
||||
command: command.map(Arc::new),
|
||||
batch_size: opts.batch_size,
|
||||
exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
|
||||
@ -303,8 +323,7 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
||||
path_separator,
|
||||
actual_path_separator,
|
||||
max_results: opts.max_results(),
|
||||
strip_cwd_prefix: (opts.no_search_paths()
|
||||
&& (opts.strip_cwd_prefix || !(opts.null_separator || has_command))),
|
||||
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),
|
||||
})
|
||||
}
|
||||
|
||||
@ -317,18 +336,22 @@ fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<Comma
|
||||
if !opts.list_details {
|
||||
return None;
|
||||
}
|
||||
let color_arg = format!("--color={}", opts.color.as_str());
|
||||
|
||||
let res = determine_ls_command(&color_arg, colored_output)
|
||||
let res = determine_ls_command(colored_output)
|
||||
.map(|cmd| CommandSet::new_batch([cmd]).unwrap());
|
||||
Some(res)
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result<Vec<&str>> {
|
||||
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {
|
||||
#[allow(unused)]
|
||||
let gnu_ls = |command_name| {
|
||||
let color_arg = if colored_output {
|
||||
"--color=always"
|
||||
} else {
|
||||
"--color=never"
|
||||
};
|
||||
// Note: we use short options here (instead of --long-options) to support more
|
||||
// platforms (like BusyBox).
|
||||
vec![
|
||||
|
@ -5,29 +5,39 @@ use lscolors::{Indicator, LsColors, Style};
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::fmt::FormatTemplate;
|
||||
use crate::hyperlink::PathUrl;
|
||||
|
||||
fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
||||
path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) {
|
||||
let r = if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, entry, config, ls_colors)
|
||||
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) -> io::Result<()> {
|
||||
let mut has_hyperlink = false;
|
||||
if config.hyperlink {
|
||||
if let Some(url) = PathUrl::new(entry.path()) {
|
||||
write!(stdout, "\x1B]8;;{}\x1B\\", url)?;
|
||||
has_hyperlink = true;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref format) = config.format {
|
||||
print_entry_format(stdout, entry, config, format)?;
|
||||
} else if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, entry, config, ls_colors)?;
|
||||
} else {
|
||||
print_entry_uncolorized(stdout, entry, config)
|
||||
print_entry_uncolorized(stdout, entry, config)?;
|
||||
};
|
||||
|
||||
if let Err(e) = r {
|
||||
if e.kind() == ::std::io::ErrorKind::BrokenPipe {
|
||||
// Exit gracefully in case of a broken pipe (e.g. 'fd ... | head -n 3').
|
||||
ExitCode::Success.exit();
|
||||
} else {
|
||||
print_error(format!("Could not write to output: {}", e));
|
||||
ExitCode::GeneralError.exit();
|
||||
}
|
||||
if has_hyperlink {
|
||||
write!(stdout, "\x1B]8;;\x1B\\")?;
|
||||
}
|
||||
|
||||
if config.null_separator {
|
||||
write!(stdout, "\0")
|
||||
} else {
|
||||
writeln!(stdout)
|
||||
}
|
||||
}
|
||||
|
||||
@ -54,6 +64,21 @@ fn print_trailing_slash<W: Write>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_format<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
format: &FormatTemplate,
|
||||
) -> io::Result<()> {
|
||||
let output = format.generate(
|
||||
entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
);
|
||||
// TODO: support writing raw bytes on unix?
|
||||
write!(stdout, "{}", output.to_string_lossy())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_colorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
@ -103,12 +128,6 @@ fn print_entry_colorized<W: Write>(
|
||||
ls_colors.style_for_indicator(Indicator::Directory),
|
||||
)?;
|
||||
|
||||
if config.null_separator {
|
||||
write!(stdout, "\0")?;
|
||||
} else {
|
||||
writeln!(stdout)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -118,7 +137,6 @@ fn print_entry_uncolorized_base<W: Write>(
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
) -> io::Result<()> {
|
||||
let separator = if config.null_separator { "\0" } else { "\n" };
|
||||
let path = entry.stripped_path(config);
|
||||
|
||||
let mut path_string = path.to_string_lossy();
|
||||
@ -126,8 +144,7 @@ fn print_entry_uncolorized_base<W: Write>(
|
||||
*path_string.to_mut() = replace_path_separator(&path_string, separator);
|
||||
}
|
||||
write!(stdout, "{}", path_string)?;
|
||||
print_trailing_slash(stdout, entry, config, None)?;
|
||||
write!(stdout, "{}", separator)
|
||||
print_trailing_slash(stdout, entry, config, None)
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
@ -152,9 +169,7 @@ fn print_entry_uncolorized<W: Write>(
|
||||
print_entry_uncolorized_base(stdout, entry, config)
|
||||
} else {
|
||||
// Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes
|
||||
let separator = if config.null_separator { b"\0" } else { b"\n" };
|
||||
stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?;
|
||||
print_trailing_slash(stdout, entry, config, None)?;
|
||||
stdout.write_all(separator)
|
||||
print_trailing_slash(stdout, entry, config, None)
|
||||
}
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ use regex_syntax::ParserBuilder;
|
||||
|
||||
/// Determine if a regex pattern contains a literal uppercase character.
|
||||
pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
@ -16,16 +16,18 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
||||
use regex_syntax::hir::*;
|
||||
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(),
|
||||
HirKind::Literal(Literal::Byte(b)) => char::from(*b).is_uppercase(),
|
||||
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {
|
||||
Ok(s) => s.chars().any(|c| c.is_uppercase()),
|
||||
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
|
||||
},
|
||||
HirKind::Class(Class::Unicode(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
|
||||
HirKind::Class(Class::Bytes(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),
|
||||
HirKind::Group(Group { hir, .. }) | HirKind::Repetition(Repetition { hir, .. }) => {
|
||||
hir_has_uppercase_char(hir)
|
||||
HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => {
|
||||
hir_has_uppercase_char(sub)
|
||||
}
|
||||
HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {
|
||||
hirs.iter().any(hir_has_uppercase_char)
|
||||
@ -36,7 +38,7 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
||||
|
||||
/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)
|
||||
pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
@ -56,7 +58,7 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
|
||||
HirKind::Concat(hirs) => {
|
||||
let mut hirs = hirs.iter();
|
||||
if let Some(hir) = hirs.next() {
|
||||
if hir.kind() != &HirKind::Anchor(Anchor::StartText) {
|
||||
if hir.kind() != &HirKind::Look(Look::Start) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
@ -64,7 +66,10 @@ fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
|
||||
}
|
||||
|
||||
if let Some(hir) = hirs.next() {
|
||||
hir.kind() == &HirKind::Literal(Literal::Unicode('.'))
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => bytes.starts_with(b"."),
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
830
src/walk.rs
830
src/walk.rs
@ -1,17 +1,18 @@
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::OsStr;
|
||||
use std::io;
|
||||
use std::io::{self, Write};
|
||||
use std::mem;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::{Arc, Mutex, MutexGuard};
|
||||
use std::thread;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{borrow::Cow, io::Write};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender};
|
||||
use ignore::overrides::OverrideBuilder;
|
||||
use ignore::{self, WalkBuilder};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, SendError, Sender};
|
||||
use etcetera::BaseStrategy;
|
||||
use ignore::overrides::{Override, OverrideBuilder};
|
||||
use ignore::{WalkBuilder, WalkParallel, WalkState};
|
||||
use regex::bytes::Regex;
|
||||
|
||||
use crate::config::Config;
|
||||
@ -35,6 +36,7 @@ enum ReceiverMode {
|
||||
|
||||
/// The Worker threads can result in a valid entry having PathBuf or an error.
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Debug)]
|
||||
pub enum WorkerResult {
|
||||
// Errors should be rare, so it's probably better to allow large_enum_variant than
|
||||
// to box the Entry variant
|
||||
@ -42,139 +44,98 @@ pub enum WorkerResult {
|
||||
Error(ignore::Error),
|
||||
}
|
||||
|
||||
/// Maximum size of the output buffer before flushing results to the console
|
||||
pub const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
/// Default duration until output buffering switches to streaming.
|
||||
pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
/// A batch of WorkerResults to send over a channel.
|
||||
#[derive(Clone)]
|
||||
struct Batch {
|
||||
items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
|
||||
}
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Arc<Vec<Regex>>, config: Arc<Config>) -> Result<ExitCode> {
|
||||
let first_path = &paths[0];
|
||||
|
||||
// Channel capacity was chosen empircally to perform similarly to an unbounded channel
|
||||
let (tx, rx) = bounded(0x4000 * config.threads);
|
||||
|
||||
let mut override_builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
override_builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
let overrides = override_builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))?;
|
||||
|
||||
let mut walker = WalkBuilder::new(first_path);
|
||||
walker
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
walker.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
#[cfg(target_os = "macos")]
|
||||
let config_dir_op = std::env::var_os("XDG_CONFIG_HOME")
|
||||
.map(PathBuf::from)
|
||||
.filter(|p| p.is_absolute())
|
||||
.or_else(|| dirs_next::home_dir().map(|d| d.join(".config")));
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let config_dir_op = dirs_next::config_dir();
|
||||
|
||||
if let Some(global_ignore_file) = config_dir_op
|
||||
.map(|p| p.join("fd").join("ignore"))
|
||||
.filter(|p| p.is_file())
|
||||
{
|
||||
let result = walker.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in global ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
impl Batch {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
items: Arc::new(Mutex::new(Some(vec![]))),
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = walker.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
walker.add(path);
|
||||
}
|
||||
|
||||
let parallel_walker = walker.threads(config.threads).build_parallel();
|
||||
|
||||
// Flag for cleanly shutting down the parallel walk
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
// Flag specifically for quitting due to ^C
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&quit_flag);
|
||||
let interrupt_flag = Arc::clone(&interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Spawn the thread that receives all results through the channel.
|
||||
let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx);
|
||||
|
||||
// Spawn the sender threads.
|
||||
spawn_senders(&config, &quit_flag, patterns, parallel_walker, tx);
|
||||
|
||||
// Wait for the receiver thread to print out all results.
|
||||
let exit_code = receiver_thread.join().unwrap();
|
||||
|
||||
if interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
|
||||
self.items.lock().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Batch {
|
||||
type Item = WorkerResult;
|
||||
type IntoIter = std::vec::IntoIter<WorkerResult>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.lock().take().unwrap().into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper that sends batches of items at once over a channel.
|
||||
struct BatchSender {
|
||||
batch: Batch,
|
||||
tx: Sender<Batch>,
|
||||
limit: usize,
|
||||
}
|
||||
|
||||
impl BatchSender {
|
||||
fn new(tx: Sender<Batch>, limit: usize) -> Self {
|
||||
Self {
|
||||
batch: Batch::new(),
|
||||
tx,
|
||||
limit,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if we need to flush a batch.
|
||||
fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
|
||||
match batch {
|
||||
// Limit the batch size to provide some backpressure
|
||||
Some(vec) => vec.len() >= self.limit,
|
||||
// Batch was already taken by the receiver, so make a new one
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add an item to a batch.
|
||||
fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
|
||||
let mut batch = self.batch.lock();
|
||||
|
||||
if self.needs_flush(batch.as_ref()) {
|
||||
drop(batch);
|
||||
self.batch = Batch::new();
|
||||
batch = self.batch.lock();
|
||||
}
|
||||
|
||||
let items = batch.as_mut().unwrap();
|
||||
items.push(item);
|
||||
|
||||
if items.len() == 1 {
|
||||
// New batch, send it over the channel
|
||||
self.tx
|
||||
.send(self.batch.clone())
|
||||
.map_err(|_| SendError(()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Maximum size of the output buffer before flushing results to the console
|
||||
const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
/// Default duration until output buffering switches to streaming.
|
||||
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
|
||||
/// Wrapper for the receiver thread's buffering behavior.
|
||||
struct ReceiverBuffer<W> {
|
||||
struct ReceiverBuffer<'a, W> {
|
||||
/// The configuration.
|
||||
config: Arc<Config>,
|
||||
config: &'a Config,
|
||||
/// For shutting down the senders.
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
quit_flag: &'a AtomicBool,
|
||||
/// The ^C notifier.
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
interrupt_flag: &'a AtomicBool,
|
||||
/// Receiver for worker results.
|
||||
rx: Receiver<WorkerResult>,
|
||||
rx: Receiver<Batch>,
|
||||
/// Standard output.
|
||||
stdout: W,
|
||||
/// The current buffer mode.
|
||||
@ -187,15 +148,12 @@ struct ReceiverBuffer<W> {
|
||||
num_results: usize,
|
||||
}
|
||||
|
||||
impl<W: Write> ReceiverBuffer<W> {
|
||||
impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
||||
/// Create a new receiver buffer.
|
||||
fn new(
|
||||
config: Arc<Config>,
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
stdout: W,
|
||||
) -> Self {
|
||||
fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {
|
||||
let config = &state.config;
|
||||
let quit_flag = state.quit_flag.as_ref();
|
||||
let interrupt_flag = state.interrupt_flag.as_ref();
|
||||
let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
|
||||
let deadline = Instant::now() + max_buffer_time;
|
||||
|
||||
@ -223,7 +181,7 @@ impl<W: Write> ReceiverBuffer<W> {
|
||||
}
|
||||
|
||||
/// Receive the next worker result.
|
||||
fn recv(&self) -> Result<WorkerResult, RecvTimeoutError> {
|
||||
fn recv(&self) -> Result<Batch, RecvTimeoutError> {
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
// Wait at most until we should switch to streaming
|
||||
@ -239,34 +197,44 @@ impl<W: Write> ReceiverBuffer<W> {
|
||||
/// Wait for a result or state change.
|
||||
fn poll(&mut self) -> Result<(), ExitCode> {
|
||||
match self.recv() {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => {
|
||||
if self.config.quiet {
|
||||
return Err(ExitCode::HasResults(true));
|
||||
}
|
||||
Ok(batch) => {
|
||||
for result in batch {
|
||||
match result {
|
||||
WorkerResult::Entry(dir_entry) => {
|
||||
if self.config.quiet {
|
||||
return Err(ExitCode::HasResults(true));
|
||||
}
|
||||
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
self.buffer.push(dir_entry);
|
||||
if self.buffer.len() > MAX_BUFFER_LENGTH {
|
||||
self.stream()?;
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
self.buffer.push(dir_entry);
|
||||
if self.buffer.len() > MAX_BUFFER_LENGTH {
|
||||
self.stream()?;
|
||||
}
|
||||
}
|
||||
ReceiverMode::Streaming => {
|
||||
self.print(&dir_entry)?;
|
||||
}
|
||||
}
|
||||
|
||||
self.num_results += 1;
|
||||
if let Some(max_results) = self.config.max_results {
|
||||
if self.num_results >= max_results {
|
||||
return self.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
WorkerResult::Error(err) => {
|
||||
if self.config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
ReceiverMode::Streaming => {
|
||||
self.print(&dir_entry)?;
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
|
||||
self.num_results += 1;
|
||||
if let Some(max_results) = self.config.max_results {
|
||||
if self.num_results >= max_results {
|
||||
return self.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
if self.config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
// If we don't have another batch ready, flush before waiting
|
||||
if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
Err(RecvTimeoutError::Timeout) => {
|
||||
@ -282,7 +250,12 @@ impl<W: Write> ReceiverBuffer<W> {
|
||||
|
||||
/// Output a path.
|
||||
fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
|
||||
output::print_entry(&mut self.stdout, entry, &self.config);
|
||||
if let Err(e) = output::print_entry(&mut self.stdout, entry, self.config) {
|
||||
if e.kind() != ::std::io::ErrorKind::BrokenPipe {
|
||||
print_error(format!("Could not write to output: {}", e));
|
||||
return Err(ExitCode::GeneralError);
|
||||
}
|
||||
}
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
// Ignore any errors on flush, because we're about to exit anyway
|
||||
@ -321,7 +294,7 @@ impl<W: Write> ReceiverBuffer<W> {
|
||||
|
||||
/// Flush stdout if necessary.
|
||||
fn flush(&mut self) -> Result<(), ExitCode> {
|
||||
if self.config.interactive_terminal && self.stdout.flush().is_err() {
|
||||
if self.stdout.flush().is_err() {
|
||||
// Probably a broken pipe. Exit gracefully.
|
||||
return Err(ExitCode::GeneralError);
|
||||
}
|
||||
@ -329,229 +302,372 @@ impl<W: Write> ReceiverBuffer<W> {
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_receiver(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
interrupt_flag: &Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
) -> thread::JoinHandle<ExitCode> {
|
||||
let config = Arc::clone(config);
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
let interrupt_flag = Arc::clone(interrupt_flag);
|
||||
/// State shared by the sender and receiver threads.
|
||||
struct WorkerState {
|
||||
/// The search patterns.
|
||||
patterns: Vec<Regex>,
|
||||
/// The command line configuration.
|
||||
config: Config,
|
||||
/// Flag for cleanly shutting down the parallel walk
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
/// Flag specifically for quitting due to ^C
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl WorkerState {
|
||||
fn new(patterns: Vec<Regex>, config: Config) -> Self {
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
Self {
|
||||
patterns,
|
||||
config,
|
||||
quit_flag,
|
||||
interrupt_flag,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
|
||||
let mut builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
|
||||
builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))
|
||||
}
|
||||
|
||||
fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
let overrides = self.build_overrides(paths)?;
|
||||
|
||||
let mut builder = WalkBuilder::new(first_path);
|
||||
builder
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.require_git(config.require_git_to_read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
builder.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
if let Ok(basedirs) = etcetera::choose_base_strategy() {
|
||||
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
|
||||
if global_ignore_file.is_file() {
|
||||
let result = builder.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!(
|
||||
"Malformed pattern in global ignore file. {}.",
|
||||
err
|
||||
));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = builder.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
builder.add(path);
|
||||
}
|
||||
|
||||
let walker = builder.threads(config.threads).build_parallel();
|
||||
Ok(walker)
|
||||
}
|
||||
|
||||
/// Run the receiver work, either on this thread or a pool of background
|
||||
/// threads (for --exec).
|
||||
fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
|
||||
let config = &self.config;
|
||||
|
||||
let threads = config.threads;
|
||||
thread::spawn(move || {
|
||||
// This will be set to `Some` if the `--exec` argument was supplied.
|
||||
if let Some(ref cmd) = config.command {
|
||||
if cmd.in_batch_mode() {
|
||||
exec::batch(rx, cmd, &config)
|
||||
exec::batch(rx.into_iter().flatten(), cmd, config)
|
||||
} else {
|
||||
let out_perm = Arc::new(Mutex::new(()));
|
||||
let out_perm = Mutex::new(());
|
||||
|
||||
// Each spawned job will store it's thread handle in here.
|
||||
let mut handles = Vec::with_capacity(threads);
|
||||
for _ in 0..threads {
|
||||
let config = Arc::clone(&config);
|
||||
let rx = rx.clone();
|
||||
let cmd = Arc::clone(cmd);
|
||||
let out_perm = Arc::clone(&out_perm);
|
||||
thread::scope(|scope| {
|
||||
// Each spawned job will store its thread handle in here.
|
||||
let threads = config.threads;
|
||||
let mut handles = Vec::with_capacity(threads);
|
||||
for _ in 0..threads {
|
||||
let rx = rx.clone();
|
||||
|
||||
// Spawn a job thread that will listen for and execute inputs.
|
||||
let handle = thread::spawn(move || exec::job(rx, cmd, out_perm, &config));
|
||||
// Spawn a job thread that will listen for and execute inputs.
|
||||
let handle = scope
|
||||
.spawn(|| exec::job(rx.into_iter().flatten(), cmd, &out_perm, config));
|
||||
|
||||
// Push the handle of the spawned thread into the vector for later joining.
|
||||
handles.push(handle);
|
||||
}
|
||||
|
||||
let exit_codes = handles
|
||||
.into_iter()
|
||||
.map(|handle| handle.join().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
merge_exitcodes(exit_codes)
|
||||
// Push the handle of the spawned thread into the vector for later joining.
|
||||
handles.push(handle);
|
||||
}
|
||||
let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());
|
||||
merge_exitcodes(exit_codes)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let stdout = io::stdout();
|
||||
let stdout = stdout.lock();
|
||||
let stdout = io::stdout().lock();
|
||||
let stdout = io::BufWriter::new(stdout);
|
||||
|
||||
let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout);
|
||||
rxbuffer.process()
|
||||
ReceiverBuffer::new(self, rx, stdout).process()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_senders(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
patterns: Arc<Vec<Regex>>,
|
||||
parallel_walker: ignore::WalkParallel,
|
||||
tx: Sender<WorkerResult>,
|
||||
) {
|
||||
parallel_walker.run(|| {
|
||||
let config = Arc::clone(config);
|
||||
let patterns = Arc::clone(&patterns);
|
||||
let tx_thread = tx.clone();
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
/// Spawn the sender threads.
|
||||
fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {
|
||||
walker.run(|| {
|
||||
let patterns = &self.patterns;
|
||||
let config = &self.config;
|
||||
let quit_flag = self.quit_flag.as_ref();
|
||||
|
||||
Box::new(move |entry_o| {
|
||||
if quit_flag.load(Ordering::Relaxed) {
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
|
||||
let entry = match entry_o {
|
||||
Ok(ref e) if e.depth() == 0 => {
|
||||
// Skip the root directory entry.
|
||||
return ignore::WalkState::Continue;
|
||||
let mut limit = 0x100;
|
||||
if let Some(cmd) = &config.command {
|
||||
if !cmd.in_batch_mode() && config.threads > 1 {
|
||||
// Evenly distribute work between multiple receivers
|
||||
limit = 1;
|
||||
}
|
||||
Ok(e) => DirEntry::normal(e),
|
||||
Err(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
}) => match inner_err.as_ref() {
|
||||
ignore::Error::Io(io_error)
|
||||
if io_error.kind() == io::ErrorKind::NotFound
|
||||
&& path
|
||||
.symlink_metadata()
|
||||
.ok()
|
||||
.map_or(false, |m| m.file_type().is_symlink()) =>
|
||||
{
|
||||
DirEntry::broken_symlink(path)
|
||||
}
|
||||
let mut tx = BatchSender::new(tx.clone(), limit);
|
||||
|
||||
Box::new(move |entry| {
|
||||
if quit_flag.load(Ordering::Relaxed) {
|
||||
return WalkState::Quit;
|
||||
}
|
||||
|
||||
let entry = match entry {
|
||||
Ok(ref e) if e.depth() == 0 => {
|
||||
// Skip the root directory entry.
|
||||
return WalkState::Continue;
|
||||
}
|
||||
_ => {
|
||||
return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
})) {
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return match tx_thread.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(min_depth) = config.min_depth {
|
||||
if entry.depth().map_or(true, |d| d < min_depth) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Check the name first, since it doesn't require metadata
|
||||
let entry_path = entry.path();
|
||||
|
||||
let search_str: Cow<OsStr> = if config.search_full_path {
|
||||
let path_abs_buf = filesystem::path_absolute_form(entry_path)
|
||||
.expect("Retrieving absolute path succeeds");
|
||||
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
|
||||
} else {
|
||||
match entry_path.file_name() {
|
||||
Some(filename) => Cow::Borrowed(filename),
|
||||
None => unreachable!(
|
||||
"Encountered file system entry without a file name. This should only \
|
||||
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
|
||||
appear in a file system traversal."
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
if !patterns
|
||||
.iter()
|
||||
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
|
||||
{
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
|
||||
// Filter out unwanted extensions.
|
||||
if let Some(ref exts_regex) = config.extensions {
|
||||
if let Some(path_str) = entry_path.file_name() {
|
||||
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted file types.
|
||||
if let Some(ref file_types) = config.file_types {
|
||||
if file_types.should_ignore(&entry) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
if let Some(ref owner_constraint) = config.owner_constraint {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if !owner_constraint.matches(metadata) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted sizes if it is a file and we have been given size constraints.
|
||||
if !config.size_constraints.is_empty() {
|
||||
if entry_path.is_file() {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
let file_size = metadata.len();
|
||||
if config
|
||||
.size_constraints
|
||||
.iter()
|
||||
.any(|sc| !sc.is_within(file_size))
|
||||
Ok(e) => DirEntry::normal(e),
|
||||
Err(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
}) => match inner_err.as_ref() {
|
||||
ignore::Error::Io(io_error)
|
||||
if io_error.kind() == io::ErrorKind::NotFound
|
||||
&& path
|
||||
.symlink_metadata()
|
||||
.ok()
|
||||
.map_or(false, |m| m.file_type().is_symlink()) =>
|
||||
{
|
||||
return ignore::WalkState::Continue;
|
||||
DirEntry::broken_symlink(path)
|
||||
}
|
||||
_ => {
|
||||
return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
})) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return match tx.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(min_depth) = config.min_depth {
|
||||
if entry.depth().map_or(true, |d| d < min_depth) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Check the name first, since it doesn't require metadata
|
||||
let entry_path = entry.path();
|
||||
|
||||
let search_str: Cow<OsStr> = if config.search_full_path {
|
||||
let path_abs_buf = filesystem::path_absolute_form(entry_path)
|
||||
.expect("Retrieving absolute path succeeds");
|
||||
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
|
||||
} else {
|
||||
match entry_path.file_name() {
|
||||
Some(filename) => Cow::Borrowed(filename),
|
||||
None => unreachable!(
|
||||
"Encountered file system entry without a file name. This should only \
|
||||
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
|
||||
appear in a file system traversal."
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
if !patterns
|
||||
.iter()
|
||||
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
}
|
||||
|
||||
// Filter out unwanted extensions.
|
||||
if let Some(ref exts_regex) = config.extensions {
|
||||
if let Some(path_str) = entry_path.file_name() {
|
||||
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted modification times
|
||||
if !config.time_constraints.is_empty() {
|
||||
let mut matched = false;
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
matched = config
|
||||
.time_constraints
|
||||
.iter()
|
||||
.all(|tf| tf.applies_to(&modified));
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return ignore::WalkState::Continue;
|
||||
|
||||
// Filter out unwanted file types.
|
||||
if let Some(ref file_types) = config.file_types {
|
||||
if file_types.should_ignore(&entry) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.is_printing() {
|
||||
if let Some(ls_colors) = &config.ls_colors {
|
||||
// Compute colors in parallel
|
||||
entry.style(ls_colors);
|
||||
#[cfg(unix)]
|
||||
{
|
||||
if let Some(ref owner_constraint) = config.owner_constraint {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if !owner_constraint.matches(metadata) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let send_result = tx_thread.send(WorkerResult::Entry(entry));
|
||||
// Filter out unwanted sizes if it is a file and we have been given size constraints.
|
||||
if !config.size_constraints.is_empty() {
|
||||
if entry_path.is_file() {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
let file_size = metadata.len();
|
||||
if config
|
||||
.size_constraints
|
||||
.iter()
|
||||
.any(|sc| !sc.is_within(file_size))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
if send_result.is_err() {
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
// Filter out unwanted modification times
|
||||
if !config.time_constraints.is_empty() {
|
||||
let mut matched = false;
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
matched = config
|
||||
.time_constraints
|
||||
.iter()
|
||||
.all(|tf| tf.applies_to(&modified));
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply pruning.
|
||||
if config.prune {
|
||||
return ignore::WalkState::Skip;
|
||||
}
|
||||
if config.is_printing() {
|
||||
if let Some(ls_colors) = &config.ls_colors {
|
||||
// Compute colors in parallel
|
||||
entry.style(ls_colors);
|
||||
}
|
||||
}
|
||||
|
||||
ignore::WalkState::Continue
|
||||
})
|
||||
});
|
||||
let send_result = tx.send(WorkerResult::Entry(entry));
|
||||
|
||||
if send_result.is_err() {
|
||||
return WalkState::Quit;
|
||||
}
|
||||
|
||||
// Apply pruning.
|
||||
if config.prune {
|
||||
return WalkState::Skip;
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
/// Perform the recursive scan.
|
||||
fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {
|
||||
let config = &self.config;
|
||||
let walker = self.build_walker(paths)?;
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&self.quit_flag);
|
||||
let interrupt_flag = Arc::clone(&self.interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let (tx, rx) = bounded(2 * config.threads);
|
||||
|
||||
let exit_code = thread::scope(|scope| {
|
||||
// Spawn the receiver thread(s)
|
||||
let receiver = scope.spawn(|| self.receive(rx));
|
||||
|
||||
// Spawn the sender threads.
|
||||
self.spawn_senders(walker, tx);
|
||||
|
||||
receiver.join().unwrap()
|
||||
});
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
|
||||
WorkerState::new(patterns, config).scan(paths)
|
||||
}
|
||||
|
@ -20,6 +20,9 @@ pub struct TestEnv {
|
||||
|
||||
/// Normalize each line by sorting the whitespace-separated words
|
||||
normalize_line: bool,
|
||||
|
||||
/// Temporary directory for storing test config (global ignore file)
|
||||
config_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
/// Create the working directory and the test files.
|
||||
@ -59,6 +62,16 @@ fn create_working_directory(
|
||||
Ok(temp_dir)
|
||||
}
|
||||
|
||||
fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {
|
||||
let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?;
|
||||
let fd_dir = config_dir.path().join("fd");
|
||||
fs::create_dir(&fd_dir)?;
|
||||
let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?;
|
||||
ignore_file.write_all(ignore_file_content.as_bytes())?;
|
||||
|
||||
Ok(config_dir)
|
||||
}
|
||||
|
||||
/// Find the *fd* executable.
|
||||
fn find_fd_exe() -> PathBuf {
|
||||
// Tests exe is in target/debug/deps, the *fd* exe is in target/debug
|
||||
@ -116,7 +129,7 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
|
||||
.lines()
|
||||
.map(|line| {
|
||||
let line = if trim_start { line.trim_start() } else { line };
|
||||
let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string());
|
||||
let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);
|
||||
if normalize_line {
|
||||
let mut words: Vec<_> = line.split_whitespace().collect();
|
||||
words.sort_unstable();
|
||||
@ -150,6 +163,7 @@ impl TestEnv {
|
||||
temp_dir,
|
||||
fd_exe,
|
||||
normalize_line: false,
|
||||
config_dir: None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -158,6 +172,16 @@ impl TestEnv {
|
||||
temp_dir: self.temp_dir,
|
||||
fd_exe: self.fd_exe,
|
||||
normalize_line: normalize,
|
||||
config_dir: self.config_dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn global_ignore_file(self, content: &str) -> TestEnv {
|
||||
let config_dir =
|
||||
create_config_directory_with_global_ignore(content).expect("config directory");
|
||||
TestEnv {
|
||||
config_dir: Some(config_dir),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
@ -206,13 +230,8 @@ impl TestEnv {
|
||||
path: P,
|
||||
args: &[&str],
|
||||
) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = cmd.output().expect("fd output");
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
|
||||
// Check for exit status.
|
||||
if !output.status.success() {
|
||||
@ -288,6 +307,24 @@ impl TestEnv {
|
||||
self.assert_error_subdirectory(".", args, Some(expected))
|
||||
}
|
||||
|
||||
fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
if let Some(config_dir) = &self.config_dir {
|
||||
cmd.env("XDG_CONFIG_HOME", config_dir.path());
|
||||
} else {
|
||||
cmd.arg("--no-global-ignore-file");
|
||||
}
|
||||
// Make sure LS_COLORS is unset to ensure consistent
|
||||
// color output
|
||||
cmd.env("LS_COLORS", "");
|
||||
cmd.args(args);
|
||||
|
||||
// Run *fd*.
|
||||
cmd.output().expect("fd output")
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* in the specified path under the root working directory,
|
||||
/// and with the specified arguments produces an error with the expected message.
|
||||
fn assert_error_subdirectory<P: AsRef<Path>>(
|
||||
@ -296,13 +333,7 @@ impl TestEnv {
|
||||
args: &[&str],
|
||||
expected: Option<&str>,
|
||||
) -> process::ExitStatus {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = cmd.output().expect("fd output");
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
|
||||
if let Some(expected) = expected {
|
||||
// Normalize both expected and actual output.
|
||||
|
278
tests/tests.rs
278
tests/tests.rs
@ -1,5 +1,7 @@
|
||||
mod testenv;
|
||||
|
||||
#[cfg(unix)]
|
||||
use nix::unistd::{Gid, Group, Uid, User};
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
@ -808,6 +810,62 @@ fn test_custom_ignore_precedence() {
|
||||
te.assert_output(&["--no-ignore", "foo"], "inner/foo");
|
||||
}
|
||||
|
||||
/// Don't require git to respect gitignore (--no-require-git)
|
||||
#[test]
|
||||
fn test_respect_ignore_files() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
// Not in a git repo anymore
|
||||
fs::remove_dir(te.test_root().join(".git")).unwrap();
|
||||
|
||||
// don't respect gitignore because we're not in a git repo
|
||||
te.assert_output(
|
||||
&["foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
// respect gitignore because we set `--no-require-git`
|
||||
te.assert_output(
|
||||
&["--no-require-git", "foo"],
|
||||
"a.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
// make sure overriding works
|
||||
te.assert_output(
|
||||
&["--no-require-git", "--require-git", "foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["--no-require-git", "--no-ignore", "foo"],
|
||||
"a.foo
|
||||
gitignored.foo
|
||||
fdignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/",
|
||||
);
|
||||
}
|
||||
|
||||
/// VCS ignored files (--no-ignore-vcs)
|
||||
#[test]
|
||||
fn test_no_ignore_vcs() {
|
||||
@ -879,6 +937,47 @@ fn test_no_ignore_aliases() {
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test]
|
||||
fn test_global_ignore() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
|
||||
te.assert_output(
|
||||
&[],
|
||||
"a.foo
|
||||
e1 e2
|
||||
symlink",
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test_case("--unrestricted", ".hidden.foo
|
||||
a.foo
|
||||
fdignored.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "unrestricted")]
|
||||
#[test_case("--no-ignore", "a.foo
|
||||
fdignored.foo
|
||||
gitignored.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "no-ignore")]
|
||||
#[test_case("--no-global-ignore-file", "a.foo
|
||||
one/b.foo
|
||||
one/two/c.foo
|
||||
one/two/C.Foo2
|
||||
one/two/three/d.foo
|
||||
one/two/three/directory_foo/"; "no-global-ignore-file")]
|
||||
fn test_no_global_ignore(flag: &str, expected_output: &str) {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES).global_ignore_file("one");
|
||||
te.assert_output(&[flag, "foo"], expected_output);
|
||||
}
|
||||
|
||||
/// Symlinks (--follow)
|
||||
#[test]
|
||||
fn test_follow() {
|
||||
@ -1202,10 +1301,18 @@ fn test_type() {
|
||||
fn test_type_executable() {
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
|
||||
// This test assumes the current user isn't root
|
||||
// (otherwise if the executable bit is set for any level, it is executable for the current
|
||||
// user)
|
||||
if Uid::current().is_root() {
|
||||
return;
|
||||
}
|
||||
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.create_new(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o777)
|
||||
.open(te.test_root().join("executable-file.sh"))
|
||||
@ -1213,6 +1320,7 @@ fn test_type_executable() {
|
||||
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o645)
|
||||
.open(te.test_root().join("not-user-executable-file.sh"))
|
||||
@ -1516,6 +1624,66 @@ fn test_excludes() {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
te.assert_output(
|
||||
&["--format", "path={}", "--path-separator=/"],
|
||||
"path=a.foo
|
||||
path=e1 e2
|
||||
path=one
|
||||
path=one/b.foo
|
||||
path=one/two
|
||||
path=one/two/C.Foo2
|
||||
path=one/two/c.foo
|
||||
path=one/two/three
|
||||
path=one/two/three/d.foo
|
||||
path=one/two/three/directory_foo
|
||||
path=symlink",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "noExt={.}", "--path-separator=/"],
|
||||
"noExt=a
|
||||
noExt=one/b
|
||||
noExt=one/two/C
|
||||
noExt=one/two/c
|
||||
noExt=one/two/three/d
|
||||
noExt=one/two/three/directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "basename={/}", "--path-separator=/"],
|
||||
"basename=a.foo
|
||||
basename=b.foo
|
||||
basename=C.Foo2
|
||||
basename=c.foo
|
||||
basename=d.foo
|
||||
basename=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "name={/.}", "--path-separator=/"],
|
||||
"name=a
|
||||
name=b
|
||||
name=C
|
||||
name=c
|
||||
name=d
|
||||
name=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "parent={//}", "--path-separator=/"],
|
||||
"parent=.
|
||||
parent=one
|
||||
parent=one/two
|
||||
parent=one/two
|
||||
parent=one/two/three
|
||||
parent=one/two/three",
|
||||
);
|
||||
}
|
||||
|
||||
/// Shell script execution (--exec)
|
||||
#[test]
|
||||
fn test_exec() {
|
||||
@ -1701,18 +1869,26 @@ fn test_exec_batch() {
|
||||
\n\
|
||||
Usage: fd [OPTIONS] [pattern] [path]...\n\
|
||||
\n\
|
||||
For more information try '--help'\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
&["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"],
|
||||
"error: The argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'",
|
||||
"error: the argument '--exec-batch <cmd>...' cannot be used with '--exec <cmd>...'\n\
|
||||
\n\
|
||||
Usage: fd --exec-batch <cmd>... <pattern> [path]...\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
&["foo", "--exec-batch"],
|
||||
"error: The argument '--exec-batch <cmd>...' requires a value but none was supplied",
|
||||
"error: a value is required for '--exec-batch <cmd>...' but none was supplied\n\
|
||||
\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
);
|
||||
|
||||
te.assert_failure_with_error(
|
||||
@ -1721,7 +1897,7 @@ fn test_exec_batch() {
|
||||
\n\
|
||||
Usage: fd [OPTIONS] [pattern] [path]...\n\
|
||||
\n\
|
||||
For more information try '--help'\n\
|
||||
For more information, try '--help'.\n\
|
||||
",
|
||||
);
|
||||
|
||||
@ -2149,10 +2325,10 @@ fn test_owner_ignore_all() {
|
||||
#[test]
|
||||
fn test_owner_current_user() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let uid = users::get_current_uid();
|
||||
let uid = Uid::current();
|
||||
te.assert_output(&["--owner", &uid.to_string(), "a.foo"], "a.foo");
|
||||
if let Some(username) = users::get_current_username().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &username, "a.foo"], "a.foo");
|
||||
if let Ok(Some(user)) = User::from_uid(uid) {
|
||||
te.assert_output(&["--owner", &user.name, "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
@ -2160,10 +2336,10 @@ fn test_owner_current_user() {
|
||||
#[test]
|
||||
fn test_owner_current_group() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let gid = users::get_current_gid();
|
||||
let gid = Gid::current();
|
||||
te.assert_output(&["--owner", &format!(":{}", gid), "a.foo"], "a.foo");
|
||||
if let Some(groupname) = users::get_current_groupname().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &format!(":{}", groupname), "a.foo"], "a.foo");
|
||||
if let Ok(Some(group)) = Group::from_gid(gid) {
|
||||
te.assert_output(&["--owner", &format!(":{}", group.name), "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
@ -2171,7 +2347,7 @@ fn test_owner_current_group() {
|
||||
#[test]
|
||||
fn test_owner_root() {
|
||||
// This test assumes the current user isn't root
|
||||
if users::get_current_uid() == 0 || users::get_current_gid() == 0 {
|
||||
if Uid::current().is_root() || Gid::current() == Gid::from_raw(0) {
|
||||
return;
|
||||
}
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
@ -2272,6 +2448,11 @@ fn test_max_results() {
|
||||
};
|
||||
assert_just_one_result_with_option("--max-results=1");
|
||||
assert_just_one_result_with_option("-1");
|
||||
|
||||
// check that --max-results & -1 conflic with --exec
|
||||
te.assert_failure(&["thing", "--max-results=0", "--exec=cat"]);
|
||||
te.assert_failure(&["thing", "-1", "--exec=cat"]);
|
||||
te.assert_failure(&["thing", "--max-results=1", "-1", "--exec=cat"]);
|
||||
}
|
||||
|
||||
/// Filenames with non-utf8 paths are passed to the executed program unchanged
|
||||
@ -2358,6 +2539,7 @@ fn test_number_parsing_errors() {
|
||||
#[test_case("--hidden", &["--no-hidden"] ; "hidden")]
|
||||
#[test_case("--no-ignore", &["--ignore"] ; "no-ignore")]
|
||||
#[test_case("--no-ignore-vcs", &["--ignore-vcs"] ; "no-ignore-vcs")]
|
||||
#[test_case("--no-require-git", &["--require-git"] ; "no-require-git")]
|
||||
#[test_case("--follow", &["--no-follow"] ; "follow")]
|
||||
#[test_case("--absolute-path", &["--relative-path"] ; "absolute-path")]
|
||||
#[test_case("-u", &["--ignore", "--no-hidden"] ; "u")]
|
||||
@ -2436,3 +2618,75 @@ fn test_invalid_cwd() {
|
||||
panic!("{:?}", output);
|
||||
}
|
||||
}
|
||||
|
||||
/// Test behavior of .git directory with various flags
|
||||
#[test]
|
||||
fn test_git_dir() {
|
||||
let te = TestEnv::new(
|
||||
&[".git/one", "other_dir/.git", "nested/dir/.git"],
|
||||
&[
|
||||
".git/one/foo.a",
|
||||
".git/.foo",
|
||||
".git/a.foo",
|
||||
"other_dir/.git/foo1",
|
||||
"nested/dir/.git/foo2",
|
||||
],
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["--hidden", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(&["--no-ignore", "foo"], "");
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore-vcs", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_gitignore_parent() {
|
||||
let te = TestEnv::new(&["sub"], &[".abc", "sub/.abc"]);
|
||||
|
||||
fs::File::create(te.test_root().join(".gitignore"))
|
||||
.unwrap()
|
||||
.write_all(b".abc\n")
|
||||
.unwrap();
|
||||
|
||||
te.assert_output_subdirectory("sub", &["--hidden"], "");
|
||||
te.assert_output_subdirectory("sub", &["--hidden", "--search-path", "."], "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_hyperlink() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
#[cfg(unix)]
|
||||
let hostname = nix::unistd::gethostname().unwrap().into_string().unwrap();
|
||||
#[cfg(not(unix))]
|
||||
let hostname = "";
|
||||
|
||||
let expected = format!(
|
||||
"\x1b]8;;file://{}{}/a.foo\x1b\\a.foo\x1b]8;;\x1b\\",
|
||||
hostname,
|
||||
get_absolute_root_path(&te),
|
||||
);
|
||||
|
||||
te.assert_output(&["--hyperlink=always", "a.foo"], &expected);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user