mirror of https://github.com/sharkdp/fd.git
Compare commits
170 Commits
Author | SHA1 | Date |
---|---|---|
David Peter | 29936f0fba | |
David Peter | bfc16a1dee | |
Thayne McCombs | 289a68bac3 | |
Thayne McCombs | fcaff0f385 | |
Thayne McCombs | 36163f9c3a | |
Thayne McCombs | d90ec1758e | |
Thayne McCombs | ea22cbd712 | |
Thayne McCombs | d44badc190 | |
Thayne McCombs | 6becb66185 | |
Thayne McCombs | 1a1f057e5d | |
Thayne McCombs | 10a269bd3f | |
Thayne McCombs | 90d3381814 | |
Thayne McCombs | b1f83a0bb0 | |
Thayne McCombs | 3bc70925a9 | |
Thayne McCombs | f287f08b9f | |
Tavian Barnes | 0e4488e9dc | |
Tavian Barnes | d7d63eddbe | |
Thayne McCombs | 8acd7722f0 | |
Thayne McCombs | 92fab6e058 | |
Thayne McCombs | a0ee0856db | |
Thayne McCombs | b8df500a70 | |
Thayne McCombs | cd96ca071d | |
Tavian Barnes | 216472ff9f | |
Thayne McCombs | 3680d10e5c | |
dependabot[bot] | abe3b9cd78 | |
Thayne McCombs | 7aad6c9edf | |
Thayne McCombs | ddd3aae249 | |
dependabot[bot] | 6d3bb68faf | |
dependabot[bot] | 21d50dae8c | |
David Peter | 9279b1f0af | |
Thayne McCombs | 6647085015 | |
Thayne McCombs | 6af8f092ee | |
Thayne McCombs | c4094c7a05 | |
AlbydS | 6d58df5f0c | |
AlbydS | ffecccf209 | |
Thayne McCombs | 31f2839751 | |
Thayne McCombs | e10a4eab2b | |
Thayne McCombs | 8eb047945e | |
Tavian Barnes | 1031325cca | |
Tavian Barnes | 9fc2167cf9 | |
Tavian Barnes | ae1de4de24 | |
Thayne McCombs | 7e5d14b733 | |
Thayne McCombs | 85cbea8dcb | |
Thayne McCombs | bc6782624e | |
Thayne McCombs | cf6ff87c7d | |
Thayne McCombs | 3cd73d7927 | |
binlingyu | 7794c4aae5 | |
Thayne McCombs | 8c7a84ea30 | |
Thayne McCombs | e262ade74e | |
Thayne McCombs | 11069e284a | |
Thayne McCombs | 6e2e86decb | |
Thayne McCombs | 15d3b63ccc | |
dependabot[bot] | 453577651e | |
dependabot[bot] | 39c07b7b4c | |
dependabot[bot] | 5910285db0 | |
Thayne McCombs | 68fe31da3f | |
Jian Wang | f875ea9a52 | |
one230six | 138919907b | |
Thayne McCombs | b8744626e7 | |
dependabot[bot] | b08d78f6fc | |
Tavian Barnes | 4efc05ef27 | |
garlic-hub | 0788c43c3f | |
Thayne McCombs | 3b2fd158b5 | |
Thayne McCombs | c38dbacbd0 | |
Thayne McCombs | 728b3200c0 | |
dependabot[bot] | 7f74cd9e56 | |
dependabot[bot] | 6ae8da6a39 | |
dependabot[bot] | f699c8bb6a | |
Nathan Bellows | ffde94c10e | |
Nathan Bellows | b0a8848f68 | |
AlbydS | d651a595d4 | |
David Peter | 969316cc0e | |
David Peter | 5b46867507 | |
Thayne McCombs | e117a373a7 | |
dependabot[bot] | a4aed14337 | |
Thayne McCombs | 9cde3c12a2 | |
Thayne McCombs | 906e7a933e | |
dependabot[bot] | 077d28d13a | |
dependabot[bot] | b55bb1e9be | |
Thayne McCombs | 7a6cc92d6d | |
Thayne McCombs | b694c6e673 | |
dependabot[bot] | 17895538a0 | |
dependabot[bot] | 72ff1f9a87 | |
Thayne McCombs | ef3194a510 | |
Maksim Bondarenkov | 8773402246 | |
Rob | ff3fc81db4 | |
Tavian Barnes | 0dc3342c33 | |
Thayne McCombs | c66fc812ac | |
Thayne McCombs | 14ed023875 | |
Tavian Barnes | 58284b8dbe | |
Tavian Barnes | 60889d0b99 | |
dependabot[bot] | 7e19bad0a4 | |
Thayne McCombs | 4b1d73d39d | |
dependabot[bot] | 03e19a1ad2 | |
Thayne McCombs | 8fb9499c20 | |
Thayne McCombs | 38fb6a5958 | |
dependabot[bot] | 49cd62d65e | |
dependabot[bot] | 24bb5216bb | |
dependabot[bot] | 7f8760fd1f | |
Alexandru-Constantin Atomei | 3cb6b9d93a | |
Atomei Alexandru | c591106b86 | |
Alexandru-Constantin Atomei | 9f096737db | |
Alexandru-Constantin Atomei | 1bda165b25 | |
Thayne McCombs | f48372624d | |
Roshan Jossy | 5cd15536b6 | |
Sayan Goswami | aeb4a5fdad | |
Thayne McCombs | 9529f30129 | |
Thayne McCombs | 266311ca33 | |
Tavian Barnes | 954a3900b9 | |
David Peter | 07343b5baf | |
David Peter | a03ed8b300 | |
David Peter | 13a93e5cbe | |
David Peter | d9c4e6239f | |
David Peter | 61ebd9be6a | |
David Peter | e3b40208d5 | |
Tavian Barnes | 16c2d1e1d0 | |
Tavian Barnes | fea1622724 | |
David Peter | 00b64f3ccb | |
Thayne McCombs | 74b850a642 | |
dependabot[bot] | 4202f3939e | |
dependabot[bot] | e1ecba2ce4 | |
dependabot[bot] | 0853e35e1f | |
dependabot[bot] | 4b4a74c988 | |
Tavian Barnes | 84f032eba8 | |
Tavian Barnes | b8a5f95cf2 | |
Tavian Barnes | 73260c0e35 | |
Tavian Barnes | 5903dec289 | |
Tavian Barnes | 571ebb349b | |
Tavian Barnes | d62bbbbcd1 | |
Lena | ad5fb44ddc | |
Tavian Barnes | 8bbbd7679b | |
David Peter | cd32a3827d | |
Tavian Barnes | 66c0637c90 | |
Tavian Barnes | c9df4296f9 | |
Tavian Barnes | 7c5cf28ace | |
Tavian Barnes | 51002c842d | |
Tavian Barnes | 8e582971fa | |
Tavian Barnes | 6daa72f929 | |
dependabot[bot] | 8355d78359 | |
dependabot[bot] | dbc1818073 | |
dependabot[bot] | e57ce7f2a4 | |
dependabot[bot] | d8f89fa59e | |
dependabot[bot] | 350003d8da | |
tkb-github | 15329f9cfa | |
Thayne McCombs | 95b4dff379 | |
Thayne McCombs | c96b1af3be | |
Thayne McCombs | 5ee6365510 | |
Thayne McCombs | 1d57b3a064 | |
Thayne McCombs | 325d419e39 | |
Thayne McCombs | 8b5532d8dd | |
João Marcos P. Bezerra | 7263b5e01d | |
Thayne McCombs | c6fcdbe000 | |
Thayne McCombs | 306dacd0b4 | |
Thayne McCombs | 08910e4e3f | |
Thayne McCombs | 8897659607 | |
Thayne McCombs | 53fd416c47 | |
Thayne McCombs | 5e0018fb1f | |
Thayne McCombs | 054bae01ef | |
David Peter | 8f32a758a4 | |
David Peter | 0fc8facfb7 | |
Thayne McCombs | 069b181625 | |
Thayne McCombs | d9b69c8405 | |
Thayne McCombs | a11f8426d4 | |
Thayne McCombs | e6aa8e82f6 | |
sitiom | 978866d983 | |
Christian Göttsche | 36bc84041b | |
Thayne McCombs | 3ed4ea7538 | |
Karthik Prakash | 9df9a489f0 | |
skoriop | 8c50bc733d | |
skoriop | 3f9794cd1a |
|
@ -44,6 +44,16 @@ jobs:
|
|||
- uses: actions/checkout@v4
|
||||
- run: cargo fmt -- --check
|
||||
|
||||
lint_check:
|
||||
name: Ensure 'cargo clippy' has no warnings
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: clippy
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo clippy --all-targets --all-features -- -Dwarnings
|
||||
|
||||
min_version:
|
||||
name: Minimum supported rust version
|
||||
runs-on: ubuntu-20.04
|
||||
|
@ -71,12 +81,14 @@ jobs:
|
|||
matrix:
|
||||
job:
|
||||
- { target: aarch64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: aarch64-unknown-linux-musl , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-musleabihf, os: ubuntu-20.04, use-cross: true }
|
||||
- { target: i686-pc-windows-msvc , os: windows-2019 }
|
||||
- { target: i686-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: i686-unknown-linux-musl , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: x86_64-apple-darwin , os: macos-12 }
|
||||
- { target: aarch64-apple-darwin , os: macos-14 }
|
||||
- { target: x86_64-pc-windows-gnu , os: windows-2019 }
|
||||
- { target: x86_64-pc-windows-msvc , os: windows-2019 }
|
||||
- { target: x86_64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
|
@ -209,7 +221,7 @@ jobs:
|
|||
|
||||
DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}
|
||||
DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }}-musl
|
||||
case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac;
|
||||
case ${{ matrix.job.target }} in *-musl*) DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac;
|
||||
DPKG_VERSION=${{ needs.crate_metadata.outputs.version }}
|
||||
|
||||
unset DPKG_ARCH
|
||||
|
@ -330,7 +342,7 @@ jobs:
|
|||
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish archives and packages
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: steps.is-release.outputs.IS_RELEASE
|
||||
with:
|
||||
files: |
|
||||
|
@ -341,7 +353,7 @@ jobs:
|
|||
|
||||
winget:
|
||||
name: Publish to Winget
|
||||
runs-on: windows-latest # Action can only run on Windows
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
|
|
75
CHANGELOG.md
75
CHANGELOG.md
|
@ -1,3 +1,78 @@
|
|||
# 10.1.0
|
||||
|
||||
## Features
|
||||
|
||||
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto" to force whether the cwd prefix is stripped or not.
|
||||
- Add a `--format` option which allows using a format template for direct output, similar to the template used for `--exec`. (#1043)
|
||||
|
||||
## Bugfixes
|
||||
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
|
||||
|
||||
# v10.0.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add `dir` as an alias to `directory` when using `-t` / `--type`, see #1460 and #1464 (@Ato2207).
|
||||
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
|
||||
- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature
|
||||
was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. And there isn't really a good way for us to add
|
||||
a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.
|
||||
See #1457.
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
|
||||
- Fix bug that would cause hidden files to be included despite gitignore rules
|
||||
if search path is "." (#1461, BurntSushi/ripgrep#2711).
|
||||
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
|
||||
have a larger system page size than the system that the binary was built on. (#1547)
|
||||
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
|
||||
|
||||
|
||||
## Changes
|
||||
- Minimum supported rust version is now 1.77.2
|
||||
|
||||
|
||||
# v9.0.0
|
||||
|
||||
## Performance
|
||||
|
||||
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
|
||||
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
|
||||
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
|
||||
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
|
||||
|
||||
- The default number of threads is now constrained to be at most 64. This should improve startup time on
|
||||
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
|
||||
|
||||
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
|
||||
use cases, see #1452 and #1313 (@tavianator).
|
||||
|
||||
## Features
|
||||
|
||||
- Support character and block device file types, see #1213 and #1336 (@cgzones)
|
||||
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
|
||||
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
|
||||
|
||||
## Other
|
||||
|
||||
- Fixed documentation typos, see #1409 (@marcospb19)
|
||||
|
||||
## Thanks
|
||||
|
||||
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
|
||||
|
||||
|
||||
|
||||
# v8.7.1
|
||||
|
||||
## Bugfixes
|
||||
|
|
File diff suppressed because it is too large
Load Diff
31
Cargo.toml
31
Cargo.toml
|
@ -16,9 +16,9 @@ license = "MIT OR Apache-2.0"
|
|||
name = "fd-find"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/sharkdp/fd"
|
||||
version = "8.7.1"
|
||||
version = "10.1.0"
|
||||
edition= "2021"
|
||||
rust-version = "1.70.0"
|
||||
rust-version = "1.77.2"
|
||||
|
||||
[badges.appveyor]
|
||||
repository = "sharkdp/fd"
|
||||
|
@ -34,39 +34,38 @@ path = "src/main.rs"
|
|||
version_check = "0.9"
|
||||
|
||||
[dependencies]
|
||||
nu-ansi-term = "0.49"
|
||||
aho-corasick = "1.1"
|
||||
nu-ansi-term = "0.50"
|
||||
argmax = "0.3.1"
|
||||
ignore = "0.4.20"
|
||||
num_cpus = "1.16"
|
||||
regex = "1.9.6"
|
||||
regex-syntax = "0.7"
|
||||
ignore = "0.4.22"
|
||||
regex = "1.10.3"
|
||||
regex-syntax = "0.8"
|
||||
ctrlc = "3.2"
|
||||
humantime = "2.1"
|
||||
globset = "0.4"
|
||||
anyhow = "1.0"
|
||||
etcetera = "0.8"
|
||||
normpath = "1.1.1"
|
||||
crossbeam-channel = "0.5.8"
|
||||
clap_complete = {version = "4.4.1", optional = true}
|
||||
crossbeam-channel = "0.5.12"
|
||||
clap_complete = {version = "4.4.9", optional = true}
|
||||
faccess = "0.2.4"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "4.4.6"
|
||||
version = "4.4.13"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "derive"]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4.28"
|
||||
version = "0.4.38"
|
||||
default-features = false
|
||||
features = ["std", "clock"]
|
||||
|
||||
[dependencies.lscolors]
|
||||
version = "0.15"
|
||||
version = "0.17"
|
||||
default-features = false
|
||||
features = ["nu-ansi-term"]
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
users = "0.11.0"
|
||||
nix = { version = "0.26.2", default-features = false, features = ["signal"] }
|
||||
nix = { version = "0.28.0", default-features = false, features = ["signal", "user"] }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
|
||||
libc = "0.2"
|
||||
|
@ -79,9 +78,9 @@ jemallocator = {version = "0.5.4", optional = true}
|
|||
|
||||
[dev-dependencies]
|
||||
diff = "0.1"
|
||||
tempfile = "3.6"
|
||||
tempfile = "3.10"
|
||||
filetime = "0.2"
|
||||
test-case = "3.1"
|
||||
test-case = "3.3"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
# https://github.com/sharkdp/fd/issues/1085
|
||||
[target.aarch64-unknown-linux-gnu.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
||||
|
||||
[target.aarch64-unknown-linux-musl.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
2
Makefile
2
Makefile
|
@ -6,7 +6,7 @@ datadir=$(prefix)/share
|
|||
exe_name=fd
|
||||
|
||||
$(EXE): Cargo.toml src/**/*.rs
|
||||
cargo build --profile $(PROFILE)
|
||||
cargo build --profile $(PROFILE) --locked
|
||||
|
||||
.PHONY: completions
|
||||
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd
|
||||
|
|
137
README.md
137
README.md
|
@ -10,10 +10,7 @@ It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.o
|
|||
While it does not aim to support all of `find`'s powerful functionality, it provides sensible
|
||||
(opinionated) defaults for a majority of use cases.
|
||||
|
||||
Quick links:
|
||||
* [How to use](#how-to-use)
|
||||
* [Installation](#installation)
|
||||
* [Troubleshooting](#troubleshooting)
|
||||
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Features
|
||||
|
||||
|
@ -143,7 +140,7 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
|
|||
```
|
||||
|
||||
To really search *all* files and directories, simply combine the hidden and ignore features to show
|
||||
everything (`-HI`).
|
||||
everything (`-HI`) or use `-u`/`--unrestricted`.
|
||||
|
||||
### Matching the full path
|
||||
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
|
||||
|
@ -261,12 +258,17 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
|
|||
/mnt/external-drive
|
||||
*.bak
|
||||
```
|
||||
Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
> [!NOTE]
|
||||
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
|
||||
This is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\fd\ignore` in
|
||||
Windows.
|
||||
|
||||
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
|
||||
are not included in output if you use the `--hidden` option.
|
||||
|
||||
### Deleting files
|
||||
|
||||
You can use `fd` to remove all files and directories that are matched by your search pattern.
|
||||
|
@ -284,7 +286,8 @@ option:
|
|||
If you also want to remove a certain class of directories, you can use the same technique. You will
|
||||
have to use `rm`s `--recursive`/`-r` flag to remove directories.
|
||||
|
||||
Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
> [!NOTE]
|
||||
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
|
||||
situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
|
||||
No such file or directory"* errors in the `rm` call.
|
||||
|
@ -313,13 +316,15 @@ Options:
|
|||
-p, --full-path Search full abs. path (default: filename only)
|
||||
-d, --max-depth <depth> Set maximum search depth (default: none)
|
||||
-E, --exclude <pattern> Exclude entries that match the given glob pattern
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p)
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p), char-device
|
||||
(c), block-device (b)
|
||||
-e, --extension <ext> Filter by file extension
|
||||
-S, --size <size> Limit results based on the size of files
|
||||
--changed-within <date|dur> Filter by file modification time (newer than)
|
||||
--changed-before <date|dur> Filter by file modification time (older than)
|
||||
-o, --owner <user:group> Filter by owning user and/or group
|
||||
--format <fmt> Print results according to template
|
||||
-x, --exec <cmd>... Execute a command for each search result
|
||||
-X, --exec-batch <cmd>... Execute a command with all search results at once
|
||||
-c, --color <when> When to use colors [default: auto] [possible values: auto,
|
||||
|
@ -330,64 +335,57 @@ Options:
|
|||
|
||||
## Benchmark
|
||||
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
|
||||
subdirectories and about a million files. For averaging and statistical analysis, I'm using
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000
|
||||
subdirectories and about 4 million files. For averaging and statistical analysis, I'm using
|
||||
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
|
||||
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
|
||||
|
||||
Let's start with `find`:
|
||||
```
|
||||
Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
|
||||
Time (mean ± σ): 7.236 s ± 0.090 s
|
||||
|
||||
Range (min … max): 7.133 s … 7.385 s
|
||||
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
Time (mean ± σ): 19.922 s ± 0.109 s
|
||||
Range (min … max): 19.765 s … 20.065 s
|
||||
```
|
||||
|
||||
`find` is much faster if it does not need to perform a regular-expression search:
|
||||
```
|
||||
Benchmark #2: find ~ -iname '*[0-9].jpg'
|
||||
|
||||
Time (mean ± σ): 3.914 s ± 0.027 s
|
||||
|
||||
Range (min … max): 3.876 s … 3.964 s
|
||||
Benchmark 2: find ~ -iname '*[0-9].jpg'
|
||||
Time (mean ± σ): 11.226 s ± 0.104 s
|
||||
Range (min … max): 11.119 s … 11.466 s
|
||||
```
|
||||
|
||||
Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
|
||||
search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
|
||||
otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
|
||||
Now let's try the same for `fd`. Note that `fd` performs a regular expression
|
||||
search by default. The `-u`/`--unrestricted` option is needed here for
|
||||
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and
|
||||
ignored paths (see below):
|
||||
```
|
||||
Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 811.6 ms ± 26.9 ms
|
||||
|
||||
Range (min … max): 786.0 ms … 870.7 ms
|
||||
Benchmark 3: fd -u '[0-9]\.jpg$' ~
|
||||
Time (mean ± σ): 854.8 ms ± 10.0 ms
|
||||
Range (min … max): 839.2 ms … 868.9 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately nine times faster than `find -iregex`
|
||||
and about five times faster than `find -iname`. By the way, both tools found the exact
|
||||
same 20880 files :smile:.
|
||||
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
|
||||
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
|
||||
same 546 files :smile:.
|
||||
|
||||
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
|
||||
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
|
||||
folders, it is almost an order of magnitude faster:
|
||||
```
|
||||
Benchmark #4: fd '[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 123.7 ms ± 6.0 ms
|
||||
|
||||
Range (min … max): 118.8 ms … 140.0 ms
|
||||
```
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
|
||||
performed quite a lot of different tests (and found consistent results), things might
|
||||
be different for you! I encourage everyone to try it out on their own. See
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While we have
|
||||
performed a lot of different tests (and found consistent results), things might
|
||||
be different for you! We encourage everyone to try it out on their own. See
|
||||
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
|
||||
|
||||
Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
|
||||
in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are
|
||||
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):
|
||||
``` bash
|
||||
> fd -u …
|
||||
```
|
||||
|
||||
### Colorized output
|
||||
|
||||
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
|
||||
|
@ -401,15 +399,6 @@ for alternative, more complete (or more colorful) variants, see [here](https://g
|
|||
|
||||
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the options `-H` and `-I` to disable these two features:
|
||||
``` bash
|
||||
> fd -HI …
|
||||
```
|
||||
|
||||
### `fd` doesn't seem to interpret my regex pattern correctly
|
||||
|
||||
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
|
||||
|
@ -532,7 +521,7 @@ newlines). In the same way, the `-0` option of `xargs` tells it to read the inpu
|
|||
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
|
||||
[officially maintained package](https://packages.ubuntu.com/fd-find):
|
||||
```
|
||||
sudo apt install fd-find
|
||||
apt install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
|
@ -542,7 +531,7 @@ Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
|||
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
|
||||
[release page](https://github.com/sharkdp/fd/releases) and install it via:
|
||||
``` bash
|
||||
sudo dpkg -i fd_8.7.1_amd64.deb # adapt version number and architecture
|
||||
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
|
||||
```
|
||||
|
||||
### On Debian
|
||||
|
@ -550,7 +539,7 @@ sudo dpkg -i fd_8.7.1_amd64.deb # adapt version number and architecture
|
|||
If you run Debian Buster or newer, you can install the
|
||||
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
|
||||
```
|
||||
sudo apt-get install fd-find
|
||||
apt-get install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
|
@ -578,6 +567,8 @@ You can install [the fd package](https://www.archlinux.org/packages/community/x8
|
|||
```
|
||||
pacman -S fd
|
||||
```
|
||||
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
|
||||
|
||||
### On Gentoo Linux
|
||||
|
||||
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
|
||||
|
@ -599,7 +590,21 @@ You can install `fd` via xbps-install:
|
|||
xbps-install -S fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8 (RHEL8), Almalinux 8, EuroLinux 8 or Rocky Linux 8
|
||||
### On ALT Linux
|
||||
|
||||
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:
|
||||
```
|
||||
apt-get install fd
|
||||
```
|
||||
|
||||
### On Solus
|
||||
|
||||
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
|
||||
```
|
||||
eopkg install fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
|
||||
|
||||
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
|
||||
|
||||
|
@ -608,7 +613,7 @@ dnf copr enable tkbcopr/fd
|
|||
dnf install fd
|
||||
```
|
||||
|
||||
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8 repo.
|
||||
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
|
||||
|
||||
### On macOS
|
||||
|
||||
|
@ -619,7 +624,7 @@ brew install fd
|
|||
|
||||
… or with MacPorts:
|
||||
```
|
||||
sudo port install fd
|
||||
port install fd
|
||||
```
|
||||
|
||||
### On Windows
|
||||
|
@ -664,7 +669,7 @@ pkg install fd-find
|
|||
|
||||
### From npm
|
||||
|
||||
On linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
|
||||
```
|
||||
npm install -g fd-find
|
||||
|
@ -676,7 +681,7 @@ With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can
|
|||
```
|
||||
cargo install fd-find
|
||||
```
|
||||
Note that rust version *1.64.0* or later is required.
|
||||
Note that rust version *1.77.2* or later is required.
|
||||
|
||||
`make` is also needed for the build.
|
||||
|
||||
|
@ -707,8 +712,6 @@ cargo install --path .
|
|||
|
||||
## License
|
||||
|
||||
Copyright (c) 2017-2021 The fd developers
|
||||
|
||||
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
|
||||
|
||||
See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.
|
||||
|
|
|
@ -26,6 +26,8 @@ _fd() {
|
|||
{l,symlink}'\:"symbolic links"'
|
||||
{e,empty}'\:"empty files or directories"'
|
||||
{x,executable}'\:"executable (files)"'
|
||||
{b,block-device}'\:"block devices"'
|
||||
{c,char-device}'\:"character devices"'
|
||||
{s,socket}'\:"sockets"'
|
||||
{p,pipe}'\:"named pipes (FIFOs)"'
|
||||
)
|
||||
|
@ -160,7 +162,7 @@ _fd() {
|
|||
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
|
||||
|
||||
+ strip-cwd-prefix
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]'
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=[When to strip ./]:when:(always never auto)'
|
||||
|
||||
+ and
|
||||
'--and=[additional required search path]:pattern'
|
||||
|
|
|
@ -29,11 +29,19 @@ By default
|
|||
.B fd
|
||||
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
|
||||
with the '\-\-glob' option.
|
||||
.P
|
||||
By default
|
||||
.B fd
|
||||
will exclude hidden files and directories, as well as any files that match gitignore rules
|
||||
or ignore rules in .ignore or .fdignore files.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-H, \-\-hidden
|
||||
Include hidden files and directories in the search results
|
||||
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
|
||||
.IP
|
||||
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
|
||||
is also used.
|
||||
.TP
|
||||
.B \-I, \-\-no\-ignore
|
||||
Show search results from files and directories that would otherwise be ignored by
|
||||
|
@ -148,9 +156,20 @@ can be used as an alias.
|
|||
Enable the display of filesystem errors for situations such as insufficient
|
||||
permissions or dead symlinks.
|
||||
.TP
|
||||
.B \-\-strip-cwd-prefix
|
||||
By default, relative paths are prefixed with './' when the output goes to a non interactive terminal
|
||||
(TTY). Use this flag to disable this behaviour.
|
||||
.B \-\-strip-cwd-prefix [when]
|
||||
By default, relative paths are prefixed with './' when -x/--exec,
|
||||
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
path starting with '-' being treated as a command line option. Use
|
||||
this flag to change this behavior. If this flag is used without a value,
|
||||
it is equivalent to passing "always". Possible values are:
|
||||
.RS
|
||||
.IP never
|
||||
Never strip the ./ at the beginning of paths
|
||||
.IP always
|
||||
Always strip the ./ at the beginning of paths
|
||||
.IP auto
|
||||
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-one\-file\-system, \-\-mount, \-\-xdev
|
||||
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
|
||||
|
@ -180,10 +199,14 @@ Filter search by type:
|
|||
.RS
|
||||
.IP "f, file"
|
||||
regular files
|
||||
.IP "d, directory"
|
||||
.IP "d, dir, directory"
|
||||
directories
|
||||
.IP "l, symlink"
|
||||
symbolic links
|
||||
.IP "b, block-device"
|
||||
block devices
|
||||
.IP "c, char-device"
|
||||
character devices
|
||||
.IP "s, socket"
|
||||
sockets
|
||||
.IP "p, pipe"
|
||||
|
@ -297,8 +320,9 @@ tebibytes
|
|||
Filter results based on the file modification time.
|
||||
Files with modification times greater than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
\fB\-\-change-newer-than\fR,
|
||||
.B --newer
|
||||
or
|
||||
|
@ -309,13 +333,15 @@ Examples:
|
|||
\-\-changed-within 2weeks
|
||||
\-\-change-newer-than "2018-10-27 10:00:00"
|
||||
\-\-newer 2018-10-27
|
||||
\-\-changed-after @1704067200
|
||||
.TP
|
||||
.BI "\-\-changed-before " date|duration
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times less than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time in either full RFC3339 format with time zone, or as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR).
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
.B --change-older-than
|
||||
or
|
||||
.B --older
|
||||
|
@ -324,6 +350,7 @@ can be used as aliases.
|
|||
Examples:
|
||||
\-\-changed-before "2018-10-27 10:00:00"
|
||||
\-\-change-older-than 2weeks
|
||||
\-\-older @1704067200
|
||||
.TP
|
||||
.BI "-o, \-\-owner " [user][:group]
|
||||
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
|
||||
|
@ -348,6 +375,30 @@ Set the path separator to use when printing file paths. The default is the OS-sp
|
|||
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
|
||||
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
|
||||
.TP
|
||||
.BI "\-\-format " fmt
|
||||
Specify a template string that is used for printing a line for each file found.
|
||||
|
||||
The following placeholders are substituted into the string for each file before printing:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.IP {{
|
||||
literal '{' (an escape sequence)
|
||||
.IP }}
|
||||
literal '}' (an escape sequence)
|
||||
.P
|
||||
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
|
||||
useful if you need to include the literal text of one of the above placeholders.
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-x, \-\-exec " command
|
||||
.RS
|
||||
Execute
|
||||
|
@ -364,19 +415,13 @@ This option can be specified multiple times, in which case all commands are run
|
|||
file found, in the order they are provided. In that case, you must supply a ';' argument for
|
||||
all but the last commands.
|
||||
|
||||
The following placeholders are substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
If parallelism is enabled, the order commands will be executed in is non-deterministic. And even with
|
||||
--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is
|
||||
recommended that you don't rely on any ordering of the results.
|
||||
|
||||
Before executing the command, any placeholder patterns in the command are replaced with the
|
||||
corresponding values for the current file. The same placeholders are used as in the "\-\-format"
|
||||
option.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
|
@ -400,19 +445,12 @@ Examples:
|
|||
Execute
|
||||
.I command
|
||||
once, with all search results as arguments.
|
||||
One of the following placeholders is substituted before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path (of all search results)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.RE
|
||||
|
||||
The order of the arguments is non-deterministic and should not be relied upon.
|
||||
|
||||
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of expanding
|
||||
once per command invocation each argument containing a placeholder is expanding for every
|
||||
file in a batch and passed as separate arguments.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
|
@ -461,6 +499,17 @@ is set, use
|
|||
.IR $XDG_CONFIG_HOME/fd/ignore .
|
||||
Otherwise, use
|
||||
.IR $HOME/.config/fd/ignore .
|
||||
.SH FILES
|
||||
.TP
|
||||
.B .fdignore
|
||||
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
|
||||
that should be excluded from the search. However, this file is specific to fd, and will be used even
|
||||
if the --no-ignore-vcs option is used.
|
||||
.TP
|
||||
.B $XDG_CONFIG_HOME/fd/ignore
|
||||
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore)
|
||||
ignore entries in this file will be ignored, as if it was an .fdignore file in the
|
||||
current directory.
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
.RI "Find files and directories that match the pattern '" needle "':"
|
||||
|
@ -474,6 +523,16 @@ $ fd -e py
|
|||
.TP
|
||||
.RI "Open all search results with vim:"
|
||||
$ fd pattern -X vim
|
||||
.SH Tips and Tricks
|
||||
.IP \[bu]
|
||||
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
|
||||
".git" folders will be ignored by default, even when the --hidden option is used.
|
||||
.IP \[bu]
|
||||
You can use a shell alias or a wrapper script in order to pass desired flags to fd
|
||||
by default. For example if you do not like the default behavior of respecting gitignore,
|
||||
you can use
|
||||
`alias fd="/usr/bin/fd --no-ignore-vcs"`
|
||||
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
|
||||
.SH BUGS
|
||||
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
|
||||
.SH SEE ALSO
|
||||
|
|
|
@ -9,7 +9,7 @@ necessary changes for the upcoming release.
|
|||
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
|
||||
Make sure to `git add` the `Cargo.lock` changes as well.
|
||||
- [ ] Find the current min. supported Rust version by running
|
||||
`grep '^\s*MIN_SUPPORTED_RUST_VERSION' .github/workflows/CICD.yml`.
|
||||
`grep rust-version Cargo.toml`.
|
||||
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
|
||||
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
|
||||
to the version of this release.
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
## Sponsors
|
||||
|
||||
`fd` development is sponsored by many individuals and companies. Thank you very much!
|
||||
|
||||
Please note, that being sponsored does not affect the individuality of the `fd`
|
||||
project or affect the maintainers' actions in any way.
|
||||
We remain impartial and continue to assess pull requests solely on merit - the
|
||||
features added, bugs solved, and effect on the overall complexity of the code.
|
||||
No issue will have a different priority based on sponsorship status of the
|
||||
reporter.
|
||||
|
||||
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.
|
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 7.2 KiB |
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/bash
|
||||
|
||||
set -eu
|
||||
|
||||
# This script automates the "Version bump" section
|
||||
|
||||
version="$1"
|
||||
|
||||
if [[ -z $version ]]; then
|
||||
echo "Usage: must supply version as first argument" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git switch -C "release-$version"
|
||||
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
|
||||
|
||||
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
|
||||
|
||||
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
|
||||
|
||||
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md
|
||||
|
125
src/cli.rs
125
src/cli.rs
|
@ -1,3 +1,4 @@
|
|||
use std::num::NonZeroUsize;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Duration;
|
||||
|
||||
|
@ -32,6 +33,8 @@ pub struct Opts {
|
|||
/// Include hidden directories and files in the search results (default:
|
||||
/// hidden files and directories are skipped). Files and directories are
|
||||
/// considered to be hidden if their name starts with a `.` sign (dot).
|
||||
/// Any files or directories that are ignored due to the rules described by
|
||||
/// --no-ignore are still ignored unless otherwise specified.
|
||||
/// The flag can be overridden with --no-hidden.
|
||||
#[arg(
|
||||
long,
|
||||
|
@ -46,7 +49,7 @@ pub struct Opts {
|
|||
no_hidden: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,
|
||||
/// The flag can be overridden with --ignore.
|
||||
#[arg(
|
||||
long,
|
||||
|
@ -60,8 +63,9 @@ pub struct Opts {
|
|||
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore: (),
|
||||
|
||||
///Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore' files. The flag can be overridden with --ignore-vcs.
|
||||
///Show search results from files and directories that
|
||||
///would otherwise be ignored by '.gitignore' files.
|
||||
///The flag can be overridden with --ignore-vcs.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
|
@ -222,7 +226,7 @@ pub struct Opts {
|
|||
alias = "dereference",
|
||||
long_help = "By default, fd does not descend into symlinked directories. Using this \
|
||||
flag, symbolic links are also traversed. \
|
||||
Flag can be overriden with --no-follow."
|
||||
Flag can be overridden with --no-follow."
|
||||
)]
|
||||
pub follow: bool,
|
||||
|
||||
|
@ -309,10 +313,12 @@ pub struct Opts {
|
|||
|
||||
/// Filter the search by type:
|
||||
/// {n} 'f' or 'file': regular files
|
||||
/// {n} 'd' or 'directory': directories
|
||||
/// {n} 'd' or 'dir' or 'directory': directories
|
||||
/// {n} 'l' or 'symlink': symbolic links
|
||||
/// {n} 's' or 'socket': socket
|
||||
/// {n} 'p' or 'pipe': named pipe (FIFO)
|
||||
/// {n} 'b' or 'block-device': block device
|
||||
/// {n} 'c' or 'char-device': character device
|
||||
/// {n}{n} 'x' or 'executable': executables
|
||||
/// {n} 'e' or 'empty': empty files or directories
|
||||
///
|
||||
|
@ -345,8 +351,9 @@ pub struct Opts {
|
|||
value_name = "filetype",
|
||||
hide_possible_values = true,
|
||||
value_enum,
|
||||
help = "Filter by type: file (f), directory (d), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p)",
|
||||
help = "Filter by type: file (f), directory (d/dir), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p), \
|
||||
char-device (c), block-device (b)",
|
||||
long_help
|
||||
)]
|
||||
pub filetype: Option<Vec<FileType>>,
|
||||
|
@ -391,7 +398,7 @@ pub struct Opts {
|
|||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// greater than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// If the time is not specified, it defaults to 00:00:00.
|
||||
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
|
||||
///
|
||||
|
@ -413,7 +420,7 @@ pub struct Opts {
|
|||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// less than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min).
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// '--change-older-than' or '--older' can be used as aliases.
|
||||
///
|
||||
/// Examples:
|
||||
|
@ -445,6 +452,20 @@ pub struct Opts {
|
|||
)]
|
||||
pub owner: Option<OwnerFilter>,
|
||||
|
||||
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
|
||||
/// '{}': path (of the current search result)
|
||||
/// '{/}': basename
|
||||
/// '{//}': parent directory
|
||||
/// '{.}': path without file extension
|
||||
/// '{/.}': basename without file extension
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "fmt",
|
||||
help = "Print results according to template",
|
||||
conflicts_with = "list_details"
|
||||
)]
|
||||
pub format: Option<String>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub exec: Exec,
|
||||
|
||||
|
@ -490,8 +511,8 @@ pub struct Opts {
|
|||
|
||||
/// Set number of threads to use for searching & executing (default: number
|
||||
/// of available CPU cores)
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = clap::value_parser!(u32).range(1..))]
|
||||
pub threads: Option<u32>,
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
|
||||
pub threads: Option<NonZeroUsize>,
|
||||
|
||||
/// Milliseconds to buffer before streaming search results to console
|
||||
///
|
||||
|
@ -610,9 +631,10 @@ pub struct Opts {
|
|||
/// By default, relative paths are prefixed with './' when -x/--exec,
|
||||
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
/// path starting with '-' being treated as a command line option. Use
|
||||
/// this flag to disable this behaviour.
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), hide_short_help = true, long_help)]
|
||||
pub strip_cwd_prefix: bool,
|
||||
/// this flag to change this behavior. If this flag is used without a value,
|
||||
/// it is equivalent to passing "always".
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)]
|
||||
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
|
||||
|
||||
/// By default, fd will traverse the file system tree as far as other options
|
||||
/// dictate. With this flag, fd ensures that it does not descend into a
|
||||
|
@ -635,7 +657,7 @@ impl Opts {
|
|||
} else if !self.search_path.is_empty() {
|
||||
&self.search_path
|
||||
} else {
|
||||
let current_directory = Path::new(".");
|
||||
let current_directory = Path::new("./");
|
||||
ensure_current_directory_exists(current_directory)?;
|
||||
return Ok(vec![self.normalize_path(current_directory)]);
|
||||
};
|
||||
|
@ -658,6 +680,9 @@ impl Opts {
|
|||
fn normalize_path(&self, path: &Path) -> PathBuf {
|
||||
if self.absolute_path {
|
||||
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
|
||||
} else if path == Path::new(".") {
|
||||
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
|
||||
PathBuf::from("./")
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
|
@ -680,17 +705,8 @@ impl Opts {
|
|||
self.min_depth.or(self.exact_depth)
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> usize {
|
||||
// This will panic if the number of threads passed in is more than usize::MAX in an environment
|
||||
// where usize is less than 32 bits (for example 16-bit architectures). It's pretty
|
||||
// unlikely fd will be running in such an environment, and even more unlikely someone would
|
||||
// be trying to use that many threads on such an environment, so I think panicing is an
|
||||
// appropriate way to handle that.
|
||||
std::cmp::max(
|
||||
self.threads
|
||||
.map_or_else(num_cpus::get, |n| n.try_into().expect("too many threads")),
|
||||
1,
|
||||
)
|
||||
pub fn threads(&self) -> NonZeroUsize {
|
||||
self.threads.unwrap_or_else(default_num_threads)
|
||||
}
|
||||
|
||||
pub fn max_results(&self) -> Option<usize> {
|
||||
|
@ -699,6 +715,16 @@ impl Opts {
|
|||
.or_else(|| self.max_one_result.then_some(1))
|
||||
}
|
||||
|
||||
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
|
||||
use self::StripCwdWhen::*;
|
||||
self.no_search_paths()
|
||||
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
|
||||
Auto => auto_pred(),
|
||||
Always => true,
|
||||
Never => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
pub fn gen_completions(&self) -> anyhow::Result<Option<Shell>> {
|
||||
self.gen_completions
|
||||
|
@ -712,14 +738,32 @@ impl Opts {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the default number of threads to use, if not explicitly specified.
|
||||
fn default_num_threads() -> NonZeroUsize {
|
||||
// If we can't get the amount of parallelism for some reason, then
|
||||
// default to a single thread, because that is safe.
|
||||
let fallback = NonZeroUsize::MIN;
|
||||
// To limit startup overhead on massively parallel machines, don't use more
|
||||
// than 64 threads.
|
||||
let limit = NonZeroUsize::new(64).unwrap();
|
||||
|
||||
std::thread::available_parallelism()
|
||||
.unwrap_or(fallback)
|
||||
.min(limit)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
|
||||
pub enum FileType {
|
||||
#[value(alias = "f")]
|
||||
File,
|
||||
#[value(alias = "d")]
|
||||
#[value(alias = "d", alias = "dir")]
|
||||
Directory,
|
||||
#[value(alias = "l")]
|
||||
Symlink,
|
||||
#[value(alias = "b")]
|
||||
BlockDevice,
|
||||
#[value(alias = "c")]
|
||||
CharDevice,
|
||||
/// A file which is executable by the current effective user
|
||||
#[value(alias = "x")]
|
||||
Executable,
|
||||
|
@ -741,15 +785,14 @@ pub enum ColorWhen {
|
|||
Never,
|
||||
}
|
||||
|
||||
impl ColorWhen {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
use ColorWhen::*;
|
||||
match *self {
|
||||
Auto => "auto",
|
||||
Never => "never",
|
||||
Always => "always",
|
||||
}
|
||||
}
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
|
||||
pub enum StripCwdWhen {
|
||||
/// Use the default behavior
|
||||
Auto,
|
||||
/// Always strip the ./ at the beginning of paths
|
||||
Always,
|
||||
/// Never strip the ./
|
||||
Never,
|
||||
}
|
||||
|
||||
// there isn't a derive api for getting grouped values yet,
|
||||
|
@ -793,6 +836,7 @@ impl clap::Args for Exec {
|
|||
.help("Execute a command for each search result")
|
||||
.long_help(
|
||||
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
|
||||
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
|
||||
All positional arguments following --exec are considered to be arguments to the command - not to fd. \
|
||||
It is therefore recommended to place the '-x'/'--exec' option last.\n\
|
||||
The following placeholders are substituted before the command is executed:\n \
|
||||
|
@ -800,7 +844,9 @@ impl clap::Args for Exec {
|
|||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n\n\
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- find all *.zip files and unzip them:\n\n \
|
||||
|
@ -825,12 +871,15 @@ impl clap::Args for Exec {
|
|||
.help("Execute a command with all search results at once")
|
||||
.long_help(
|
||||
"Execute the given command once, with all search results as arguments.\n\
|
||||
The order of the arguments is non-deterministic, and should not be relied upon.\n\
|
||||
One of the following placeholders is substituted before the command is executed:\n \
|
||||
'{}': path (of all search results)\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n\n\
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- Find all test_*.py files and open them in your favorite editor:\n\n \
|
||||
|
|
|
@ -8,6 +8,7 @@ use crate::filetypes::FileTypes;
|
|||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::{SizeFilter, TimeFilter};
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
/// Configuration options for *fd*.
|
||||
pub struct Config {
|
||||
|
@ -85,6 +86,9 @@ pub struct Config {
|
|||
/// The value (if present) will be a lowercase string without leading dots.
|
||||
pub extensions: Option<RegexSet>,
|
||||
|
||||
/// A format string to use to format results, similarly to exec
|
||||
pub format: Option<FormatTemplate>,
|
||||
|
||||
/// If a value is supplied, each item found will be used to generate and execute commands.
|
||||
pub command: Option<Arc<CommandSet>>,
|
||||
|
||||
|
|
|
@ -8,11 +8,13 @@ use lscolors::{Colorable, LsColors, Style};
|
|||
use crate::config::Config;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum DirEntryInner {
|
||||
Normal(ignore::DirEntry),
|
||||
BrokenSymlink(PathBuf),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DirEntry {
|
||||
inner: DirEntryInner,
|
||||
metadata: OnceCell<Option<Metadata>>,
|
||||
|
@ -111,7 +113,7 @@ impl Eq for DirEntry {}
|
|||
impl PartialOrd for DirEntry {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
self.path().partial_cmp(other.path())
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
use std::sync::Mutex;
|
||||
|
||||
use crossbeam_channel::Receiver;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::walk::WorkerResult;
|
||||
|
@ -14,7 +11,7 @@ use super::CommandSet;
|
|||
/// generate a command with the supplied command template. The generated command will then
|
||||
/// be executed, and this process will continue until the receiver's sender has closed.
|
||||
pub fn job(
|
||||
rx: Receiver<WorkerResult>,
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
out_perm: &Mutex<()>,
|
||||
config: &Config,
|
||||
|
@ -22,35 +19,39 @@ pub fn job(
|
|||
// Output should be buffered when only running a single thread
|
||||
let buffer_output: bool = config.threads > 1;
|
||||
|
||||
let mut results: Vec<ExitCode> = Vec::new();
|
||||
loop {
|
||||
let mut ret = ExitCode::Success;
|
||||
for result in results {
|
||||
// Obtain the next result from the receiver, else if the channel
|
||||
// has closed, exit from the loop
|
||||
let dir_entry: DirEntry = match rx.recv() {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => dir_entry,
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
let dir_entry = match result {
|
||||
WorkerResult::Entry(dir_entry) => dir_entry,
|
||||
WorkerResult::Error(err) => {
|
||||
if config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Err(_) => break,
|
||||
};
|
||||
|
||||
// Generate a command, execute it and store its exit code.
|
||||
results.push(cmd.execute(
|
||||
let code = cmd.execute(
|
||||
dir_entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
out_perm,
|
||||
buffer_output,
|
||||
))
|
||||
);
|
||||
ret = merge_exitcodes([ret, code]);
|
||||
}
|
||||
// Returns error in case of any error.
|
||||
merge_exitcodes(results)
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn batch(rx: Receiver<WorkerResult>, cmd: &CommandSet, config: &Config) -> ExitCode {
|
||||
let paths = rx
|
||||
pub fn batch(
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
let paths = results
|
||||
.into_iter()
|
||||
.filter_map(|worker_result| match worker_result {
|
||||
WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),
|
||||
|
|
226
src/exec/mod.rs
226
src/exec/mod.rs
|
@ -1,26 +1,21 @@
|
|||
mod command;
|
||||
mod input;
|
||||
mod job;
|
||||
mod token;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::ffi::OsString;
|
||||
use std::io;
|
||||
use std::iter;
|
||||
use std::path::{Component, Path, PathBuf, Prefix};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use argmax::Command;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::fmt::{FormatTemplate, Token};
|
||||
|
||||
use self::command::{execute_commands, handle_cmd_error};
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
pub use self::job::{batch, job};
|
||||
use self::token::Token;
|
||||
|
||||
/// Execution mode of the command
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
@ -132,7 +127,7 @@ impl CommandSet {
|
|||
#[derive(Debug)]
|
||||
struct CommandBuilder {
|
||||
pre_args: Vec<OsString>,
|
||||
path_arg: ArgumentTemplate,
|
||||
path_arg: FormatTemplate,
|
||||
post_args: Vec<OsString>,
|
||||
cmd: Command,
|
||||
count: usize,
|
||||
|
@ -221,7 +216,7 @@ impl CommandBuilder {
|
|||
/// `generate_and_execute()` method will be used to generate a command and execute it.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
struct CommandTemplate {
|
||||
args: Vec<ArgumentTemplate>,
|
||||
args: Vec<FormatTemplate>,
|
||||
}
|
||||
|
||||
impl CommandTemplate {
|
||||
|
@ -230,52 +225,15 @@ impl CommandTemplate {
|
|||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
static PLACEHOLDER_PATTERN: OnceLock<Regex> = OnceLock::new();
|
||||
|
||||
let mut args = Vec::new();
|
||||
let mut has_placeholder = false;
|
||||
|
||||
for arg in input {
|
||||
let arg = arg.as_ref();
|
||||
|
||||
let mut tokens = Vec::new();
|
||||
let mut start = 0;
|
||||
|
||||
let pattern =
|
||||
PLACEHOLDER_PATTERN.get_or_init(|| Regex::new(r"\{(/?\.?|//)\}").unwrap());
|
||||
|
||||
for placeholder in pattern.find_iter(arg) {
|
||||
// Leading text before the placeholder.
|
||||
if placeholder.start() > start {
|
||||
tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
|
||||
}
|
||||
|
||||
start = placeholder.end();
|
||||
|
||||
match placeholder.as_str() {
|
||||
"{}" => tokens.push(Token::Placeholder),
|
||||
"{.}" => tokens.push(Token::NoExt),
|
||||
"{/}" => tokens.push(Token::Basename),
|
||||
"{//}" => tokens.push(Token::Parent),
|
||||
"{/.}" => tokens.push(Token::BasenameNoExt),
|
||||
_ => unreachable!("Unhandled placeholder"),
|
||||
}
|
||||
|
||||
has_placeholder = true;
|
||||
}
|
||||
|
||||
// Without a placeholder, the argument is just fixed text.
|
||||
if tokens.is_empty() {
|
||||
args.push(ArgumentTemplate::Text(arg.to_owned()));
|
||||
continue;
|
||||
}
|
||||
|
||||
if start < arg.len() {
|
||||
// Trailing text after last placeholder.
|
||||
tokens.push(Token::Text(arg[start..].to_owned()));
|
||||
}
|
||||
|
||||
args.push(ArgumentTemplate::Tokens(tokens));
|
||||
let tmpl = FormatTemplate::parse(arg);
|
||||
has_placeholder |= tmpl.has_tokens();
|
||||
args.push(tmpl);
|
||||
}
|
||||
|
||||
// We need to check that we have at least one argument, because if not
|
||||
|
@ -289,7 +247,7 @@ impl CommandTemplate {
|
|||
|
||||
// If a placeholder token was not supplied, append one at the end of the command.
|
||||
if !has_placeholder {
|
||||
args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
|
||||
args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));
|
||||
}
|
||||
|
||||
Ok(CommandTemplate { args })
|
||||
|
@ -312,115 +270,18 @@ impl CommandTemplate {
|
|||
}
|
||||
}
|
||||
|
||||
/// Represents a template for a single command argument.
|
||||
///
|
||||
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
|
||||
/// a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
enum ArgumentTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl ArgumentTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, ArgumentTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
/// Generate an argument from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Text arguments and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use self::Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
ArgumentTemplate::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match *token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
ArgumentTemplate::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
|
||||
template
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| arg.generate(input, None).into_string().unwrap())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_placeholder() {
|
||||
assert_eq!(
|
||||
|
@ -428,9 +289,9 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Text("${SHELL}:".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Text("${SHELL}:".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
]
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -445,8 +306,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -461,8 +322,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Basename]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Basename]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -477,8 +338,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Parent]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Parent]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -493,8 +354,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
|
@ -502,6 +363,21 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces() {
|
||||
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
|
||||
assert_eq!(
|
||||
generate_str(&template, "foo"),
|
||||
vec!["{}", "{", "{.}", "foo"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces_and_placeholder() {
|
||||
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
|
||||
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_multiple() {
|
||||
assert_eq!(
|
||||
|
@ -509,9 +385,9 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("cp".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
ArgumentTemplate::Tokens(vec![
|
||||
FormatTemplate::Text("cp".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Tokens(vec![
|
||||
Token::BasenameNoExt,
|
||||
Token::Text(".ext".into())
|
||||
]),
|
||||
|
@ -529,8 +405,8 @@ mod tests {
|
|||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::Batch,
|
||||
|
@ -555,7 +431,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn generate_custom_path_separator() {
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
|
@ -570,7 +446,7 @@ mod tests {
|
|||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn generate_custom_path_separator_windows() {
|
||||
let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]);
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
use std::fmt::{self, Display, Formatter};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -59,6 +59,26 @@ pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_block_device(ft: fs::FileType) -> bool {
|
||||
ft.is_block_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_block_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_char_device(ft: fs::FileType) -> bool {
|
||||
ft.is_char_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_char_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_socket(ft: fs::FileType) -> bool {
|
||||
ft.is_socket()
|
||||
|
@ -108,13 +128,11 @@ pub fn strip_current_dir(path: &Path) -> &Path {
|
|||
pub fn default_path_separator() -> Option<String> {
|
||||
if cfg!(windows) {
|
||||
let msystem = env::var("MSYSTEM").ok()?;
|
||||
match msystem.as_str() {
|
||||
"MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()),
|
||||
_ => None,
|
||||
if !msystem.is_empty() {
|
||||
return Some("/".to_owned());
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -9,6 +9,8 @@ pub struct FileTypes {
|
|||
pub files: bool,
|
||||
pub directories: bool,
|
||||
pub symlinks: bool,
|
||||
pub block_devices: bool,
|
||||
pub char_devices: bool,
|
||||
pub sockets: bool,
|
||||
pub pipes: bool,
|
||||
pub executables_only: bool,
|
||||
|
@ -21,6 +23,8 @@ impl FileTypes {
|
|||
(!self.files && entry_type.is_file())
|
||||
|| (!self.directories && entry_type.is_dir())
|
||||
|| (!self.symlinks && entry_type.is_symlink())
|
||||
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|
||||
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|
||||
|| (!self.sockets && filesystem::is_socket(*entry_type))
|
||||
|| (!self.pipes && filesystem::is_pipe(*entry_type))
|
||||
|| (self.executables_only && !entry.path().executable())
|
||||
|
@ -28,6 +32,8 @@ impl FileTypes {
|
|||
|| !(entry_type.is_file()
|
||||
|| entry_type.is_dir()
|
||||
|| entry_type.is_symlink()
|
||||
|| filesystem::is_block_device(*entry_type)
|
||||
|| filesystem::is_char_device(*entry_type)
|
||||
|| filesystem::is_socket(*entry_type)
|
||||
|| filesystem::is_pipe(*entry_type))
|
||||
} else {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
use nix::unistd::{Group, User};
|
||||
use std::fs;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
|
@ -35,16 +36,22 @@ impl OwnerFilter {
|
|||
}
|
||||
|
||||
let uid = Check::parse(fst, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_user_by_name(s).map(|user| user.uid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
if let Ok(uid) = s.parse() {
|
||||
Ok(uid)
|
||||
} else {
|
||||
User::from_name(s)?
|
||||
.map(|user| user.uid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
}
|
||||
})?;
|
||||
let gid = Check::parse(snd, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_group_by_name(s).map(|group| group.gid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
if let Ok(gid) = s.parse() {
|
||||
Ok(gid)
|
||||
} else {
|
||||
Group::from_name(s)?
|
||||
.map(|group| group.gid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(OwnerFilter { uid, gid })
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use chrono::{offset::TimeZone, DateTime, Local, NaiveDate};
|
||||
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime};
|
||||
|
||||
use std::time::SystemTime;
|
||||
|
||||
|
@ -20,11 +20,21 @@ impl TimeFilter {
|
|||
.ok()
|
||||
.or_else(|| {
|
||||
NaiveDate::parse_from_str(s, "%F")
|
||||
.ok()
|
||||
.and_then(|nd| nd.and_hms_opt(0, 0, 0))
|
||||
.and_then(|ndt| Local.from_local_datetime(&ndt).single())
|
||||
.ok()?
|
||||
.and_hms_opt(0, 0, 0)?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
NaiveDateTime::parse_from_str(s, "%F %T")
|
||||
.ok()?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
|
||||
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
|
||||
})
|
||||
.or_else(|| Local.datetime_from_str(s, "%F %T").ok())
|
||||
.map(|dt| dt.into())
|
||||
})
|
||||
}
|
||||
|
@ -52,8 +62,10 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn is_time_filter_applicable() {
|
||||
let ref_time = Local
|
||||
.datetime_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
.unwrap()
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
.unwrap()
|
||||
.into();
|
||||
|
||||
|
@ -127,5 +139,32 @@ mod tests {
|
|||
assert!(!TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
|
||||
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
|
||||
.unwrap()
|
||||
.into();
|
||||
let t1m_ago = ref_time - Duration::from_secs(60);
|
||||
let t1s_later = ref_time + Duration::from_secs(1);
|
||||
// Timestamp only supported via '@' prefix
|
||||
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
|
||||
assert!(
|
||||
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,10 +34,10 @@ pub fn dirname(path: &Path) -> OsString {
|
|||
#[cfg(test)]
|
||||
mod path_tests {
|
||||
use super::*;
|
||||
use std::path::MAIN_SEPARATOR;
|
||||
use std::path::MAIN_SEPARATOR_STR;
|
||||
|
||||
fn correct(input: &str) -> String {
|
||||
input.replace('/', &MAIN_SEPARATOR.to_string())
|
||||
input.replace('/', MAIN_SEPARATOR_STR)
|
||||
}
|
||||
|
||||
macro_rules! func_tests {
|
|
@ -0,0 +1,281 @@
|
|||
mod input;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::path::{Component, Path, Prefix};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use aho_corasick::AhoCorasick;
|
||||
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed format string
|
||||
///
|
||||
/// This is either a collection of `Token`s including at least one placeholder variant,
|
||||
/// or a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum FormatTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
|
||||
|
||||
impl FormatTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, FormatTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
pub fn parse(fmt: &str) -> Self {
|
||||
// NOTE: we assume that { and } have the same length
|
||||
const BRACE_LEN: usize = '{'.len_utf8();
|
||||
let mut tokens = Vec::new();
|
||||
let mut remaining = fmt;
|
||||
let mut buf = String::new();
|
||||
let placeholders = PLACEHOLDERS.get_or_init(|| {
|
||||
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
|
||||
});
|
||||
while let Some(m) = placeholders.find(remaining) {
|
||||
match m.pattern().as_u32() {
|
||||
0 | 1 => {
|
||||
// we found an escaped {{ or }}, so add
|
||||
// everything up to the first char to the buffer
|
||||
// then skip the second one.
|
||||
buf += &remaining[..m.start() + BRACE_LEN];
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
id if !remaining[m.end()..].starts_with('}') => {
|
||||
buf += &remaining[..m.start()];
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(std::mem::take(&mut buf)));
|
||||
}
|
||||
tokens.push(token_from_pattern_id(id));
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
_ => {
|
||||
// We got a normal pattern, but the final "}"
|
||||
// is escaped, so add up to that to the buffer, then
|
||||
// skip the final }
|
||||
buf += &remaining[..m.end()];
|
||||
remaining = &remaining[m.end() + BRACE_LEN..];
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add the rest of the string to the buffer, and add the final buffer to the tokens
|
||||
if !remaining.is_empty() {
|
||||
buf += remaining;
|
||||
}
|
||||
if tokens.is_empty() {
|
||||
// No placeholders were found, so just return the text
|
||||
return FormatTemplate::Text(buf);
|
||||
}
|
||||
// Add final text segment
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(buf));
|
||||
}
|
||||
debug_assert!(!tokens.is_empty());
|
||||
FormatTemplate::Tokens(tokens)
|
||||
}
|
||||
|
||||
/// Generate a result string from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
Self::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
Self::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the id from an aho-corasick match to the
|
||||
// appropriate token
|
||||
fn token_from_pattern_id(id: u32) -> Token {
|
||||
use Token::*;
|
||||
match id {
|
||||
2 => Placeholder,
|
||||
3 => Basename,
|
||||
4 => Parent,
|
||||
5 => NoExt,
|
||||
6 => BasenameNoExt,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod fmt_tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn parse_no_placeholders() {
|
||||
let templ = FormatTemplate::parse("This string has no placeholders");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string has no placeholders".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_only_brace_escapes() {
|
||||
let templ = FormatTemplate::parse("This string only has escapes like {{ and }}");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string only has escapes like { and }".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_placeholders() {
|
||||
use Token::*;
|
||||
|
||||
let templ = FormatTemplate::parse(
|
||||
"{{path={} \
|
||||
basename={/} \
|
||||
parent={//} \
|
||||
noExt={.} \
|
||||
basenameNoExt={/.} \
|
||||
}}",
|
||||
);
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Tokens(vec![
|
||||
Text("{path=".into()),
|
||||
Placeholder,
|
||||
Text(" basename=".into()),
|
||||
Basename,
|
||||
Text(" parent=".into()),
|
||||
Parent,
|
||||
Text(" noExt=".into()),
|
||||
NoExt,
|
||||
Text(" basenameNoExt=".into()),
|
||||
BasenameNoExt,
|
||||
Text(" }".into()),
|
||||
])
|
||||
);
|
||||
|
||||
let mut path = PathBuf::new();
|
||||
path.push("a");
|
||||
path.push("folder");
|
||||
path.push("file.txt");
|
||||
|
||||
let expanded = templ.generate(&path, Some("/")).into_string().unwrap();
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"{path=a/folder/file.txt \
|
||||
basename=file.txt \
|
||||
parent=a/folder \
|
||||
noExt=a/folder/file \
|
||||
basenameNoExt=file }"
|
||||
);
|
||||
}
|
||||
}
|
28
src/main.rs
28
src/main.rs
|
@ -7,6 +7,7 @@ mod exit_codes;
|
|||
mod filesystem;
|
||||
mod filetypes;
|
||||
mod filter;
|
||||
mod fmt;
|
||||
mod output;
|
||||
mod regex_helper;
|
||||
mod walk;
|
||||
|
@ -103,7 +104,7 @@ fn run() -> Result<ExitCode> {
|
|||
.map(|pat| build_regex(pat, &config))
|
||||
.collect::<Result<Vec<Regex>>>()?;
|
||||
|
||||
walk::scan(&search_paths, Arc::new(regexps), Arc::new(config))
|
||||
walk::scan(&search_paths, regexps, config)
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
|
@ -218,11 +219,13 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
let ansi_colors_support = true;
|
||||
|
||||
let interactive_terminal = std::io::stdout().is_terminal();
|
||||
|
||||
let colored_output = match opts.color {
|
||||
ColorWhen::Always => true,
|
||||
ColorWhen::Never => false,
|
||||
ColorWhen::Auto => {
|
||||
ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal
|
||||
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty());
|
||||
ansi_colors_support && !no_color && interactive_terminal
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -252,7 +255,7 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
max_depth: opts.max_depth(),
|
||||
min_depth: opts.min_depth(),
|
||||
prune: opts.prune,
|
||||
threads: opts.threads(),
|
||||
threads: opts.threads().get(),
|
||||
max_buffer_time: opts.max_buffer_time,
|
||||
ls_colors,
|
||||
interactive_terminal,
|
||||
|
@ -269,6 +272,8 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
file_types.files = true;
|
||||
}
|
||||
Empty => file_types.empty_only = true,
|
||||
BlockDevice => file_types.block_devices = true,
|
||||
CharDevice => file_types.char_devices = true,
|
||||
Socket => file_types.sockets = true,
|
||||
Pipe => file_types.pipes = true,
|
||||
}
|
||||
|
@ -295,6 +300,10 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
.build()
|
||||
})
|
||||
.transpose()?,
|
||||
format: opts
|
||||
.format
|
||||
.as_deref()
|
||||
.map(crate::fmt::FormatTemplate::parse),
|
||||
command: command.map(Arc::new),
|
||||
batch_size: opts.batch_size,
|
||||
exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
|
||||
|
@ -307,8 +316,7 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
|
|||
path_separator,
|
||||
actual_path_separator,
|
||||
max_results: opts.max_results(),
|
||||
strip_cwd_prefix: (opts.no_search_paths()
|
||||
&& (opts.strip_cwd_prefix || !(opts.null_separator || has_command))),
|
||||
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -321,18 +329,22 @@ fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<Comma
|
|||
if !opts.list_details {
|
||||
return None;
|
||||
}
|
||||
let color_arg = format!("--color={}", opts.color.as_str());
|
||||
|
||||
let res = determine_ls_command(&color_arg, colored_output)
|
||||
let res = determine_ls_command(colored_output)
|
||||
.map(|cmd| CommandSet::new_batch([cmd]).unwrap());
|
||||
Some(res)
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result<Vec<&str>> {
|
||||
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {
|
||||
#[allow(unused)]
|
||||
let gnu_ls = |command_name| {
|
||||
let color_arg = if colored_output {
|
||||
"--color=always"
|
||||
} else {
|
||||
"--color=never"
|
||||
};
|
||||
// Note: we use short options here (instead of --long-options) to support more
|
||||
// platforms (like BusyBox).
|
||||
vec![
|
||||
|
|
|
@ -7,6 +7,7 @@ use crate::config::Config;
|
|||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
||||
path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
|
||||
|
@ -14,7 +15,10 @@ fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
|||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) {
|
||||
let r = if let Some(ref ls_colors) = config.ls_colors {
|
||||
// TODO: use format if supplied
|
||||
let r = if let Some(ref format) = config.format {
|
||||
print_entry_format(stdout, entry, config, format)
|
||||
} else if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, entry, config, ls_colors)
|
||||
} else {
|
||||
print_entry_uncolorized(stdout, entry, config)
|
||||
|
@ -54,6 +58,22 @@ fn print_trailing_slash<W: Write>(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_format<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
format: &FormatTemplate,
|
||||
) -> io::Result<()> {
|
||||
let separator = if config.null_separator { "\0" } else { "\n" };
|
||||
let output = format.generate(
|
||||
entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
);
|
||||
// TODO: support writing raw bytes on unix?
|
||||
write!(stdout, "{}{}", output.to_string_lossy(), separator)
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_colorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
|
|
|
@ -16,7 +16,7 @@ fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
|||
use regex_syntax::hir::*;
|
||||
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(&bytes) {
|
||||
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {
|
||||
Ok(s) => s.chars().any(|c| c.is_uppercase()),
|
||||
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
|
||||
},
|
||||
|
|
787
src/walk.rs
787
src/walk.rs
|
@ -1,18 +1,18 @@
|
|||
use std::borrow::Cow;
|
||||
use std::ffi::OsStr;
|
||||
use std::io;
|
||||
use std::io::{self, Write};
|
||||
use std::mem;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::{Arc, Mutex, MutexGuard};
|
||||
use std::thread;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{borrow::Cow, io::Write};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender};
|
||||
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, SendError, Sender};
|
||||
use etcetera::BaseStrategy;
|
||||
use ignore::overrides::OverrideBuilder;
|
||||
use ignore::{self, WalkBuilder};
|
||||
use ignore::overrides::{Override, OverrideBuilder};
|
||||
use ignore::{WalkBuilder, WalkParallel, WalkState};
|
||||
use regex::bytes::Regex;
|
||||
|
||||
use crate::config::Config;
|
||||
|
@ -36,6 +36,7 @@ enum ReceiverMode {
|
|||
|
||||
/// The Worker threads can result in a valid entry having PathBuf or an error.
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Debug)]
|
||||
pub enum WorkerResult {
|
||||
// Errors should be rare, so it's probably better to allow large_enum_variant than
|
||||
// to box the Entry variant
|
||||
|
@ -43,131 +44,98 @@ pub enum WorkerResult {
|
|||
Error(ignore::Error),
|
||||
}
|
||||
|
||||
/// Maximum size of the output buffer before flushing results to the console
|
||||
pub const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
/// Default duration until output buffering switches to streaming.
|
||||
pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
/// A batch of WorkerResults to send over a channel.
|
||||
#[derive(Clone)]
|
||||
struct Batch {
|
||||
items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
|
||||
}
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Arc<Vec<Regex>>, config: Arc<Config>) -> Result<ExitCode> {
|
||||
let first_path = &paths[0];
|
||||
|
||||
// Channel capacity was chosen empircally to perform similarly to an unbounded channel
|
||||
let (tx, rx) = bounded(0x4000 * config.threads);
|
||||
|
||||
let mut override_builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
override_builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
let overrides = override_builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))?;
|
||||
|
||||
let mut walker = WalkBuilder::new(first_path);
|
||||
walker
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.require_git(config.require_git_to_read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
walker.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
if let Ok(basedirs) = etcetera::choose_base_strategy() {
|
||||
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
|
||||
if global_ignore_file.is_file() {
|
||||
let result = walker.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in global ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
impl Batch {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
items: Arc::new(Mutex::new(Some(vec![]))),
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = walker.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
walker.add(path);
|
||||
}
|
||||
|
||||
let parallel_walker = walker.threads(config.threads).build_parallel();
|
||||
|
||||
// Flag for cleanly shutting down the parallel walk
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
// Flag specifically for quitting due to ^C
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&quit_flag);
|
||||
let interrupt_flag = Arc::clone(&interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Spawn the thread that receives all results through the channel.
|
||||
let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx);
|
||||
|
||||
// Spawn the sender threads.
|
||||
spawn_senders(&config, &quit_flag, patterns, parallel_walker, tx);
|
||||
|
||||
// Wait for the receiver thread to print out all results.
|
||||
let exit_code = receiver_thread.join().unwrap();
|
||||
|
||||
if interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
|
||||
self.items.lock().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Batch {
|
||||
type Item = WorkerResult;
|
||||
type IntoIter = std::vec::IntoIter<WorkerResult>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.lock().take().unwrap().into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper that sends batches of items at once over a channel.
|
||||
struct BatchSender {
|
||||
batch: Batch,
|
||||
tx: Sender<Batch>,
|
||||
limit: usize,
|
||||
}
|
||||
|
||||
impl BatchSender {
|
||||
fn new(tx: Sender<Batch>, limit: usize) -> Self {
|
||||
Self {
|
||||
batch: Batch::new(),
|
||||
tx,
|
||||
limit,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if we need to flush a batch.
|
||||
fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
|
||||
match batch {
|
||||
// Limit the batch size to provide some backpressure
|
||||
Some(vec) => vec.len() >= self.limit,
|
||||
// Batch was already taken by the receiver, so make a new one
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add an item to a batch.
|
||||
fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
|
||||
let mut batch = self.batch.lock();
|
||||
|
||||
if self.needs_flush(batch.as_ref()) {
|
||||
drop(batch);
|
||||
self.batch = Batch::new();
|
||||
batch = self.batch.lock();
|
||||
}
|
||||
|
||||
let items = batch.as_mut().unwrap();
|
||||
items.push(item);
|
||||
|
||||
if items.len() == 1 {
|
||||
// New batch, send it over the channel
|
||||
self.tx
|
||||
.send(self.batch.clone())
|
||||
.map_err(|_| SendError(()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Maximum size of the output buffer before flushing results to the console
|
||||
const MAX_BUFFER_LENGTH: usize = 1000;
|
||||
/// Default duration until output buffering switches to streaming.
|
||||
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
|
||||
|
||||
/// Wrapper for the receiver thread's buffering behavior.
|
||||
struct ReceiverBuffer<W> {
|
||||
struct ReceiverBuffer<'a, W> {
|
||||
/// The configuration.
|
||||
config: Arc<Config>,
|
||||
config: &'a Config,
|
||||
/// For shutting down the senders.
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
quit_flag: &'a AtomicBool,
|
||||
/// The ^C notifier.
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
interrupt_flag: &'a AtomicBool,
|
||||
/// Receiver for worker results.
|
||||
rx: Receiver<WorkerResult>,
|
||||
rx: Receiver<Batch>,
|
||||
/// Standard output.
|
||||
stdout: W,
|
||||
/// The current buffer mode.
|
||||
|
@ -180,15 +148,12 @@ struct ReceiverBuffer<W> {
|
|||
num_results: usize,
|
||||
}
|
||||
|
||||
impl<W: Write> ReceiverBuffer<W> {
|
||||
impl<'a, W: Write> ReceiverBuffer<'a, W> {
|
||||
/// Create a new receiver buffer.
|
||||
fn new(
|
||||
config: Arc<Config>,
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
stdout: W,
|
||||
) -> Self {
|
||||
fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {
|
||||
let config = &state.config;
|
||||
let quit_flag = state.quit_flag.as_ref();
|
||||
let interrupt_flag = state.interrupt_flag.as_ref();
|
||||
let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
|
||||
let deadline = Instant::now() + max_buffer_time;
|
||||
|
||||
|
@ -216,7 +181,7 @@ impl<W: Write> ReceiverBuffer<W> {
|
|||
}
|
||||
|
||||
/// Receive the next worker result.
|
||||
fn recv(&self) -> Result<WorkerResult, RecvTimeoutError> {
|
||||
fn recv(&self) -> Result<Batch, RecvTimeoutError> {
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
// Wait at most until we should switch to streaming
|
||||
|
@ -232,34 +197,44 @@ impl<W: Write> ReceiverBuffer<W> {
|
|||
/// Wait for a result or state change.
|
||||
fn poll(&mut self) -> Result<(), ExitCode> {
|
||||
match self.recv() {
|
||||
Ok(WorkerResult::Entry(dir_entry)) => {
|
||||
if self.config.quiet {
|
||||
return Err(ExitCode::HasResults(true));
|
||||
}
|
||||
Ok(batch) => {
|
||||
for result in batch {
|
||||
match result {
|
||||
WorkerResult::Entry(dir_entry) => {
|
||||
if self.config.quiet {
|
||||
return Err(ExitCode::HasResults(true));
|
||||
}
|
||||
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
self.buffer.push(dir_entry);
|
||||
if self.buffer.len() > MAX_BUFFER_LENGTH {
|
||||
self.stream()?;
|
||||
match self.mode {
|
||||
ReceiverMode::Buffering => {
|
||||
self.buffer.push(dir_entry);
|
||||
if self.buffer.len() > MAX_BUFFER_LENGTH {
|
||||
self.stream()?;
|
||||
}
|
||||
}
|
||||
ReceiverMode::Streaming => {
|
||||
self.print(&dir_entry)?;
|
||||
}
|
||||
}
|
||||
|
||||
self.num_results += 1;
|
||||
if let Some(max_results) = self.config.max_results {
|
||||
if self.num_results >= max_results {
|
||||
return self.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
WorkerResult::Error(err) => {
|
||||
if self.config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
ReceiverMode::Streaming => {
|
||||
self.print(&dir_entry)?;
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
|
||||
self.num_results += 1;
|
||||
if let Some(max_results) = self.config.max_results {
|
||||
if self.num_results >= max_results {
|
||||
return self.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
if self.config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
// If we don't have another batch ready, flush before waiting
|
||||
if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {
|
||||
self.flush()?;
|
||||
}
|
||||
}
|
||||
Err(RecvTimeoutError::Timeout) => {
|
||||
|
@ -275,7 +250,7 @@ impl<W: Write> ReceiverBuffer<W> {
|
|||
|
||||
/// Output a path.
|
||||
fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
|
||||
output::print_entry(&mut self.stdout, entry, &self.config);
|
||||
output::print_entry(&mut self.stdout, entry, self.config);
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
// Ignore any errors on flush, because we're about to exit anyway
|
||||
|
@ -314,7 +289,7 @@ impl<W: Write> ReceiverBuffer<W> {
|
|||
|
||||
/// Flush stdout if necessary.
|
||||
fn flush(&mut self) -> Result<(), ExitCode> {
|
||||
if self.config.interactive_terminal && self.stdout.flush().is_err() {
|
||||
if self.stdout.flush().is_err() {
|
||||
// Probably a broken pipe. Exit gracefully.
|
||||
return Err(ExitCode::GeneralError);
|
||||
}
|
||||
|
@ -322,33 +297,132 @@ impl<W: Write> ReceiverBuffer<W> {
|
|||
}
|
||||
}
|
||||
|
||||
fn spawn_receiver(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
interrupt_flag: &Arc<AtomicBool>,
|
||||
rx: Receiver<WorkerResult>,
|
||||
) -> thread::JoinHandle<ExitCode> {
|
||||
let config = Arc::clone(config);
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
let interrupt_flag = Arc::clone(interrupt_flag);
|
||||
/// State shared by the sender and receiver threads.
|
||||
struct WorkerState {
|
||||
/// The search patterns.
|
||||
patterns: Vec<Regex>,
|
||||
/// The command line configuration.
|
||||
config: Config,
|
||||
/// Flag for cleanly shutting down the parallel walk
|
||||
quit_flag: Arc<AtomicBool>,
|
||||
/// Flag specifically for quitting due to ^C
|
||||
interrupt_flag: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl WorkerState {
|
||||
fn new(patterns: Vec<Regex>, config: Config) -> Self {
|
||||
let quit_flag = Arc::new(AtomicBool::new(false));
|
||||
let interrupt_flag = Arc::new(AtomicBool::new(false));
|
||||
|
||||
Self {
|
||||
patterns,
|
||||
config,
|
||||
quit_flag,
|
||||
interrupt_flag,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
|
||||
let mut builder = OverrideBuilder::new(first_path);
|
||||
|
||||
for pattern in &config.exclude_patterns {
|
||||
builder
|
||||
.add(pattern)
|
||||
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
|
||||
}
|
||||
|
||||
builder
|
||||
.build()
|
||||
.map_err(|_| anyhow!("Mismatch in exclude patterns"))
|
||||
}
|
||||
|
||||
fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
|
||||
let first_path = &paths[0];
|
||||
let config = &self.config;
|
||||
let overrides = self.build_overrides(paths)?;
|
||||
|
||||
let mut builder = WalkBuilder::new(first_path);
|
||||
builder
|
||||
.hidden(config.ignore_hidden)
|
||||
.ignore(config.read_fdignore)
|
||||
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
|
||||
.git_ignore(config.read_vcsignore)
|
||||
.git_global(config.read_vcsignore)
|
||||
.git_exclude(config.read_vcsignore)
|
||||
.require_git(config.require_git_to_read_vcsignore)
|
||||
.overrides(overrides)
|
||||
.follow_links(config.follow_links)
|
||||
// No need to check for supported platforms, option is unavailable on unsupported ones
|
||||
.same_file_system(config.one_file_system)
|
||||
.max_depth(config.max_depth);
|
||||
|
||||
if config.read_fdignore {
|
||||
builder.add_custom_ignore_filename(".fdignore");
|
||||
}
|
||||
|
||||
if config.read_global_ignore {
|
||||
if let Ok(basedirs) = etcetera::choose_base_strategy() {
|
||||
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
|
||||
if global_ignore_file.is_file() {
|
||||
let result = builder.add_ignore(global_ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!(
|
||||
"Malformed pattern in global ignore file. {}.",
|
||||
err
|
||||
));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ignore_file in &config.ignore_files {
|
||||
let result = builder.add_ignore(ignore_file);
|
||||
match result {
|
||||
Some(ignore::Error::Partial(_)) => (),
|
||||
Some(err) => {
|
||||
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
}
|
||||
|
||||
for path in &paths[1..] {
|
||||
builder.add(path);
|
||||
}
|
||||
|
||||
let walker = builder.threads(config.threads).build_parallel();
|
||||
Ok(walker)
|
||||
}
|
||||
|
||||
/// Run the receiver work, either on this thread or a pool of background
|
||||
/// threads (for --exec).
|
||||
fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
|
||||
let config = &self.config;
|
||||
|
||||
let threads = config.threads;
|
||||
thread::spawn(move || {
|
||||
// This will be set to `Some` if the `--exec` argument was supplied.
|
||||
if let Some(ref cmd) = config.command {
|
||||
if cmd.in_batch_mode() {
|
||||
exec::batch(rx, cmd, &config)
|
||||
exec::batch(rx.into_iter().flatten(), cmd, config)
|
||||
} else {
|
||||
let out_perm = Mutex::new(());
|
||||
|
||||
thread::scope(|scope| {
|
||||
// Each spawned job will store it's thread handle in here.
|
||||
// Each spawned job will store its thread handle in here.
|
||||
let threads = config.threads;
|
||||
let mut handles = Vec::with_capacity(threads);
|
||||
for _ in 0..threads {
|
||||
let rx = rx.clone();
|
||||
|
||||
// Spawn a job thread that will listen for and execute inputs.
|
||||
let handle = scope.spawn(|| exec::job(rx, cmd, &out_perm, &config));
|
||||
let handle = scope
|
||||
.spawn(|| exec::job(rx.into_iter().flatten(), cmd, &out_perm, config));
|
||||
|
||||
// Push the handle of the spawned thread into the vector for later joining.
|
||||
handles.push(handle);
|
||||
|
@ -358,188 +432,237 @@ fn spawn_receiver(
|
|||
})
|
||||
}
|
||||
} else {
|
||||
let stdout = io::stdout();
|
||||
let stdout = stdout.lock();
|
||||
let stdout = io::stdout().lock();
|
||||
let stdout = io::BufWriter::new(stdout);
|
||||
|
||||
let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout);
|
||||
rxbuffer.process()
|
||||
ReceiverBuffer::new(self, rx, stdout).process()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_senders(
|
||||
config: &Arc<Config>,
|
||||
quit_flag: &Arc<AtomicBool>,
|
||||
patterns: Arc<Vec<Regex>>,
|
||||
parallel_walker: ignore::WalkParallel,
|
||||
tx: Sender<WorkerResult>,
|
||||
) {
|
||||
parallel_walker.run(|| {
|
||||
let config = Arc::clone(config);
|
||||
let patterns = Arc::clone(&patterns);
|
||||
let tx_thread = tx.clone();
|
||||
let quit_flag = Arc::clone(quit_flag);
|
||||
/// Spawn the sender threads.
|
||||
fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {
|
||||
walker.run(|| {
|
||||
let patterns = &self.patterns;
|
||||
let config = &self.config;
|
||||
let quit_flag = self.quit_flag.as_ref();
|
||||
|
||||
Box::new(move |entry_o| {
|
||||
if quit_flag.load(Ordering::Relaxed) {
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
|
||||
let entry = match entry_o {
|
||||
Ok(ref e) if e.depth() == 0 => {
|
||||
// Skip the root directory entry.
|
||||
return ignore::WalkState::Continue;
|
||||
let mut limit = 0x100;
|
||||
if let Some(cmd) = &config.command {
|
||||
if !cmd.in_batch_mode() && config.threads > 1 {
|
||||
// Evenly distribute work between multiple receivers
|
||||
limit = 1;
|
||||
}
|
||||
Ok(e) => DirEntry::normal(e),
|
||||
Err(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
}) => match inner_err.as_ref() {
|
||||
ignore::Error::Io(io_error)
|
||||
if io_error.kind() == io::ErrorKind::NotFound
|
||||
&& path
|
||||
.symlink_metadata()
|
||||
.ok()
|
||||
.map_or(false, |m| m.file_type().is_symlink()) =>
|
||||
{
|
||||
DirEntry::broken_symlink(path)
|
||||
}
|
||||
let mut tx = BatchSender::new(tx.clone(), limit);
|
||||
|
||||
Box::new(move |entry| {
|
||||
if quit_flag.load(Ordering::Relaxed) {
|
||||
return WalkState::Quit;
|
||||
}
|
||||
|
||||
let entry = match entry {
|
||||
Ok(ref e) if e.depth() == 0 => {
|
||||
// Skip the root directory entry.
|
||||
return WalkState::Continue;
|
||||
}
|
||||
_ => {
|
||||
return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
})) {
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return match tx_thread.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => ignore::WalkState::Continue,
|
||||
Err(_) => ignore::WalkState::Quit,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(min_depth) = config.min_depth {
|
||||
if entry.depth().map_or(true, |d| d < min_depth) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Check the name first, since it doesn't require metadata
|
||||
let entry_path = entry.path();
|
||||
|
||||
let search_str: Cow<OsStr> = if config.search_full_path {
|
||||
let path_abs_buf = filesystem::path_absolute_form(entry_path)
|
||||
.expect("Retrieving absolute path succeeds");
|
||||
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
|
||||
} else {
|
||||
match entry_path.file_name() {
|
||||
Some(filename) => Cow::Borrowed(filename),
|
||||
None => unreachable!(
|
||||
"Encountered file system entry without a file name. This should only \
|
||||
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
|
||||
appear in a file system traversal."
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
if !patterns
|
||||
.iter()
|
||||
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
|
||||
{
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
|
||||
// Filter out unwanted extensions.
|
||||
if let Some(ref exts_regex) = config.extensions {
|
||||
if let Some(path_str) = entry_path.file_name() {
|
||||
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted file types.
|
||||
if let Some(ref file_types) = config.file_types {
|
||||
if file_types.should_ignore(&entry) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
if let Some(ref owner_constraint) = config.owner_constraint {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if !owner_constraint.matches(metadata) {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted sizes if it is a file and we have been given size constraints.
|
||||
if !config.size_constraints.is_empty() {
|
||||
if entry_path.is_file() {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
let file_size = metadata.len();
|
||||
if config
|
||||
.size_constraints
|
||||
.iter()
|
||||
.any(|sc| !sc.is_within(file_size))
|
||||
Ok(e) => DirEntry::normal(e),
|
||||
Err(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
}) => match inner_err.as_ref() {
|
||||
ignore::Error::Io(io_error)
|
||||
if io_error.kind() == io::ErrorKind::NotFound
|
||||
&& path
|
||||
.symlink_metadata()
|
||||
.ok()
|
||||
.map_or(false, |m| m.file_type().is_symlink()) =>
|
||||
{
|
||||
return ignore::WalkState::Continue;
|
||||
DirEntry::broken_symlink(path)
|
||||
}
|
||||
_ => {
|
||||
return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
|
||||
path,
|
||||
err: inner_err,
|
||||
})) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return match tx.send(WorkerResult::Error(err)) {
|
||||
Ok(_) => WalkState::Continue,
|
||||
Err(_) => WalkState::Quit,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(min_depth) = config.min_depth {
|
||||
if entry.depth().map_or(true, |d| d < min_depth) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Check the name first, since it doesn't require metadata
|
||||
let entry_path = entry.path();
|
||||
|
||||
let search_str: Cow<OsStr> = if config.search_full_path {
|
||||
let path_abs_buf = filesystem::path_absolute_form(entry_path)
|
||||
.expect("Retrieving absolute path succeeds");
|
||||
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
|
||||
} else {
|
||||
match entry_path.file_name() {
|
||||
Some(filename) => Cow::Borrowed(filename),
|
||||
None => unreachable!(
|
||||
"Encountered file system entry without a file name. This should only \
|
||||
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
|
||||
appear in a file system traversal."
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
if !patterns
|
||||
.iter()
|
||||
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
}
|
||||
|
||||
// Filter out unwanted extensions.
|
||||
if let Some(ref exts_regex) = config.extensions {
|
||||
if let Some(path_str) = entry_path.file_name() {
|
||||
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return ignore::WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out unwanted modification times
|
||||
if !config.time_constraints.is_empty() {
|
||||
let mut matched = false;
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
matched = config
|
||||
.time_constraints
|
||||
.iter()
|
||||
.all(|tf| tf.applies_to(&modified));
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return ignore::WalkState::Continue;
|
||||
|
||||
// Filter out unwanted file types.
|
||||
if let Some(ref file_types) = config.file_types {
|
||||
if file_types.should_ignore(&entry) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.is_printing() {
|
||||
if let Some(ls_colors) = &config.ls_colors {
|
||||
// Compute colors in parallel
|
||||
entry.style(ls_colors);
|
||||
#[cfg(unix)]
|
||||
{
|
||||
if let Some(ref owner_constraint) = config.owner_constraint {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if !owner_constraint.matches(metadata) {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let send_result = tx_thread.send(WorkerResult::Entry(entry));
|
||||
// Filter out unwanted sizes if it is a file and we have been given size constraints.
|
||||
if !config.size_constraints.is_empty() {
|
||||
if entry_path.is_file() {
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
let file_size = metadata.len();
|
||||
if config
|
||||
.size_constraints
|
||||
.iter()
|
||||
.any(|sc| !sc.is_within(file_size))
|
||||
{
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
} else {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
if send_result.is_err() {
|
||||
return ignore::WalkState::Quit;
|
||||
}
|
||||
// Filter out unwanted modification times
|
||||
if !config.time_constraints.is_empty() {
|
||||
let mut matched = false;
|
||||
if let Some(metadata) = entry.metadata() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
matched = config
|
||||
.time_constraints
|
||||
.iter()
|
||||
.all(|tf| tf.applies_to(&modified));
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply pruning.
|
||||
if config.prune {
|
||||
return ignore::WalkState::Skip;
|
||||
}
|
||||
if config.is_printing() {
|
||||
if let Some(ls_colors) = &config.ls_colors {
|
||||
// Compute colors in parallel
|
||||
entry.style(ls_colors);
|
||||
}
|
||||
}
|
||||
|
||||
ignore::WalkState::Continue
|
||||
})
|
||||
});
|
||||
let send_result = tx.send(WorkerResult::Entry(entry));
|
||||
|
||||
if send_result.is_err() {
|
||||
return WalkState::Quit;
|
||||
}
|
||||
|
||||
// Apply pruning.
|
||||
if config.prune {
|
||||
return WalkState::Skip;
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
/// Perform the recursive scan.
|
||||
fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {
|
||||
let config = &self.config;
|
||||
let walker = self.build_walker(paths)?;
|
||||
|
||||
if config.ls_colors.is_some() && config.is_printing() {
|
||||
let quit_flag = Arc::clone(&self.quit_flag);
|
||||
let interrupt_flag = Arc::clone(&self.interrupt_flag);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
quit_flag.store(true, Ordering::Relaxed);
|
||||
|
||||
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
|
||||
// Ctrl-C has been pressed twice, exit NOW
|
||||
ExitCode::KilledBySigint.exit();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let (tx, rx) = bounded(2 * config.threads);
|
||||
|
||||
let exit_code = thread::scope(|scope| {
|
||||
// Spawn the receiver thread(s)
|
||||
let receiver = scope.spawn(|| self.receive(rx));
|
||||
|
||||
// Spawn the sender threads.
|
||||
self.spawn_senders(walker, tx);
|
||||
|
||||
receiver.join().unwrap()
|
||||
});
|
||||
|
||||
if self.interrupt_flag.load(Ordering::Relaxed) {
|
||||
Ok(ExitCode::KilledBySigint)
|
||||
} else {
|
||||
Ok(exit_code)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively scan the given search path for files / pathnames matching the patterns.
|
||||
///
|
||||
/// If the `--exec` argument was supplied, this will create a thread pool for executing
|
||||
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
|
||||
/// path will simply be written to standard output.
|
||||
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
|
||||
WorkerState::new(patterns, config).scan(paths)
|
||||
}
|
||||
|
|
|
@ -129,7 +129,7 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
|
|||
.lines()
|
||||
.map(|line| {
|
||||
let line = if trim_start { line.trim_start() } else { line };
|
||||
let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string());
|
||||
let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);
|
||||
if normalize_line {
|
||||
let mut words: Vec<_> = line.split_whitespace().collect();
|
||||
words.sort_unstable();
|
||||
|
|
136
tests/tests.rs
136
tests/tests.rs
|
@ -1,5 +1,7 @@
|
|||
mod testenv;
|
||||
|
||||
#[cfg(unix)]
|
||||
use nix::unistd::{Gid, Group, Uid, User};
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
|
@ -1302,14 +1304,15 @@ fn test_type_executable() {
|
|||
// This test assumes the current user isn't root
|
||||
// (otherwise if the executable bit is set for any level, it is executable for the current
|
||||
// user)
|
||||
if users::get_current_uid() == 0 {
|
||||
if Uid::current().is_root() {
|
||||
return;
|
||||
}
|
||||
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.create_new(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o777)
|
||||
.open(te.test_root().join("executable-file.sh"))
|
||||
|
@ -1317,6 +1320,7 @@ fn test_type_executable() {
|
|||
|
||||
fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.mode(0o645)
|
||||
.open(te.test_root().join("not-user-executable-file.sh"))
|
||||
|
@ -1620,6 +1624,66 @@ fn test_excludes() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
||||
te.assert_output(
|
||||
&["--format", "path={}", "--path-separator=/"],
|
||||
"path=a.foo
|
||||
path=e1 e2
|
||||
path=one
|
||||
path=one/b.foo
|
||||
path=one/two
|
||||
path=one/two/C.Foo2
|
||||
path=one/two/c.foo
|
||||
path=one/two/three
|
||||
path=one/two/three/d.foo
|
||||
path=one/two/three/directory_foo
|
||||
path=symlink",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "noExt={.}", "--path-separator=/"],
|
||||
"noExt=a
|
||||
noExt=one/b
|
||||
noExt=one/two/C
|
||||
noExt=one/two/c
|
||||
noExt=one/two/three/d
|
||||
noExt=one/two/three/directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "basename={/}", "--path-separator=/"],
|
||||
"basename=a.foo
|
||||
basename=b.foo
|
||||
basename=C.Foo2
|
||||
basename=c.foo
|
||||
basename=d.foo
|
||||
basename=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "name={/.}", "--path-separator=/"],
|
||||
"name=a
|
||||
name=b
|
||||
name=C
|
||||
name=c
|
||||
name=d
|
||||
name=directory_foo",
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["foo", "--format", "parent={//}", "--path-separator=/"],
|
||||
"parent=.
|
||||
parent=one
|
||||
parent=one/two
|
||||
parent=one/two
|
||||
parent=one/two/three
|
||||
parent=one/two/three",
|
||||
);
|
||||
}
|
||||
|
||||
/// Shell script execution (--exec)
|
||||
#[test]
|
||||
fn test_exec() {
|
||||
|
@ -2261,10 +2325,10 @@ fn test_owner_ignore_all() {
|
|||
#[test]
|
||||
fn test_owner_current_user() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let uid = users::get_current_uid();
|
||||
let uid = Uid::current();
|
||||
te.assert_output(&["--owner", &uid.to_string(), "a.foo"], "a.foo");
|
||||
if let Some(username) = users::get_current_username().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &username, "a.foo"], "a.foo");
|
||||
if let Ok(Some(user)) = User::from_uid(uid) {
|
||||
te.assert_output(&["--owner", &user.name, "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2272,10 +2336,10 @@ fn test_owner_current_user() {
|
|||
#[test]
|
||||
fn test_owner_current_group() {
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
let gid = users::get_current_gid();
|
||||
let gid = Gid::current();
|
||||
te.assert_output(&["--owner", &format!(":{}", gid), "a.foo"], "a.foo");
|
||||
if let Some(groupname) = users::get_current_groupname().map(|u| u.into_string().unwrap()) {
|
||||
te.assert_output(&["--owner", &format!(":{}", groupname), "a.foo"], "a.foo");
|
||||
if let Ok(Some(group)) = Group::from_gid(gid) {
|
||||
te.assert_output(&["--owner", &format!(":{}", group.name), "a.foo"], "a.foo");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2283,7 +2347,7 @@ fn test_owner_current_group() {
|
|||
#[test]
|
||||
fn test_owner_root() {
|
||||
// This test assumes the current user isn't root
|
||||
if users::get_current_uid() == 0 || users::get_current_gid() == 0 {
|
||||
if Uid::current().is_root() || Gid::current() == Gid::from_raw(0) {
|
||||
return;
|
||||
}
|
||||
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
|
||||
|
@ -2554,3 +2618,57 @@ fn test_invalid_cwd() {
|
|||
panic!("{:?}", output);
|
||||
}
|
||||
}
|
||||
|
||||
/// Test behavior of .git directory with various flags
|
||||
#[test]
|
||||
fn test_git_dir() {
|
||||
let te = TestEnv::new(
|
||||
&[".git/one", "other_dir/.git", "nested/dir/.git"],
|
||||
&[
|
||||
".git/one/foo.a",
|
||||
".git/.foo",
|
||||
".git/a.foo",
|
||||
"other_dir/.git/foo1",
|
||||
"nested/dir/.git/foo2",
|
||||
],
|
||||
);
|
||||
|
||||
te.assert_output(
|
||||
&["--hidden", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(&["--no-ignore", "foo"], "");
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
te.assert_output(
|
||||
&["--hidden", "--no-ignore-vcs", "foo"],
|
||||
".git/one/foo.a
|
||||
.git/.foo
|
||||
.git/a.foo
|
||||
other_dir/.git/foo1
|
||||
nested/dir/.git/foo2",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_gitignore_parent() {
|
||||
let te = TestEnv::new(&["sub"], &[".abc", "sub/.abc"]);
|
||||
|
||||
fs::File::create(te.test_root().join(".gitignore"))
|
||||
.unwrap()
|
||||
.write_all(b".abc\n")
|
||||
.unwrap();
|
||||
|
||||
te.assert_output_subdirectory("sub", &["--hidden"], "");
|
||||
te.assert_output_subdirectory("sub", &["--hidden", "--search-path", "."], "");
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue