Compare commits

master..v7.1.0

No commits in common. "master" and "v7.1.0" have entirely different histories.

65 changed files with 3222 additions and 10114 deletions

.github/FUNDING.yml vendored

@@ -1 +0,0 @@
github: [sharkdp, tavianator]


@@ -1,42 +0,0 @@
name: Bug Report
description: Report a bug.
title: "[BUG] "
labels: bug
body:
- type: markdown
attributes:
value: |
Please check out the [troubleshooting section](https://github.com/sharkdp/fd#troubleshooting) first.
- type: checkboxes
attributes:
label: Checks
options:
- label: I have read the troubleshooting section and still think this is a bug.
required: true
- type: textarea
id: bug
attributes:
label: "Describe the bug you encountered:"
validations:
required: true
- type: textarea
id: expected
attributes:
label: "Describe what you expected to happen:"
- type: input
id: version
attributes:
label: "What version of `fd` are you using?"
placeholder: "paste the output of `fd --version` here"
validations:
required: true
- type: textarea
id: os
attributes:
label: Which operating system / distribution are you on?
placeholder: |
Unix: paste the output of `uname -srm` and `lsb_release -a` here.
Windows: please tell us your Windows version
render: shell
validations:
required: true


@@ -1 +0,0 @@
blank_issues_enabled: true


@@ -1,8 +0,0 @@
---
name: Feature Request
about: Suggest an idea for this project.
title: ''
labels: feature-request
assignees: ''
---


@@ -1,13 +0,0 @@
---
name: Question
about: Ask a question about 'fd'.
title: ''
labels: question
assignees: ''
---
**What version of `fd` are you using?**
[paste the output of `fd --version` here]


@@ -1,10 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "monthly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@@ -1,265 +0,0 @@
name: CICD
env:
CICD_INTERMEDIATES_DIR: "_cicd-intermediates"
MSRV_FEATURES: "--all-features"
on:
workflow_dispatch:
pull_request:
push:
branches:
- master
tags:
- '*'
jobs:
crate_metadata:
name: Extract crate metadata
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Extract crate information
id: crate_metadata
run: |
echo "name=fd" | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT
cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT
outputs:
name: ${{ steps.crate_metadata.outputs.name }}
version: ${{ steps.crate_metadata.outputs.version }}
maintainer: ${{ steps.crate_metadata.outputs.maintainer }}
homepage: ${{ steps.crate_metadata.outputs.homepage }}
msrv: ${{ steps.crate_metadata.outputs.msrv }}
ensure_cargo_fmt:
name: Ensure 'cargo fmt' has been run
runs-on: ubuntu-20.04
steps:
- uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
- uses: actions/checkout@v4
- run: cargo fmt -- --check
lint_check:
name: Ensure 'cargo clippy' has no warnings
runs-on: ubuntu-latest
steps:
- uses: dtolnay/rust-toolchain@stable
with:
components: clippy
- uses: actions/checkout@v4
- run: cargo clippy --all-targets --all-features -- -Dwarnings
min_version:
name: Minimum supported rust version
runs-on: ubuntu-20.04
needs: crate_metadata
steps:
- name: Checkout source code
uses: actions/checkout@v4
- name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }})
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ needs.crate_metadata.outputs.msrv }}
components: clippy
- name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)
run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }}
- name: Run tests
run: cargo test --locked ${{ env.MSRV_FEATURES }}
build:
name: ${{ matrix.job.target }} (${{ matrix.job.os }})
runs-on: ${{ matrix.job.os }}
needs: crate_metadata
strategy:
fail-fast: false
matrix:
job:
- { target: aarch64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
- { target: aarch64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-22.04, use-cross: true }
- { target: arm-unknown-linux-musleabihf, os: ubuntu-22.04, use-cross: true }
- { target: i686-pc-windows-msvc , os: windows-2022 }
- { target: i686-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
- { target: i686-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
- { target: x86_64-apple-darwin , os: macos-12 }
- { target: aarch64-apple-darwin , os: macos-14 }
- { target: x86_64-pc-windows-gnu , os: windows-2022 }
- { target: x86_64-pc-windows-msvc , os: windows-2022 }
- { target: x86_64-unknown-linux-gnu , os: ubuntu-22.04, use-cross: true }
- { target: x86_64-unknown-linux-musl , os: ubuntu-22.04, use-cross: true }
env:
BUILD_CMD: cargo
steps:
- name: Checkout source code
uses: actions/checkout@v4
- name: Install prerequisites
shell: bash
run: |
case ${{ matrix.job.target }} in
arm-unknown-linux-*) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.job.target }}
# On windows, for now build with 1.77.2, so that it works on windows 7.
# When we update the MSRV again, we'll need to revisit this, and probably drop support for Win7
toolchain: "${{ contains(matrix.job.target, 'windows-') && '1.77.2' || 'stable' }}"
- name: Install cross
if: matrix.job.use-cross
uses: taiki-e/install-action@v2
with:
tool: cross
- name: Overwrite build command env variable
if: matrix.job.use-cross
shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
- name: Show version information (Rust, cargo, GCC)
shell: bash
run: |
gcc --version || true
rustup -V
rustup toolchain list
rustup default
cargo -V
rustc -V
- name: Build
shell: bash
run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }}
- name: Set binary name & path
id: bin
shell: bash
run: |
# Figure out suffix of binary
EXE_suffix=""
case ${{ matrix.job.target }} in
*-pc-windows-*) EXE_suffix=".exe" ;;
esac;
# Setup paths
BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}"
BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}"
# Let subsequent steps know where to find the binary
echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT
- name: Set testing options
id: test-options
shell: bash
run: |
# test only library unit tests and binary for arm-type targets
unset CARGO_TEST_OPTIONS
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac;
echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT
- name: Run tests
shell: bash
run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
- name: Generate completions
id: completions
shell: bash
run: make completions
- name: Create tarball
id: package
shell: bash
run: |
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }}
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT
PKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/package"
ARCHIVE_DIR="${PKG_STAGING}/${PKG_BASENAME}/"
mkdir -p "${ARCHIVE_DIR}"
# Binary
cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
# README, LICENSE and CHANGELOG files
cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR"
# Man page
cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR"
# Autocompletion files
cp -r autocomplete "${ARCHIVE_DIR}"
# base compressed package
pushd "${PKG_STAGING}/" >/dev/null
case ${{ matrix.job.target }} in
*-pc-windows-*) 7z -y a "${PKG_NAME}" "${PKG_BASENAME}"/* | tail -2 ;;
*) tar czf "${PKG_NAME}" "${PKG_BASENAME}"/* ;;
esac;
popd >/dev/null
# Let subsequent steps know where to find the compressed package
echo "PKG_PATH=${PKG_STAGING}/${PKG_NAME}" >> $GITHUB_OUTPUT
- name: Create Debian package
id: debian-package
shell: bash
if: startsWith(matrix.job.os, 'ubuntu')
run: bash scripts/create-deb.sh
env:
TARGET: ${{ matrix.job.target }}
DPKG_VERSION: ${{ needs.crate_metadata.outputs.version }}
BIN_PATH: ${{ steps.bin.outputs.BIN_PATH }}
- name: "Artifact upload: tarball"
uses: actions/upload-artifact@master
with:
name: ${{ steps.package.outputs.PKG_NAME }}
path: ${{ steps.package.outputs.PKG_PATH }}
- name: "Artifact upload: Debian package"
uses: actions/upload-artifact@master
if: steps.debian-package.outputs.DPKG_NAME
with:
name: ${{ steps.debian-package.outputs.DPKG_NAME }}
path: ${{ steps.debian-package.outputs.DPKG_PATH }}
- name: Check for release
id: is-release
shell: bash
run: |
unset IS_RELEASE ; if [[ $GITHUB_REF =~ ^refs/tags/v[0-9].* ]]; then IS_RELEASE='true' ; fi
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
- name: Publish archives and packages
uses: softprops/action-gh-release@v2
if: steps.is-release.outputs.IS_RELEASE
with:
files: |
${{ steps.package.outputs.PKG_PATH }}
${{ steps.debian-package.outputs.DPKG_PATH }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
winget:
name: Publish to Winget
runs-on: ubuntu-latest
needs: build
if: startsWith(github.ref, 'refs/tags/v')
steps:
- uses: vedantmgoyal2009/winget-releaser@v2
with:
identifier: sharkdp.fd
installers-regex: '-pc-windows-msvc\.zip$'
token: ${{ secrets.WINGET_TOKEN }}

.gitignore vendored

@@ -1,3 +1,2 @@
 target/
-/autocomplete/
 **/*.rs.bk

.travis.yml Normal file

@@ -0,0 +1,141 @@
language: rust
cache: cargo
matrix:
# allow_failures:
# - rust: nightly
include:
# Stable channel.
- os: linux
rust: stable
env: TARGET=x86_64-unknown-linux-gnu
- os: linux
rust: stable
env: TARGET=x86_64-unknown-linux-musl
- os: linux
rust: stable
env: TARGET=i686-unknown-linux-gnu
- os: linux
rust: stable
env: TARGET=i686-unknown-linux-musl
- os: osx
rust: stable
env: TARGET=x86_64-apple-darwin
- os: linux
rust: stable
env:
- TARGET=arm-unknown-linux-gnueabihf
- CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc-4.8
# Beta channel.
- os: linux
rust: beta
env: TARGET=x86_64-unknown-linux-gnu
# Disabled to reduce total CI time
# - os: linux
# rust: beta
# env: TARGET=x86_64-unknown-linux-musl
# - os: linux
# rust: beta
# env: TARGET=i686-unknown-linux-gnu
# - os: linux
# rust: beta
# env: TARGET=i686-unknown-linux-musl
# - os: osx
# rust: beta
# env: TARGET=x86_64-apple-darwin
# Nightly channel.
- os: linux
rust: nightly
env: TARGET=x86_64-unknown-linux-gnu
# Disabled to reduce total CI time
# - os: linux
# rust: nightly
# env: TARGET=x86_64-unknown-linux-musl
# - os: linux
# rust: nightly
# env: TARGET=i686-unknown-linux-gnu
# - os: linux
# rust: nightly
# env: TARGET=i686-unknown-linux-musl
# - os: osx
# rust: nightly
# env: TARGET=x86_64-apple-darwin
# Minimum Rust supported channel.
- os: linux
rust: 1.20.0
env: TARGET=x86_64-unknown-linux-gnu
- os: linux
rust: 1.20.0
env: TARGET=x86_64-unknown-linux-musl
- os: linux
rust: 1.20.0
env: TARGET=i686-unknown-linux-gnu
- os: linux
rust: 1.20.0
env: TARGET=i686-unknown-linux-musl
- os: osx
rust: 1.20.0
env: TARGET=x86_64-apple-darwin
# Code formatting check
- os: linux
rust: nightly
# skip the global install step
install:
- cargo install --debug --force rustfmt-nightly
script: cargo fmt -- --check
sudo: required
before_install:
- ci/before_install.bash
env:
global:
# Default target on travis-ci.
# Used as conditional check in the install stage
- HOST=x86_64-unknown-linux-gnu
# Used on the deployment script
- PROJECT_NAME=fd
install:
# prevent target re-add error from rustup
- if [[ $TRAVIS_OS_NAME = linux && $HOST != $TARGET ]]; then rustup target add $TARGET; fi
script:
- ci/script.bash
before_deploy:
- bash ci/before_deploy.bash
deploy:
provider: releases
# NOTE updating the `api_key.secure`
# - go to: https://github.com/settings/tokens/new
# - generate new token using `public_repo` scope
# - encrypt it using: `travis encrypt API_KEY_HERE`
# - paste the output below
api_key:
secure: "RyFdh2lpDmaNhPar7ezsb18Xz+6XFM40y7cZCDRML+Sk+eYK1xtDNfEhDRJU5Qo1ReVsByds/QJTSXr2KmZPk3lXwG3SiN7UtrLUxCxFr6qrcM/iujlKTf5UxeRklkzPXxnH95DEyEgxvgbVhWTGVDWoyMnrVQXZKDy6z1iAiYB5h2Zl1rs+MRb/Enlt5q6XIKAlG0ppGtl8CfYudq5ZiqfJaMWTt9SWm2YskC8FeMc0S3IM6/EhTvaNYLdaarFqVWQEVql+6oCuL3ayPzmGyxLdxM37tIMNQ0f97zxqWodacXTG5ULdRD8if1l/SmTujrtjbZ0KWRjsjOq4vBtxBJKGdprcSiB0xH/hToqqtTSO0z5FPXi5cB8UlK6YLDDHcP3kXNer8CYMLI1VPaUDLTF57/0/RPi2DZiiGfZsIAS6PsICbHdTQVzxQckM4lN1vnAGgkhXIMbztml21pv+QrGy98OZJ0ubf5ztgQhpT0WPH4JXT8M6htsoo8dZf8lQ5aLfmW9RKePJDqixQwPqmimPIkrlxRDTDGII0ZAZws7l779eOLmEcM2tH2HbsUKUCZIG/pRHLSlP45Jn2bULGzuXZ2daq70z6zvIbom0CUzSXIvdTXEZI2AM5RBvPYGGaKI8YlxgRdQvJp3h0BzPdFOXI3RAxscCY7PJpa/RdIg="
# for uploading multiple files
file_glob: true
# NOTE explanation on each env variable
# - PROJECT_NAME: name of the project, set on the `env.global` above
# - TRAVIS_TAG: tag name that the build is being deployed for, usually the version number
# - TARGET: target triple of the build
file:
- $PROJECT_NAME-$TRAVIS_TAG-$TARGET.*
- $PROJECT_NAME*.deb
# don't delete artifacts from previous stage
skip_cleanup: true
on:
# deploy only if we push a tag
tags: true
# deploy only on stable channel that has TARGET env variable set
condition: $TRAVIS_RUST_VERSION = stable && $TARGET != ""
notifications:
email:
on_success: never


@@ -1,788 +0,0 @@
# 10.2.0
## Features
- Add --hyperlink option to add OSC 8 hyperlinks to output
## Bugfixes
## Changes
- Build windows releases with rust 1.77 so windows 7 is still supported
- Deb packages now include symlink for fdfind to be more consistent with official packages
## Other
# 10.1.0
## Features
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto" to force whether the cwd prefix is stripped or not.
- Add a `--format` option which allows using a format template for direct output, similar to the template used for `--exec`. (#1043)
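For illustration, the two options above could be used along these lines (hypothetical pattern and extension; `--format` uses the same placeholders as `--exec`):
``` bash
# Force the leading ./ prefix even when the output is piped
fd --strip-cwd-prefix=always '\.jpg$' | sort
# Print only the basename of each match, using an --exec-style template
fd -e jpg --format '{/}'
```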
## Bugfixes
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
## Other
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
# v10.0.0
## Features
- Add `dir` as an alias to `directory` when using `-t`/`--type`, see #1460 and #1464 (@Ato2207).
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
- Breaking: No longer automatically ignore `.git` when using `--hidden` with VCS ignore enabled. This reverts the change in v9.0.0. While that behavior
was often useful, it broke some existing workflows, there was no good way to opt out of it, and adding one would not have been straightforward.
You can easily get similar behavior by adding `.git/` to your global fdignore file.
See #1457.
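To illustrate the `dir` alias and the `@%s` timestamp format above (the pattern and timestamp are made-up values):
``` bash
# `dir` accepted as an alias for `directory`
fd -t dir pattern
# Entries modified after the given Unix timestamp (seconds since the epoch)
fd --newer @1704067200
```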
## Bugfixes
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
- Fix bug that would cause hidden files to be included despite gitignore rules
if search path is "." (#1461, BurntSushi/ripgrep#2711).
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
have a larger system page size than the system that the binary was built on. (#1547)
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
## Changes
- Minimum supported rust version is now 1.77.2
# v9.0.0
## Performance
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
- The default number of threads is now constrained to be at most 64. This should improve startup time on
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
use cases, see #1452 and #1313 (@tavianator).
## Features
- Support character and block device file types, see #1213 and #1336 (@cgzones)
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
## Bugfixes
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
## Other
- Fixed documentation typos, see #1409 (@marcospb19)
## Thanks
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
# v8.7.1
## Bugfixes
- `-1` properly conflicts with the exec family of options.
- `--max-results` overrides `-1`
- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive
- `--changed-within` now accepts a space as well as a "T" as the separator between date and time (due to update of chrono dependency)
## Other
- Many dependencies were updated
- Some documentation was updated and fixed
# v8.7.0
## Features
- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)
## Bugfixes
- Fix logic for when to use global ignore file. There was a bug where the only case where the
global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`
nor `--no-global-ignore-file` is passed. See #1209
# v8.6.0
## Features
- New `--and <pattern>` option to add additional patterns that must also be matched. See #315
and #1139 (@Uthar)
- Added `--changed-after` as alias for `--changed-within`, to have a name consistent with `--changed-before`.
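Illustrative use of the two new options above:
``` bash
# Match paths that contain both "foo" and "bar"
fd foo --and bar
# Same as --changed-within: entries modified during the last two weeks
fd --changed-after 2weeks
```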
## Changes
- Breaking: On Unix-like systems, `--type executable` now additionally checks if
the file is executable by the current user, see #1106 and #1169 (@ptipiak)
## Bugfixes
- Use fd instead of fd.exe for Powershell completions (when completions are generated on windows)
## Other
# v8.5.3
## Bugfixes
- Fix completion generation to not include full path of fd command
- Fix build error if completions feature is disabled
# v8.5.2
## Bugfixes
- Fix --owner option value parsing, see #1163 and #1164 (@tmccombs)
# v8.5.1
## Bugfixes
- Fix --threads/-j option value parsing, see #1160 and #1162 (@sharkdp)
# v8.5.0
## Features
- `--type executable`/`-t` now works on Windows, see #1051 and #1061 (@tavianator)
## Bugfixes
- Fixed differences between piped / non-piped output. This changes `fd`s behavior back to what we
had before 8.3.0, i.e. there will be no leading `./` prefixes, unless `--exec`/`-x`,
`--exec-batch`/`-X`, or `--print0`/`-0` are used. `--strip-cwd-prefix` can be used to strip that
prefix in those cases. See #1046, #1115, and #1121 (@tavianator)
- `fd` could previously crash with a panic due to a race condition in Rust's standard library
(see https://github.com/rust-lang/rust/issues/39364). This has been fixed by switching to a different
message passing implementation, see #1060 and #1146 (@tavianator)
- `fd`'s memory usage will not grow unboundedly on huge directory trees, see #1146 (@tavianator)
- fd returns an error when the current working directory does not exist while a search path is
specified, see #1072 (@vijfhoek)
- Improved "command not found" error message, see #1083 and #1109 (@themkat)
- Preserve command exit codes when using `--exec-batch`, see #1136 and #1137 (@amesgen)
## Changes
- No leading `./` prefix for non-interactive results, see above.
- fd now colorizes paths in parallel, significantly improving performance, see #1148 (@tavianator)
- fd can now avoid `stat` syscalls even when colorizing paths, as long as the color scheme doesn't
require metadata, see #1148 (@tavianator)
- The statically linked `musl` versions of `fd` now use `jemalloc`, leading to a significant performance
improvement, see #1062 (@tavianator)
## Other
- Added link back to GitHub in man page and `--help` text, see #1086 (@scottchiefbaker)
- Major update in how `fd` handles command line options internally, see #1067 (@tmccombs)
# v8.4.0
## Features
- Support multiple `--exec <cmd>` instances, see #406 and #960 (@tmccombs)
## Bugfixes
- "Argument list too long" errors can not appear anymore when using `--exec-batch`/`-X`, as the command invocations are automatically batched at the maximum possible size, even if `--batch-size` is not given. See #410 and #1020 (@tavianator)
## Changes
- Directories are now printed with an additional path separator at the end: `foo/bar/`, see #436 and #812 (@yyogo)
- The `-u` flag was changed to be equivalent to `-HI` (previously, a single `-u` was only equivalent to `-I`). Additional `-u` flags are still allowed, but ignored. See #840 and #986 (@jacksontheel)
## Other
- Added installation instructions for RHEL8, see #989 (@ethsol)
# v8.3.2
## Bugfixes
- Invalid absolute path on windows when searching from the drive root, see #931 and #936 (@gbarta)
# v8.3.1
## Bugfixes
- Stop implying `--no-ignore-parent` when `--no-vcs-ignore` is supplied, see #907, #901, #908 (@tmccombs)
- fd no longer waits for the whole traversal if the only matches arrive within max_buffer_time, see #868 and #895 (@tavianator)
- `--max-results=1` now immediately quits after the first result, see #867
- `fd -h` does not panic anymore when stdout is closed, see #897
## Changes
- Disable jemalloc on FreeBSD, see #896 (@xanderio)
- Updated man page, see #912 (@rlue)
- Updated zsh completions, see #932 (@tmccombs)
# v8.3.0
## Performance improvements
- Colorized output is now significantly faster, see #720 and #853 (@tavianator)
- Writing to stdout is now buffered if the output does not go to a TTY. This increases performance
when the output of `fd` is piped to another program or to a file, see #885 (@tmccombs, original
implementation by @sourlemon207)
- File metadata is now cached between the different filters that require it (e.g. `--owner`,
`--size`), reducing the number of `stat` syscalls when multiple filters are used; see #863
(@tavianator, original implementation by @alexmaco)
## Features
- Don't buffer command output from `--exec` when using a single thread. See #522
- Add new `-q, --quiet` flag, see #303 (@Asha20)
- Add new `--no-ignore-parent` flag, see #787 (@will459)
- Add new `--batch-size` flag, see #410 (@devonhollowood)
- Add opposing command-line options, see #595 (@Asha20)
- Add support for more filesystem indicators in `LS_COLORS`, see
https://github.com/sharkdp/lscolors/pull/35 (@tavianator)
## Bugfixes
- Always show the `./` prefix for search results unless the output is a TTY or `--strip-cwd-prefix` is set, see #760 and #861 (@jcaplan)
- Set default path separator to `/` in MSYS, see #537 and #730 (@aswild)
- fd cannot search files under a RAM disk, see #752
- fd doesn't show substituted drive on Windows, see #365
- Properly handle write errors to devices that are full, see #737
- Use local time zone for time functions (`--change-newer-than`, `--change-older-than`), see #631 (@jacobmischka)
- Support `--list-details` on more platforms (like BusyBox), see #783
- The filters `--owner`, `--size`, and `--changed-{within,before}` now apply to symbolic links
themselves, rather than the link target, except when `--follow` is specified; see #863
- Change time comparisons to be exclusive, see #794 (@jacobmischka)
## Changes
- Apply custom `--path-separator` to commands run with `--exec(-batch)` and `--list-details`, see #697 (@aswild)
## Other
- Many documentation updates
# v8.2.1
No functional changes with respect to v8.2.0. Bugfix in the release process.
# v8.2.0
## Features
- Add new `--prune` flag, see #535 (@reima)
- Improved the usability of the time-based options, see #624 and #645 (@gorogoroumaru)
- Add support for exact file sizes in the `--size` filter, see #669 and #696 (@Rogach)
- `fd` now prints an error message if the search pattern requires a leading dot but
`--hidden` is not enabled (Unix only), see #615
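To illustrate the exact-size support for `--size` mentioned above (sizes are made-up values):
``` bash
# Files that are exactly 4 kibibytes in size
fd --size 4ki
# Files larger than 10 megabytes
fd --size +10m
```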
## Bugfixes
- Avoid panic when performing limited searches in directories with restricted permissions, see #678
- Invalid numeric command-line arguments are silently ignored, see #675
- Disable jemalloc on Android, see #662
- The `--help` text will be colorless if `NO_COLOR` has been set, see #600 (@xanonid)
## Changes
- If `LS_COLORS` is not set (e.g. on Windows), we now provide a more comprehensive default which
includes many more file types, see #604 and #682 (@mjsir911).
## Other
- Added `zsh` completion files, see #654 and #189 (@smancill)
# v8.1.1
## Bugfixes
- Support colored output on older Windows versions if either (1) `--color=always` is set or (2) the `TERM` environment variable is set. See #469
# v8.1.0
## Features
- Add new `--owner [user][:group]` filter. See #307 (pull #581) (@alexmaco)
- Add support for a global ignore file (`~/.config/fd/ignore` on Unix), see #575 (@soedirgo)
- Do not exit immediately if one of the search paths is missing, see #587 (@DJRHails)
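Illustrative invocations of the new `--owner` filter described above (user and group names are placeholders):
``` bash
# Files owned by root (any group)
fd --owner root
# Files belonging to the group "staff" (any user)
fd --owner ':staff'
```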
## Bugfixes
- Reverted a change from fd 8.0 that enabled colors on all Windows terminals (see below) in order to support older Windows versions again, see #577. Unfortunately, this re-opens #469
- Fix segfault caused by jemalloc on macOS Catalina, see #498
- Fix `--glob` behavior with empty pattern, see #579 (@SeamusConnor)
- Fix `--list-details` on FreeBSD, DragonFly BSD, OpenBSD and NetBSD. See #573 (@t6)
## Changes
- Updated documentation for `--size`, see #584
# v8.0.0
## Features
- Add a new `-l`/`--list-details` option to show more details about the search results. This is
basically an alias for `--exec-batch ls -l` with some additional `ls` options.
This can be used in order to:
* see metadata like permissions, owner, file size, modification times (#491)
* see symlink targets (#482)
* achieve a deterministic output order (#324, #196, #159)
- Add a new `--max-results=<count>` option to limit the number of search results, see #472, #476 and #555
This can be useful to speed up searches in cases where you know that there are only N results.
Using this option is also (slightly) faster than piping to `head -n <count>`, where `fd` can only
exit after it has found `<count> + 1` search results.
- Add the alias `-1` for `--max-results=1`, see #561. (@SimplyDanny).
- Add new `--type socket` and `--type pipe` filters, see #511.
- Add new `--min-depth <depth>` and `--exact-depth <depth>` options in addition to the existing option
to limit the maximum depth. See #404.
- Support additional ANSI font styles in `LS_COLORS`: faint, slow blink, rapid blink, dimmed, hidden and strikethrough.
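A few illustrative invocations of the options introduced above (patterns are placeholders):
``` bash
# Show details for each match (roughly an alias for --exec-batch ls -l)
fd -l pattern
# Stop after the first result
fd -1 pattern
# Only directories exactly two levels below the search root
fd --exact-depth 2 --type directory
```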
## Bugfixes
- Preserve non-UTF8 filenames: invalid UTF-8 filenames are now properly passed to child-processes
when using `--exec`, `--exec-batch` or `--list-details`. In `fd`'s output, we replace non-UTF-8
sequences with the "<22>" character. However, if the output of `fd` goes to another process, we
print the actual bytes of the filename. For more details, see #558 and #295.
- `LS_COLORS` entries with unsupported font styles are not completely ignored, see #552
## Changes
- Colored output will now be enabled by default on older Windows versions.
This allows the use of colored output if the terminal supports it (e.g.
MinTTY, Git Bash). On the other hand, this will be a regression for users
on older Windows versions with terminals that do not support ANSI escape
sequences. Affected users can use an alias `fd="fd --color=never"` to
continue using `fd` without colors. There is no change of behavior for
Windows 10. See #469.
- When using `--glob` in combination with `--full-path`, a `*` character does not match a path
separation character (`/` or `\\`) anymore. You can use `**` for that. This allows things like
`fd -p -g '/some/base/path/*/*/*.txt'` which would previously match to arbitrary depths (instead
of exactly two folders below `/some/base/path`). See #404.
- "Legacy" support to use `fd -exec` (with a single dash) has been removed. Use `fd -x` or
`fd --exec` instead.
- Overall improved error handling and error messages.
## Other
- Korean translation of the README, see: [한국어](https://github.com/spearkkk/fd-kor) (@spearkkk)
# v7.5.0
## Features
- Added `--one-file-system` (aliases: `--mount`, `--xdev`) to not cross file system boundaries on Unix and Windows, see #507 (@FallenWarrior2k).
- Added `--base-directory` to change the working directory in which `fd` is run, see #509 and #475 (@hajdamak).
- `fd` will not use colored output if the `NO_COLOR` environment variable is set, see #550 and #551 (@metadave).
- `fd --exec` will return exit code 1 if one of the executed commands fails, see #526 and #531 (@fusillicode and @Giuffre)
## Bug Fixes
- Fixed 'command not found' error when using zsh completion, see #487 (@barskern).
- `fd -L` should include broken symlinks, see #357 and #497 (@tommilligan, @neersighted and @sharkdp)
- Display directories even if we don't have permission to enter, see #437 (@sharkdp)
## Changes
- A flag can now be passed multiple times without producing an error, see #488 and #496 (@rootbid).
- Search results are sorted when using the `-X` option to match the behaviour of piping to `xargs`, see #441 and #524 (@Marcoleni @crash-g).
# v7.4.0
## Performance improvements
- Reduce number of `stat` syscalls, improving the performance for searches where file metadata is
required (`--type`, `--size`, `--changed-within`, …), see #434 (@tavianator)
- Use jemalloc by default, improving the performance for almost all searches, see #481. Note that
Windows and `*musl*` builds do not profit from this.
## Features
- Added a new `-g`/`--glob` option to switch to glob-based searches (instead of regular expression
based searches). This is accompanied by a new `--regex` option that can be used to switch back,
if users want to `alias fd="fd --glob"`. See #284
- Added a new `--path-separator <sep>` option which can be useful for Windows users who
want/need `fd` to use `/` instead of `\`, see #428 and #153 (@mookid)
- Added support for hidden files on Windows, see #379
- When `fd` is run with the `--exec-batch`/`-X` option, it now exposes the exit status of the
command that was run, see #333.
- Exit immediately when Ctrl-C has been pressed twice, see #423
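For example, the new glob mode and path separator option could be used like this (illustrative):
``` bash
# Glob-based search instead of the default regular expression
fd -g '*.tar.gz'
# Emit forward slashes in the output (mainly useful on Windows)
fd --path-separator '/' pattern
```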
## Bugfixes
- Make `--changed-within`/`--changed-before` work for directories, see #470
## Other
- Pre-built `fd` binaries should now be available for `armhf` targets, see #457 (@detly)
- `fd` is now available on Alpine Linux, see #451 (@5paceToast)
- `fd` is now in the official FreeBSD repositories, see #412 (@t6)
- Added OpenBSD install instructions, see #421 (@evitalis)
- Added metadata to the Debian package, see #416 (@cathalgarvey)
- `fd` can be installed via npm, see #438 (@pablopunk)
# v7.3.0
## Features
- New `--exec-batch <cmd>`/`-X <cmd>` option for batch execution of commands, see #360 (@kimsnj).
This allows you to do things like:
``` bash
fd … -X vim # open all search results in vim (or any other editor)
fd … -X ls -l # view detailed stats about the search results with 'ls'
fd -e svg -X inkscape # open all SVG files in Inkscape
```
- Support for 24-bit color codes (when specified via `LS_COLORS`) as well as
different font styles (bold, italic, underline).
## Changes
- A few performance improvements, in particular when printing lots of colorized
results to the console, see #370
- The `LS_COLORS` handling has been "outsourced" to a separate crate (https://github.com/sharkdp/lscolors) that is now being used by other tools as well: [fselect](https://github.com/jhspetersson/fselect), [lsd](https://github.com/Peltoche/lsd/pull/84). For details, see #363.
## Other
- `fd` will be available in Ubuntu Disco Dingo (19.04), see #373 (@sylvestre)
- This release should come with a static ARM binary (`arm-unknown-linux-musleabihf`), see #320 (@duncanfinney)
- Various documentation improvements, see #389
## Thanks
Special thanks to @alexmaco for his awesome work on refactoring and code improvements! (see #401, #398, and #383)
# v7.2.0
## Features
* Added support for filtering by file modification time by adding two new options `--changed-before <date|duration>` and `--changed-within <..>`. For more details, see the `--help` text, the man page, the relevant issue #165 and the PR #339 (@kimsnj)
* Added `--show-errors` option to enable the display of filesystem error messages such as "permission denied", see #311 (@psinghal20 and @majecty)
* Added `--maxdepth` as a (hidden) alias for `--max-depth`, see #323 (@mqudsi)
* Added `--search-path` option which can be supplied to replace the positional `path` argument at any position.
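Illustrative use of the new time filters and `--search-path` described above (dates and paths are made up):
``` bash
# Files modified within the last two weeks
fd --changed-within 2weeks
# Files last modified before a given date, searching two explicit locations
fd --changed-before 2018-10-27 --search-path /etc --search-path /var/log
```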
## Changes
* Loosen strict handling of missing `--ignore-file`, see #280 (@psinghal20)
* Re-enabled `.ignore` files, see #156.
## Bugfixes
* `fd` could previously get stuck when run from the root directory in the
presence of zombie processes. This curious bug has been fixed in Rust 1.29 and higher. For more details, see #288, [rust-lang/rust#50619](https://github.com/rust-lang/rust/issues/50619) and [the fix](https://github.com/rust-lang/rust/pull/50630)
## Other
* `fd` has officially landed in Debian! See #345 for details. Thanks goes to @sylvestre, @paride and possibly others I don't know about.
* Added Chinese translation of README (@chinanf-boy)
## Thanks
A special thanks goes to @joshleeb for his amazing improvements throughout
the code base (new tests, refactoring work and various other things)!
# v7.1.0
## Features
* Added `--size` filter option, see #276 (@stevepentland, @JonathanxD and @alexmaco)
* Added `--type empty` (or `-t e`) to search for empty files and/or directories, see #273
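Illustrative invocations of the two new filters above:
``` bash
# Empty directories
fd --type empty --type directory
# Files larger than 500 kilobytes
fd --size +500k
```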
## Changes
* With the new version, `.gitignore` files will only be respected in Git repositories, not outside.
* A few performance improvements for `--type` searches, see 641976cf7ad311ba741571ca8b7f02b2654b6955 and 50a2bab5cd52d26d4a3bc786885a2c270ed3b227
## Other
* Starting with this release, we will offer pre-built ARM binaries, see #244
* Added instructions on how to use `fd` with `emacs`, see #282 (@redguardtoo)
* `fd` is now in the official openSUSE repositories, see #275 (@avindra)
* `fd` is now available via MacPorts, see #291 (@raimue)
# v7.0.0
## Features
* Added `--type executable` (or `-t x`) to search for executable files only, see #246 (@PramodBisht)
* Added support for `.fdignore` files, see #156 and #241.
* Added `--ignore-file` option to add custom ignore files, see #156.
* Suggest `--fixed-strings` on invalid regular expressions, see #234 (@PramodBisht)
* Detect when user supplied path instead of pattern, see #235.
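For illustration (the ignore-file name below is made up):
``` bash
# Executable files only
fd --type executable
# Apply an additional, custom ignore file for this search
fd --ignore-file .custom-ignore pattern
```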
## Changes
* `.ignore` and `.rgignore` files are not parsed anymore. Use `.fdignore` files
or add custom files via `--ignore-file` instead.
* Updated to `regex-syntax` 0.5 (@cuviper)
## Bugfixes
* Properly normalize absolute paths, see #268
* Invalid utf8 filenames displayed when `-e` is used, see #250
* If `--type` is used, fifos/sockets/etc. are always shown, see #260
## Other
* Packaging:
* The Arch Linux package is now simply called `fd`.
* There is now a `fd` ebuild for Gentoo Linux.
* There is a `scoop` package for `fd` (Windows).
* There is a `Chocolatey` package for `fd` (Windows).
* There is a Fedora `copr` package for `fd`.
# v6.3.0
## Features
* Files with multiple extensions can now be found via `--extension`/`-e`, see #214 (@althonos)
``` bash
> fd -e tar.gz
```
* Added new `-F`/`--fixed-strings`/`--literal` option that treats the pattern as a literal string instead of a regular expression, see #157
``` bash
> fd -F 'file(1).txt'
```
* Allow `-exec` to work as `--exec`, see #226 (@stevepentland)
## Bugfixes
* Fixed `Ctrl-C` handling when using `--exec`, see #224 (@Doxterpepper)
* Fixed wrong file owner for files in deb package, see #213
## Other
* Replaced old gif by a fancy new SVG screencast (@marionebl)
* Updated [benchmark results](https://github.com/sharkdp/fd#benchmark) (fd has become faster in the meantime!). There is a new repository that hosts several benchmarking scripts for fd: https://github.com/sharkdp/fd-benchmarks
# v6.2.0
## Features
* Support for filtering by multiple file extensions and multiple file types, see #199 and #177
(@tkadur).
For example, it's possible to search for C++ source or header files:
``` bash
> fd -e cpp -e c -e cxx -e h pattern
```
## Changes
* The size of the output buffer (for sorting search results) is now limited to 1000 entries. This
improves the search speed significantly if there are a lot of results, see #191 (@sharkdp).
## Bugfixes
* Fix a bug where long-running searches could not be killed via Ctrl-C, see #210 (@Doxterpepper)
* fd's exit codes are now in accordance with Unix standards, see #201 (@Doxterpepper)
## Other
* Bash, zsh and fish completion should now work with the Ubuntu `.deb` packages, see #195 and #209
(@tmccombs and @sharkdp)
* There is a new section on how to set up `fzf` to use `fd` in the
[README](https://github.com/sharkdp/fd#using-fd-with-fzf), see #168.
# v6.1.0
## Features
* Support for multiple search paths, see #166 (@Doxterpepper)
* Added `--no-ignore-vcs` option to disable `.gitignore` and other VCS ignore files,
without disabling `.ignore` files - see #156 (@ptzz).
## Bugfixes
* Handle terminal signals, see #128 (@Doxterpepper)
* Fixed hang on `--exec` when user input was required, see #178 and #193 (@reima)
## Other
* Debian packages are now created via Travis CI and should be available for this and all
future releases (@tmccombs).
* fd is now available on Void Linux (@maxice8)
* The minimum required Rust version is now 1.20
## Thanks
@Doxterpepper deserves a special mention for his great work that is included in this release and
for the support in ticket discussions and concerning Travis CI fixes. Thank you very much!
Thanks also go out to @tmccombs for the work on Debian packages and for reviewing a lot of pull requests!
# v6.0.0
## Changes
- The `--exec`/`-x` option does not spawn an intermediate shell anymore. This improves the
performance of parallel command execution and fixes a whole class of (present and potentially
future) problems with shell escaping. The drawback is that shell commands cannot directly be
called with `--exec`. See #155 for the full discussion. These changes have been implemented by
@reima (Thanks!).
## Bugfixes
- `--exec` does not escape cmd.exe metacharacters on Windows (see #155, as above).
## Other
* *fd* is now available in the FreeBSD ports (@andoriyu)
* The minimal `rustc` version is now checked when building with `cargo`, see #164 (@matematikaadit)
* The output directory for the shell completion files is created if it does not exist (@andoriyu)
# v5.0.0
## Features
* Added new `--exec`, `-x` option for parallel command execution (@mmstick, see #84 and #116). See the corresponding [README section](https://github.com/sharkdp/fd#parallel-command-execution) for an introduction.
* Auto-disable color output on unsupported Windows shells like `cmd.exe` (@iology, see #129)
* Added the `--exclude`, `-X` option to suppress certain files/directories in the search results
(see #89).
* Added ripgrep aliases `-u` and `-uu` for `--no-ignore` and `--no-ignore --hidden`, respectively
(@unsignedint, see #92)
* Added `-i`, `--ignore-case` (@iology, see #95)
* Made smart case really smart (@reima, see #103)
* Added RedoxOS support (@goyox86, see #131)
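A few illustrative invocations of the options above (pattern names are placeholders):
``` bash
# Also search hidden and ignored files (ripgrep-style alias for --no-ignore --hidden)
fd -uu pattern
# Case-insensitive search regardless of the pattern
fd -i readme
# Hide matches under certain directories
fd --exclude node_modules pattern
```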
## Changes
* The dot `.` can now match newlines in file names (@iology, see #111)
* The short `--type` argument for symlinks has been changed from `s` to `l` (@jcpetkovich, see #83)
## Bugfixes
* Various improvements in root-path and symlink handling (@iology, see #82, #107, and #113)
* Fixed absolute path handling on Windows (@reima, #93)
* Fixed: current directory not included when using relative path (see #81)
* Fixed `--type` behavior for unknown file types (@iology, see #150)
* Some fixes around `--exec` (@iology, see #142)
## Other
* Major updates and bugfixes to our continuous integration and deployment tooling on Travis
(@matematikaadit, see #149, #145, #133)
* Code style improvements & automatic style checking via `rustfmt` on Travis (@Detegr, see #99)
* Added a man page (@pickfire, see #77)
* *fd* has been relicensed under the dual license MIT/Apache-2.0 (@Detegr, see #105)
* Major refactorings and code improvements (Big thanks to @gsquire, @reima, @iology)
* First version of [`CONTRIBUTING`](https://github.com/sharkdp/fd/blob/master/CONTRIBUTING.md) guidelines
* There is now a Nix package (@mehandes)
* *fd* is now in the official Arch Linux repos (@cassava)
* Improved tooling around shell completion files (@ImbaKnugel, see #124)
* Updated tutorial in the [`README`](https://github.com/sharkdp/fd/blob/master/README.md)
* The minimum required version of Rust has been bumped to 1.19.
## Thanks
A *lot* of things have happened since the last release and I'd like to thank all contributors for their great support. I'd also like to thank those that have contributed by reporting bugs and by posting feature requests.
I'd also like to take this chance to say a special Thank You to a few people that have stood out in one way or another: To @iology, for contributing a multitude of bugfixes, improvements and new features. To @reima and @Detegr for their continuing great support. To @mmstick, for implementing the most advanced new feature of *fd*. And to @matematikaadit for the CI/tooling upgrades.
# v4.0.0
## Features
* Added filtering by file extension, for example `fd -e txt`, see #56 (@reima)
* Add option to force colored output: `--color always`, see #49 (@Detegr)
* Generate Shell completions for Bash, ZSH, Fish and Powershell, see #64 (@ImbaKnugel)
* Better & extended `--help` text (@abaez and @Detegr)
* Proper Windows support, see #70
## Changes
* The integration tests have been re-written in Rust :sparkles:, making them platform-independent and easily callable via `cargo test` - see #65 (many thanks to @reima!)
* New tutorial in the README (@deg4uss3r)
* Reduced number of `stat` syscalls for each result from 3 to 1, see #36.
* Enabled Appveyor CI
# v3.1.0
## Features
- Added file type filtering, e.g. `fd --type directory` or `fd -t f` (@exitium)
# v3.0.0
## Features
- Directories are now traversed in parallel, leading to significant performance improvements (see [benchmarks](https://github.com/sharkdp/fd#benchmark))
- Added `--print0` option (@michaelmior)
- Added AUR packages (@wezm)
## Changes
- Changed short flag for `--follow` from `-f` to `-L` (consistency with `ripgrep`)
# v2.0.0
* Changed `--sensitive` to `--case-sensitive`
* Changed `--absolute` to `--absolute-path`
* Throw an error if the root directory does not exist, see #39
* Use absolute paths if the root dir is an absolute path, see #40
* Handle invalid UTF-8, see #34 #38
* Support `-V`, `--version` by switching from `getopts` to `clap`.
Misc:
* It's now possible to install `fd` via homebrew on macOS: `brew install fd`.
# v1.1.0
- Windows compatibility (@sebasv), see #29 #35
- Safely exit on broken output pipes (e.g.: usage with `head`, `tail`, ..), see #24
- Backport for rust 1.16, see #23
# v1.0.0
* Respect `.(git)ignore` files
* Use `LS_COLORS` environment variable directly, instead of `~/.dir_colors` file.
* Added unit and integration tests
* Added optional second argument (search path)
# v0.3.0
- Parse dircolors files, closes #20
- Colorize each path component, closes #19
- Add short command line option for --hidden, see #18
# v0.2.0
- Option to follow symlinks, disable colors, closes #16, closes #17
- `--filename` instead of `--full-path`
- Option to search hidden directories, closes #12
- Configurable search depth, closes #13
- Detect interactive terminal, closes #11
# v0.1.0
Initial release


@@ -10,24 +10,6 @@ We welcome any form of contribution:
 **Note**: Before you take the time to open a pull request, please open a ticket first. This will
 give us the chance to discuss any potential changes first.
-## Add an entry to the changelog
-If your contribution changes the behavior of `fd` (as opposed to a typo-fix
-in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file
-and describe your changes. This makes the release process much easier and
-therefore helps to get your changes into a new `fd` release faster.
-The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few
-subsections (Features, Bugfixes, …). Please add your entry to the subsection
-that best describes your change.
-Entries follow this format:
-```
-- Short description of what has been changed, see #123 (@user)
-```
-Here, `#123` is the number of the original issue and/or your pull request.
-Please replace `@user` by your GitHub username.
 ## Important links
 * [Open issues](https://github.com/sharkdp/fd/issues)
@@ -35,3 +17,4 @@ Please replace `@user` by your GitHub username.
 * [Development section in the README](https://github.com/sharkdp/fd#development)
 * [fd on crates.io](https://crates.io/crates/fd-find)
 * [LICENSE-APACHE](https://github.com/sharkdp/fd/blob/master/LICENSE-APACHE) and [LICENSE-MIT](https://github.com/sharkdp/fd/blob/master/LICENSE-MIT)

Cargo.lock generated

File diff suppressed because it is too large


@@ -12,83 +12,42 @@ keywords = [
 "filesystem",
 "tool",
 ]
-license = "MIT OR Apache-2.0"
+license = "MIT/Apache-2.0"
 name = "fd-find"
 readme = "README.md"
 repository = "https://github.com/sharkdp/fd"
-version = "10.2.0"
-edition= "2021"
-rust-version = "1.77.2"
-[[bin]]
-name = "fd"
-path = "src/main.rs"
+version = "7.1.0"
 [badges.appveyor]
 repository = "sharkdp/fd"
 [badges.travis-ci]
 repository = "sharkdp/fd"
+[[bin]]
+name = "fd"
+path = "src/main.rs"
 [build-dependencies]
-version_check = "0.9"
+clap = "2.31.2"
+version_check = "0.1.3"
 [dependencies]
-aho-corasick = "1.1"
-nu-ansi-term = "0.50"
-argmax = "0.3.1"
-ignore = "0.4.22"
-regex = "1.10.5"
-regex-syntax = "0.8"
-ctrlc = "3.2"
-humantime = "2.1"
-globset = "0.4"
-anyhow = "1.0"
-etcetera = "0.8"
-normpath = "1.1.1"
-crossbeam-channel = "0.5.13"
-clap_complete = {version = "4.5.24", optional = true}
-faccess = "0.2.4"
+ansi_term = "0.11"
+atty = "0.2"
+ignore = "0.4.3"
+lazy_static = "1.1.0"
+num_cpus = "1.8"
+regex = "1.0.0"
+regex-syntax = "0.6"
+ctrlc = "3.1"
 [dependencies.clap]
-version = "4.5.13"
-features = ["suggestions", "color", "wrap_help", "cargo", "derive"]
-[dependencies.chrono]
-version = "0.4.38"
-default-features = false
-features = ["std", "clock"]
-[dependencies.lscolors]
-version = "0.19"
-default-features = false
-features = ["nu-ansi-term"]
-[target.'cfg(unix)'.dependencies]
-nix = { version = "0.29.0", default-features = false, features = ["signal", "user", "hostname"] }
+version = "2.31.2"
+features = ["suggestions", "color", "wrap_help"]
 [target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
 libc = "0.2"
-# FIXME: Re-enable jemalloc on macOS
-# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS
-# Catalina. See https://github.com/sharkdp/fd/issues/498 for details.
-[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
-jemallocator = {version = "0.5.4", optional = true}
 [dev-dependencies]
 diff = "0.1"
-tempfile = "3.10"
-filetime = "0.2"
-test-case = "3.3"
-[profile.release]
-lto = true
-strip = true
-codegen-units = 1
-[features]
-use-jemalloc = ["jemallocator"]
-completions = ["clap_complete"]
-base = ["use-jemalloc"]
-default = ["use-jemalloc", "completions"]
+tempdir = "0.3"


@@ -1,6 +0,0 @@
# https://github.com/sharkdp/fd/issues/1085
[target.aarch64-unknown-linux-gnu.env]
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
[target.aarch64-unknown-linux-musl.env]
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]


@@ -186,7 +186,7 @@ APPENDIX: How to apply the Apache License to your work.
 same "printed page" as the copyright notice for easier
 identification within third-party archives.
-Copyright 2017-2020 fd developers
+Copyright 2017 fd developers
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.


@@ -1,21 +1,23 @@
-MIT License
-Copyright (c) 2017-present The fd developers
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.


@@ -1,37 +0,0 @@
PROFILE=release
EXE=target/$(PROFILE)/fd
prefix=/usr/local
bindir=$(prefix)/bin
datadir=$(prefix)/share
exe_name=fd
$(EXE): Cargo.toml src/**/*.rs
cargo build --profile $(PROFILE) --locked
.PHONY: completions
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd
comp_dir=@mkdir -p autocomplete
autocomplete/fd.bash: $(EXE)
$(comp_dir)
$(EXE) --gen-completions bash > $@
autocomplete/fd.fish: $(EXE)
$(comp_dir)
$(EXE) --gen-completions fish > $@
autocomplete/fd.ps1: $(EXE)
$(comp_dir)
$(EXE) --gen-completions powershell > $@
autocomplete/_fd: contrib/completion/_fd
$(comp_dir)
cp $< $@
install: $(EXE) completions
install -Dm755 $(EXE) $(DESTDIR)$(bindir)/fd
install -Dm644 autocomplete/fd.bash $(DESTDIR)/$(datadir)/bash-completion/completions/$(exe_name)
install -Dm644 autocomplete/fd.fish $(DESTDIR)/$(datadir)/fish/vendor_completions.d/$(exe_name).fish
install -Dm644 autocomplete/_fd $(DESTDIR)/$(datadir)/zsh/site-functions/_$(exe_name)
install -Dm644 doc/fd.1 $(DESTDIR)/$(datadir)/man/man1/$(exe_name).1
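Assuming the default variable values above, a typical build-and-install sequence with this Makefile might look like the following (illustrative; the exact destinations depend on `prefix` and `DESTDIR`):
``` bash
# Build target/release/fd with locked dependencies
make
# Install the binary, shell completions and man page under /usr/local
sudo make install prefix=/usr/local
```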

README.md

@@ -1,40 +1,284 @@
# fd
[![Build Status](https://travis-ci.org/sharkdp/fd.svg?branch=master)](https://travis-ci.org/sharkdp/fd)
[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml) [![Build status](https://ci.appveyor.com/api/projects/status/21c4p5fwggc5gy3j?svg=true)](https://ci.appveyor.com/project/sharkdp/fd)
[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)
[[中文](https://github.com/cha0ran/fd-zh)]
[[한국어](https://github.com/spearkkk/fd-kor)]
`fd` is a program to find entries in your filesystem. *fd* is a simple, fast and user-friendly alternative to
It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.org/software/findutils/). [*find*](https://www.gnu.org/software/findutils/).
While it does not aim to support all of `find`'s powerful functionality, it provides sensible
(opinionated) defaults for a majority of use cases.
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting) While it does not seek to mirror all of *find*'s powerful functionality, it provides sensible
(opinionated) defaults for [80%](https://en.wikipedia.org/wiki/Pareto_principle) of the use cases.
## Features
* Convenient syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`.
* Intuitive syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`. * Colorized terminal output (similar to *ls*).
* Regular expression (default) and glob-based patterns. * It's *fast* (see [benchmarks](#benchmark) below).
* [Very fast](#benchmark) due to parallelized directory traversal.
* Uses colors to highlight different file types (same as `ls`).
* Supports [parallel command execution](#command-execution)
* Smart case: the search is case-insensitive by default. It switches to * Smart case: the search is case-insensitive by default. It switches to
case-sensitive if the pattern contains an uppercase case-sensitive if the pattern contains an uppercase
character[\*](http://vimdoc.sourceforge.net/htmldoc/options.html#'smartcase').
* Ignores hidden directories and files, by default.
* Ignores patterns from your `.gitignore`, by default.
* Regular expressions.
* Unicode-awareness.
* The command name is *50%* shorter[\*](https://github.com/ggreer/the_silver_searcher) than * The command name is *50%* shorter[\*](https://github.com/ggreer/the_silver_searcher) than
`find` :-).
* Parallel command execution with a syntax similar to GNU Parallel.
## Demo
![Demo](doc/screencast.svg)
## How to use ## Benchmark
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
subdirectories and about a million files. For averaging and statistical analysis, I'm using
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
Let's start with `find`:
```
Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
Time (mean ± σ): 7.236 s ± 0.090 s
Range (min … max): 7.133 s … 7.385 s
```
`find` is much faster if it does not need to perform a regular-expression search:
```
Benchmark #2: find ~ -iname '*[0-9].jpg'
Time (mean ± σ): 3.914 s ± 0.027 s
Range (min … max): 3.876 s … 3.964 s
```
Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
```
Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
Time (mean ± σ): 811.6 ms ± 26.9 ms
Range (min … max): 786.0 ms … 870.7 ms
```
For this particular example, `fd` is approximately nine times faster than `find -iregex`
and about five times faster than `find -iname`. By the way, both tools found the exact
same 20880 files :smile:.
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
folders, it is almost an order of magnitude faster:
```
Benchmark #4: fd '[0-9]\.jpg$' ~
Time (mean ± σ): 123.7 ms ± 6.0 ms
Range (min … max): 118.8 ms … 140.0 ms
```
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
performed quite a lot of different tests (and found consistent results), things might
be different for you! I encourage everyone to try it out on their own. See
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
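If you want to try a comparison like this yourself, a `hyperfine` invocation along the following
lines should work (a sketch; adjust the path and the number of warm-up runs to your own setup):

``` bash
# Compare the three commands from above against a warm disk cache
hyperfine --warmup 3 \
    "find ~ -iregex '.*[0-9]\.jpg$'" \
    "find ~ -iname '*[0-9].jpg'" \
    "fd -HI '.*[0-9]\.jpg$' ~"
```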
Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
## Colorized output
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
variable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value
of this variable is set by the `dircolors` command which provides a convenient configuration format
to define colors for different file formats.
On most distributions, `LS_COLORS` should be set already. If you are looking for alternative, more
complete (and more colorful) variants, see
[here](https://github.com/seebi/dircolors-solarized) or
[here](https://github.com/trapd00r/LS_COLORS).
## Parallel command execution
If the `-x`/`--exec` option is specified alongside a command template, a job pool will be created
for executing commands in parallel for each discovered path as the input. The syntax for generating
commands is similar to that of GNU Parallel:
- `{}`: A placeholder token that will be replaced with the path of the search result
(`documents/images/party.jpg`).
- `{.}`: Like `{}`, but without the file extension (`documents/images/party`).
- `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`).
- `{//}`: Uses the parent of the discovered path (`documents/images`).
- `{/.}`: Uses the basename, with the extension removed (`party`).
``` bash
# Convert all jpg files to png files:
fd -e jpg -x convert {} {.}.png
# Unpack all zip files (if no placeholder is given, the path is appended):
fd -e zip -x unzip
# Convert all flac files into opus files:
fd -e flac -x ffmpeg -i {} -c:a libopus {.}.opus
# Count the number of lines in Rust files (the command template can be terminated with ';'):
fd -x wc -l \; -e rs
```
## Installation
### On Ubuntu
*... and other Debian-based Linux distributions.*
Download the latest `.deb` package from the [release page](https://github.com/sharkdp/fd/releases) and install it via:
``` bash
sudo dpkg -i fd_7.0.0_amd64.deb # adapt version number and architecture
```
### On Fedora
Starting with Fedora 28, you can install `fd` from the official package sources:
``` bash
dnf install fd-find
```
For older versions, you can use this [Fedora copr](https://copr.fedorainfracloud.org/coprs/keefle/fd/) to install `fd`:
``` bash
dnf copr enable keefle/fd
dnf install fd
```
### On Arch Linux
You can install [the fd package](https://www.archlinux.org/packages/community/x86_64/fd/) from the official repos:
```
pacman -S fd
```
### On Gentoo Linux
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
```
emerge -av fd
```
### On openSUSE Linux
You can install [the fd package](https://software.opensuse.org/package/fd) from the official repo:
```
zypper in fd
```
### On Void Linux
You can install `fd` via xbps-install:
```
xbps-install -S fd
```
### On macOS
You can install `fd` with [Homebrew](http://braumeister.org/formula/fd):
```
brew install fd
```
… or with MacPorts:
```
sudo port install fd
```
### On Windows
You can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases).
Alternatively, you can install `fd` via [Scoop](http://scoop.sh):
```
scoop install fd
```
Or via [Chocolatey](https://chocolatey.org):
```
choco install fd
```
### On NixOS / via Nix
You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
```
nix-env -i fd
```
### On FreeBSD
You can install `sysutils/fd` via portmaster:
```
portmaster sysutils/fd
```
### From source
With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via:
```
cargo install fd-find
```
Note that rust version *1.20.0* or later is required.
### From binaries
The [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows.
## Development
```bash
git clone https://github.com/sharkdp/fd
# Build
cd fd
cargo build
# Run unit tests and integration tests
cargo test
# Install
cargo install
```
## Command-line options
```
USAGE:
fd [FLAGS/OPTIONS] [<pattern>] [<path>...]
FLAGS:
-H, --hidden Search hidden files and directories
-I, --no-ignore Do not respect .(git|fd)ignore files
--no-ignore-vcs Do not respect .gitignore files
-s, --case-sensitive Case-sensitive search (default: smart case)
-i, --ignore-case Case-insensitive search (default: smart case)
-F, --fixed-strings Treat the pattern as a literal string
-a, --absolute-path Show absolute instead of relative paths
-L, --follow Follow symbolic links
-p, --full-path Search full path (default: file-/dirname only)
-0, --print0 Separate results by the null character
-h, --help Prints help information
-V, --version Prints version information
OPTIONS:
-d, --max-depth <depth> Set maximum search depth (default: none)
-t, --type <filetype>... Filter by type: file (f), directory (d), symlink (l),
executable (x), empty (e)
-e, --extension <ext>... Filter by file extension
-x, --exec <cmd> Execute a command for each search result
-E, --exclude <pattern>... Exclude entries that match the given glob pattern
--ignore-file <path>... Add a custom ignore-file in .gitignore format
-c, --color <when> When to use colors: never, *auto*, always
-j, --threads <num> Set number of threads to use for searching & executing
-S, --size <size>... Limit results based on the size of files.
ARGS:
<pattern> the search pattern, a regular expression (optional)
<path>... the root directory for the filesystem search (optional)
```
## Tutorial
First, to get an overview of all available command line options, you can either run
[`fd -h`](#command-line-options) for a concise help message or `fd --help` for a more detailed
version.
### Simple search
@ -60,8 +304,6 @@ X11/xinit/xinitrc
X11/xinit/xserverrc
```
The regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/latest/regex/#syntax).
### Specifying the root directory
If we want to search a specific directory, it can be given as a second argument to *fd*:
@ -72,7 +314,7 @@ If we want to search a specific directory, it can be given as a second argument
/etc/passwd
```
### List all files, recursively
*fd* can be called with no arguments. This is very useful to get a quick overview of all entries
in the current directory, recursively (similar to `ls -R`):
@ -84,15 +326,6 @@ testenv/mod.rs
tests.rs
```
If you want to use this functionality to list all files in a given directory, you have to use
a catch-all pattern such as `.` or `^`:
``` bash
> fd . fd/tests/
testenv
testenv/mod.rs
tests.rs
```
### Searching for a particular file extension
Often, we are interested in all files of a particular type. This can be done with the `-e` (or
@ -112,15 +345,6 @@ src/lscolors/mod.rs
tests/testenv/mod.rs
```
### Searching for a particular file name
To find files with exactly the provided search pattern, use the `-g` (or `--glob`) option:
``` bash
> fd -g libc.so /usr
/usr/lib32/libc.so
/usr/lib/libc.so
```
### Hidden and ignored files
By default, *fd* does not search hidden directories and does not show hidden files in the
search results. To disable this behavior, we can use the `-H` (or `--hidden`) option:
@ -140,96 +364,7 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
```
To really search *all* files and directories, simply combine the hidden and ignore features to show
everything (`-HI`) or use `-u`/`--unrestricted`.
### Matching the full path
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
you can match against the full path.
```bash
> fd -p -g '**/.git/config'
> fd -p '.*/lesson-\d+/[a-z]+.(jpg|png)'
```
### Command execution
Instead of just showing the search results, you often want to *do something* with them. `fd`
provides two ways to execute external commands for each of your search results:
* The `-x`/`--exec` option runs an external command *for each of the search results* (in parallel).
* The `-X`/`--exec-batch` option launches the external command once, with *all search results as arguments*.
#### Examples
Recursively find all zip archives and unpack them:
``` bash
fd -e zip -x unzip
```
If there are two such files, `file1.zip` and `backup/file2.zip`, this would execute
`unzip file1.zip` and `unzip backup/file2.zip`. The two `unzip` processes run in parallel
(if the files are found fast enough).
Find all `*.h` and `*.cpp` files and auto-format them inplace with `clang-format -i`:
``` bash
fd -e h -e cpp -x clang-format -i
```
Note how the `-i` option to `clang-format` can be passed as a separate argument. This is why
we put the `-x` option last.
Find all `test_*.py` files and open them in your favorite editor:
``` bash
fd -g 'test_*.py' -X vim
```
Note that we use capital `-X` here to open a single `vim` instance. If there are two such files,
`test_basic.py` and `lib/test_advanced.py`, this will run `vim test_basic.py lib/test_advanced.py`.
To see details like file permissions, owners, file sizes etc., you can tell `fd` to show them
by running `ls` for each result:
``` bash
fd … -X ls -lhd --color=always
```
This pattern is so useful that `fd` provides a shortcut. You can use the `-l`/`--list-details`
option to execute `ls` in this way: `fd … -l`.
The `-X` option is also useful when combining `fd` with [ripgrep](https://github.com/BurntSushi/ripgrep/) (`rg`) in order to search within a certain class of files, like all C++ source files:
```bash
fd -e cpp -e cxx -e h -e hpp -X rg 'std::cout'
```
Convert all `*.jpg` files to `*.png` files:
``` bash
fd -e jpg -x convert {} {.}.png
```
Here, `{}` is a placeholder for the search result. `{.}` is the same, without the file extension.
See below for more details on the placeholder syntax.
The terminal output of commands run from parallel threads using `-x` will not be interlaced or garbled,
so `fd -x` can be used to rudimentarily parallelize a task run over many files.
An example of this is calculating the checksum of each individual file within a directory.
```
fd -tf -x md5sum > file_checksums.txt
```
#### Placeholder syntax
The `-x` and `-X` options take a *command template* as a series of arguments (instead of a single string).
If you want to add additional options to `fd` after the command template, you can terminate it with a `\;`.
The syntax for generating commands is similar to that of [GNU Parallel](https://www.gnu.org/software/parallel/):
- `{}`: A placeholder token that will be replaced with the path of the search result
(`documents/images/party.jpg`).
- `{.}`: Like `{}`, but without the file extension (`documents/images/party`).
- `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`).
- `{//}`: The parent of the discovered path (`documents/images`).
- `{/.}`: The basename, with the extension removed (`party`).
If you do not include a placeholder, *fd* automatically adds a `{}` at the end.
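As an illustration of how these placeholders expand, the following invocation simply echoes every
form for each search result (the `echo` command and the `-e jpg` filter are just for demonstration).
Note the `\;` that terminates the command template before the remaining `fd` options:

``` bash
# For a result like documents/images/party.jpg, this prints:
# documents/images/party.jpg documents/images/party party.jpg documents/images party
fd -x echo {} {.} {/} {//} {/.} \; -e jpg
```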
#### Parallel vs. serial execution
For `-x`/`--exec`, you can control the number of parallel jobs by using the `-j`/`--threads` option.
Use `--threads=1` for serial execution.
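For example, to run the `flac` conversion from above one file at a time instead of in parallel
(a sketch based on the earlier example):

``` bash
# --threads=1 forces serial execution of the ffmpeg invocations
fd -e flac --threads=1 -x ffmpeg -i {} -c:a libopus {.}.opus
```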
### Excluding specific files or directories
@ -259,177 +394,18 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
*.bak
```
> [!NOTE]
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
This is usually located in `~/.config/fd/ignore` on macOS or Linux, and `%APPDATA%\fd\ignore` on
Windows.
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
are not included in output if you use the `--hidden` option.
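For example, a minimal global ignore file along these lines could look as follows (the entries are
just illustrations, not recommendations):

```
# ~/.config/fd/ignore
.git/
node_modules/
*.bak
```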
### Deleting files
You can use `fd` to remove all files and directories that are matched by your search pattern.
If you only want to remove files, you can use the `--exec-batch`/`-X` option to call `rm`. For
example, to recursively remove all `.DS_Store` files, run:
``` bash
> fd -H '^\.DS_Store$' -tf -X rm
```
If you are unsure, always call `fd` without `-X rm` first. Alternatively, use `rm`'s "interactive"
option:
``` bash
> fd -H '^\.DS_Store$' -tf -X rm -i
```
If you also want to remove a certain class of directories, you can use the same technique. You will
have to use `rm`'s `--recursive`/`-r` flag to remove directories.
> [!NOTE]
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
> path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
> situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
> No such file or directory"* errors in the `rm` call.
### Command-line options
This is the output of `fd -h`. To see the full set of command-line options, use `fd --help` which
also includes a much more detailed help text.
```
Usage: fd [OPTIONS] [pattern] [path]...
Arguments:
[pattern] the search pattern (a regular expression, unless '--glob' is used; optional)
[path]... the root directories for the filesystem search (optional)
Options:
-H, --hidden Search hidden files and directories
-I, --no-ignore Do not respect .(git|fd)ignore files
-s, --case-sensitive Case-sensitive search (default: smart case)
-i, --ignore-case Case-insensitive search (default: smart case)
-g, --glob Glob-based search (default: regular expression)
-a, --absolute-path Show absolute instead of relative paths
-l, --list-details Use a long listing format with file metadata
-L, --follow Follow symbolic links
-p, --full-path Search full abs. path (default: filename only)
-d, --max-depth <depth> Set maximum search depth (default: none)
-E, --exclude <pattern> Exclude entries that match the given glob pattern
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l),
executable (x), empty (e), socket (s), pipe (p), char-device
(c), block-device (b)
-e, --extension <ext> Filter by file extension
-S, --size <size> Limit results based on the size of files
--changed-within <date|dur> Filter by file modification time (newer than)
--changed-before <date|dur> Filter by file modification time (older than)
-o, --owner <user:group> Filter by owning user and/or group
--format <fmt> Print results according to template
-x, --exec <cmd>... Execute a command for each search result
-X, --exec-batch <cmd>... Execute a command with all search results at once
-c, --color <when> When to use colors [default: auto] [possible values: auto,
always, never]
--hyperlink[=<when>] Add hyperlinks to output paths [default: never] [possible
values: auto, always, never]
-h, --help Print help (see more with '--help')
-V, --version Print version
```
## Benchmark
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750.000
subdirectories and about 4 million files. For averaging and statistical analysis, I'm using
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
Let's start with `find`:
```
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$'
Time (mean ± σ): 19.922 s ± 0.109 s
Range (min … max): 19.765 s … 20.065 s
```
`find` is much faster if it does not need to perform a regular-expression search:
```
Benchmark 2: find ~ -iname '*[0-9].jpg'
Time (mean ± σ): 11.226 s ± 0.104 s
Range (min … max): 11.119 s … 11.466 s
```
Now let's try the same for `fd`. Note that `fd` performs a regular expression
search by default. The `-u`/`--unrestricted` option is needed here for
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and
ignored paths (see below):
```
Benchmark 3: fd -u '[0-9]\.jpg$' ~
Time (mean ± σ): 854.8 ms ± 10.0 ms
Range (min … max): 839.2 ms … 868.9 ms
```
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
same 546 files :smile:.
**Note**: This is *one particular* benchmark on *one particular* machine. While we have
performed a lot of different tests (and found consistent results), things might
be different for you! We encourage everyone to try it out on their own. See
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
## Troubleshooting
### `fd` does not find my file!
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
use the `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):
``` bash
> fd -u …
```
### Colorized output
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
variable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value
of this variable is set by the `dircolors` command which provides a convenient configuration format
to define colors for different file formats.
On most distributions, `LS_COLORS` should be set already. If you are on Windows or if you are looking
for alternative, more complete (or more colorful) variants, see [here](https://github.com/sharkdp/vivid),
[here](https://github.com/seebi/dircolors-solarized) or
[here](https://github.com/trapd00r/LS_COLORS).
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
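If `LS_COLORS` is not set on your system, a minimal way to set it up in `bash` (assuming GNU
`dircolors` is available) and to turn colors off again for a single call looks like this:

``` bash
# Generate and export LS_COLORS from the default dircolors database
eval "$(dircolors -b)"

# Disable colorized fd output for one invocation via the NO_COLOR variable
NO_COLOR=1 fd …
```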
### `fd` doesn't seem to interpret my regex pattern correctly
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
shell. If in doubt, always make sure to put single quotes around the regex pattern:
``` bash
> fd '^[A-Z][0-9]+$'
```
If your pattern starts with a dash, you have to add `--` to signal the end of command line
options. Otherwise, the pattern will be interpreted as a command-line option. Alternatively,
use a character class with a single hyphen character:
``` bash
> fd -- '-pattern'
> fd '[-]pattern'
```
### "Command not found" for `alias`es or shell functions
Shell `alias`es and shell functions cannot be used for command execution via `fd -x` or
`fd -X`. In `zsh`, you can make the alias global via `alias -g myalias="…"`. In `bash`,
you can use `export -f my_function` to make it available to child processes. You would still
need to call `fd -x bash -c 'my_function "$1"' bash`. For other use cases or shells, use
a (temporary) shell script.
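As a sketch of the `bash` approach described above (the function name and the `-e txt` filter are
just placeholders):

``` bash
# Define and export a function so that child bash processes can see it
my_function() {
    echo "processing: $1"
}
export -f my_function

# fd cannot call the function directly, so wrap it in an explicit bash invocation;
# the trailing 'bash' becomes $0 and the search result is passed as $1
fd -e txt -x bash -c 'my_function "$1"' bash
```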
## Integration with other programs
### Using fd with `fzf`
You can use *fd* to generate input for the command-line fuzzy finder [fzf](https://github.com/junegunn/fzf):
``` bash
@ -452,21 +428,7 @@ export FZF_DEFAULT_OPTS="--ansi"
For more details, see the [Tips section](https://github.com/junegunn/fzf#tips) of the fzf README.
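A common way to wire this up is to make fzf use `fd` as its default source (a sketch; the exact
snippet in the elided part of this section may differ):

``` bash
# Let fzf call fd by default and interpret fd's ANSI color codes
export FZF_DEFAULT_COMMAND='fd --type file --color=always'
export FZF_DEFAULT_OPTS="--ansi"
```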
### Using fd with `rofi`
[*rofi*](https://github.com/davatorium/rofi) is a graphical launch menu application that is able to create menus by reading from *stdin*. Piping `fd` output into `rofi`s `-dmenu` mode creates fuzzy-searchable lists of files and directories.
#### Example
Create a case-insensitive searchable multi-select list of *PDF* files under your `$HOME` directory and open the selection with your configured PDF viewer. To list all file types, drop the `-e pdf` argument.
``` bash
fd --type f -e pdf . $HOME | rofi -keep-right -dmenu -i -p FILES -multi-select | xargs -I {} xdg-open {}
```
To modify the list that is presented by rofi, add arguments to the `fd` command. To modify the search behaviour of rofi, add arguments to the `rofi` command.
### Using fd with `emacs`
The emacs package [find-file-in-project](https://github.com/technomancy/find-file-in-project) can
use *fd* to find files.
@ -476,255 +438,3 @@ After installing `find-file-in-project`, add the line `(setq ffip-use-rust-fd t)
In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alternatively, run
`M-x find-file-in-project` to list all available files in the project.
### Printing the output as a tree
To format the output of `fd` as a file-tree you can use the `tree` command with
`--fromfile`:
```bash
fd | tree --fromfile
```
This can be more useful than running `tree` by itself because `tree` does not
ignore any files by default, nor does it support as rich a set of options as
`fd` does to control what to print:
```bash
fd --extension rs | tree --fromfile
.
├── build.rs
└── src
├── app.rs
└── error.rs
```
On bash and similar shells, you can simply create an alias:
```bash
alias as-tree='tree --fromfile'
```
### Using fd with `xargs` or `parallel`
Note that `fd` has a builtin feature for [command execution](#command-execution) with
its `-x`/`--exec` and `-X`/`--exec-batch` options. If you prefer, you can still use
it in combination with `xargs`:
``` bash
> fd -0 -e rs | xargs -0 wc -l
```
Here, the `-0` option tells *fd* to separate search results by the NULL character (instead of
newlines). In the same way, the `-0` option of `xargs` tells it to read the input in this way.
## Installation
[![Packaging status](https://repology.org/badge/vertical-allrepos/fd-find.svg)](https://repology.org/project/fd-find/versions)
### On Ubuntu
*... and other Debian-based Linux distributions.*
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
[officially maintained package](https://packages.ubuntu.com/fd-find):
```
apt install fd-find
```
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
It is recommended that after installation, you add a link to `fd` by executing the command
`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.
Make sure that `$HOME/.local/bin` is in your `$PATH`.
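In other words (a sketch, assuming `~/.local/bin` does not exist yet):

``` bash
mkdir -p ~/.local/bin
ln -s "$(which fdfind)" ~/.local/bin/fd
```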
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
[release page](https://github.com/sharkdp/fd/releases) and install it via:
``` bash
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
```
Note that the .deb packages on the release page for this project still name the executable `fd`.
### On Debian
If you run Debian Buster or newer, you can install the
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
```
apt-get install fd-find
```
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
It is recommended that after installation, you add a link to `fd` by executing the command
`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.
Make sure that `$HOME/.local/bin` is in your `$PATH`.
Note that the .deb packages on the release page for this project still name the executable `fd`.
### On Fedora
Starting with Fedora 28, you can install `fd` from the official package sources:
``` bash
dnf install fd-find
```
### On Alpine Linux
You can install [the fd package](https://pkgs.alpinelinux.org/packages?name=fd)
from the official sources, provided you have the appropriate repository enabled:
```
apk add fd
```
### On Arch Linux
You can install [the fd package](https://www.archlinux.org/packages/community/x86_64/fd/) from the official repos:
```
pacman -S fd
```
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
### On Gentoo Linux
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
```
emerge -av fd
```
### On openSUSE Linux
You can install [the fd package](https://software.opensuse.org/package/fd) from the official repo:
```
zypper in fd
```
### On Void Linux
You can install `fd` via xbps-install:
```
xbps-install -S fd
```
### On ALT Linux
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:
```
apt-get install fd
```
### On Solus
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
```
eopkg install fd
```
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
```bash
dnf copr enable tkbcopr/fd
dnf install fd
```
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
### On macOS
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
```
brew install fd
```
… or with MacPorts:
```
port install fd
```
### On Windows
You can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases).
Alternatively, you can install `fd` via [Scoop](http://scoop.sh):
```
scoop install fd
```
Or via [Chocolatey](https://chocolatey.org):
```
choco install fd
```
Or via [Winget](https://learn.microsoft.com/en-us/windows/package-manager/):
```
winget install sharkdp.fd
```
### On GuixOS
You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:
```
guix install fd
```
### On NixOS / via Nix
You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
```
nix-env -i fd
```
### Via Flox
You can use [Flox](https://flox.dev) to install `fd` into a Flox environment:
```
flox install fd
```
### On FreeBSD
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
```
pkg install fd-find
```
### From npm
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
```
npm install -g fd-find
```
### From source
With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via:
```
cargo install fd-find
```
Note that rust version *1.77.2* or later is required.
`make` is also needed for the build.
### From binaries
The [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows. Statically-linked binaries are also available: look for archives with `musl` in the file name.
## Development
```bash
git clone https://github.com/sharkdp/fd
# Build
cd fd
cargo build
# Run unit tests and integration tests
cargo test
# Install
cargo install --path .
```
## Maintainers
- [sharkdp](https://github.com/sharkdp)
- [tmccombs](https://github.com/tmccombs)
- [tavianator](https://github.com/tavianator)
## License
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details.
appveyor.yml Normal file
@ -0,0 +1,94 @@
# Source: https://github.com/starkat99/appveyor-rust/
environment:
global:
PROJECT_NAME: fd
matrix:
# Stable channel
- TARGET: i686-pc-windows-gnu
CHANNEL: stable
- TARGET: i686-pc-windows-msvc
CHANNEL: stable
- TARGET: x86_64-pc-windows-gnu
CHANNEL: stable
- TARGET: x86_64-pc-windows-msvc
CHANNEL: stable
# Beta channel (disabled for speed reasons)
#- TARGET: i686-pc-windows-gnu
# CHANNEL: beta
#- TARGET: i686-pc-windows-msvc
# CHANNEL: beta
#- TARGET: x86_64-pc-windows-gnu
# CHANNEL: beta
#- TARGET: x86_64-pc-windows-msvc
# CHANNEL: beta
# Nightly channel (disabled for speed reasons)
#- TARGET: i686-pc-windows-gnu
# CHANNEL: nightly
#- TARGET: i686-pc-windows-msvc
# CHANNEL: nightly
#- TARGET: x86_64-pc-windows-gnu
# CHANNEL: nightly
#- TARGET: x86_64-pc-windows-msvc
# CHANNEL: nightly
# Install Rust and Cargo
# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml)
install:
- curl -sSf -o rustup-init.exe https://win.rustup.rs
- rustup-init.exe --default-host %TARGET% --default-toolchain %CHANNEL% -y
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
- rustc -Vv
- cargo -V
# 'cargo test' takes care of building for us, so disable Appveyor's build stage. This prevents
# the "directory does not contain a project or solution file" error.
# source: https://github.com/starkat99/appveyor-rust/blob/master/appveyor.yml#L113
build: false
# Equivalent to Travis' `script` phase
test_script:
- cargo build --verbose
- cargo run
- cargo test
before_deploy:
# Generate artifacts for release
- cargo build --release
- mkdir staging
- copy target\release\fd.exe staging
- ps: copy target\release\build\fd-find*\out\_fd.ps1 staging
- cd staging
# release zipfile will look like 'rust-everywhere-v1.2.3-x86_64-pc-windows-msvc'
- 7z a ../%PROJECT_NAME%-%APPVEYOR_REPO_TAG_NAME%-%TARGET%.zip *
- appveyor PushArtifact ../%PROJECT_NAME%-%APPVEYOR_REPO_TAG_NAME%-%TARGET%.zip
deploy:
description: 'Windows release'
# All the zipped artifacts will be deployed
artifact: /.*\.zip/
# Here's how:
# - Go to 'https://github.com/settings/tokens/new' and generate a Token with only the
# `public_repo` scope enabled
# - Then go to 'https://ci.appveyor.com/tools/encrypt' and enter the newly generated token.
# - Enter the "encrypted value" below
auth_token:
secure: mWJ8ieZdGEgHf232fdMyzb9T1rKbkJivYbp/REMo8ax3X5vkQJDHhFjWeuWA3wIC
provider: GitHub
# deploy when a new tag is pushed and only on the stable channel
on:
# channel to use to produce the release artifacts
CHANNEL: stable
appveyor_repo_tag: true
branches:
only:
- master
# IMPORTANT Regex to match tags. Required, or appveyor may not trigger deploys when a new tag
# is pushed. This regex matches semantic versions like v1.2.3-rc4+2016.02.22
- /^v\d+\.\d+\.\d+.*$/
# Disable caching, for now
#cache:
# - '%USERPROFILE%\.cargo'
# - 'target -> Cargo.lock'
build.rs
@ -1,12 +1,43 @@
master version (12 lines):

fn main() {
    let min_version = "1.64";

    match version_check::is_min_version(min_version) {
        Some(true) => {}
        // rustc version too small or can't figure it out
        _ => {
            eprintln!("'fd' requires rustc >= {}", min_version);
            std::process::exit(1);
        }
    }
}

v7.1.0 version (43 lines):

// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

#[macro_use]
extern crate clap;
extern crate version_check;

use clap::Shell;
use std::fs;
use std::io::{self, Write};
use std::process::exit;

include!("src/app.rs");

fn main() {
    match version_check::is_min_version("1.20") {
        // rustc >= 1.20
        Some((true, _)) => {}
        // rustc < 1.20 or can't figure it out
        _ => {
            writeln!(&mut io::stderr(), "This crate requires rustc >= 1.20").unwrap();
            exit(1);
        }
    }

    let var = std::env::var_os("SHELL_COMPLETIONS_DIR").or(std::env::var_os("OUT_DIR"));
    let outdir = match var {
        None => return,
        Some(outdir) => outdir,
    };
    fs::create_dir_all(&outdir).unwrap();

    let mut app = build_app();
    app.gen_completions("fd", Shell::Bash, &outdir);
    app.gen_completions("fd", Shell::Fish, &outdir);
    app.gen_completions("fd", Shell::Zsh, &outdir);
    app.gen_completions("fd", Shell::PowerShell, &outdir);
}
ci/before_deploy.bash Executable file
@ -0,0 +1,128 @@
#!/usr/bin/env bash
# Building and packaging for release
set -ex
build() {
cargo build --target "$TARGET" --release --verbose
}
pack() {
local tempdir
local out_dir
local package_name
local gcc_prefix
tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t tmp)
out_dir=$(pwd)
package_name="$PROJECT_NAME-$TRAVIS_TAG-$TARGET"
if [[ $TARGET == "arm-unknown-linux-gnueabihf" ]]; then
gcc_prefix="arm-linux-gnueabihf-"
else
gcc_prefix=""
fi
# create a "staging" directory
mkdir "$tempdir/$package_name"
mkdir "$tempdir/$package_name/autocomplete"
# copying the main binary
cp "target/$TARGET/release/$PROJECT_NAME" "$tempdir/$package_name/"
"${gcc_prefix}"strip "$tempdir/$package_name/$PROJECT_NAME"
# manpage, readme and license
cp "doc/$PROJECT_NAME.1" "$tempdir/$package_name"
cp README.md "$tempdir/$package_name"
cp LICENSE-MIT "$tempdir/$package_name"
cp LICENSE-APACHE "$tempdir/$package_name"
# various autocomplete
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/"$PROJECT_NAME".bash "$tempdir/$package_name/autocomplete/${PROJECT_NAME}.bash-completion"
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/"$PROJECT_NAME".fish "$tempdir/$package_name/autocomplete"
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/_"$PROJECT_NAME" "$tempdir/$package_name/autocomplete"
# archiving
pushd "$tempdir"
tar czf "$out_dir/$package_name.tar.gz" "$package_name"/*
popd
rm -r "$tempdir"
}
make_deb() {
local tempdir
local architecture
local version
local dpkgname
local conflictname
case $TARGET in
x86_64*)
architecture=amd64
;;
i686*)
architecture=i386
;;
*)
echo "make_deb: skipping target '${TARGET}'" >&2
return 0
;;
esac
version=${TRAVIS_TAG#v}
if [[ $TARGET = *musl* ]]; then
dpkgname=$PROJECT_NAME-musl
conflictname=$PROJECT_NAME
else
dpkgname=$PROJECT_NAME
conflictname=$PROJECT_NAME-musl
fi
tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t tmp)
# copy the main binary
install -Dm755 "target/$TARGET/release/$PROJECT_NAME" "$tempdir/usr/bin/$PROJECT_NAME"
strip "$tempdir/usr/bin/$PROJECT_NAME"
# manpage
install -Dm644 "doc/$PROJECT_NAME.1" "$tempdir/usr/share/man/man1/$PROJECT_NAME.1"
gzip --best "$tempdir/usr/share/man/man1/$PROJECT_NAME.1"
# readme and license
install -Dm644 README.md "$tempdir/usr/share/doc/$PROJECT_NAME/README.md"
install -Dm644 LICENSE-MIT "$tempdir/usr/share/doc/$PROJECT_NAME/LICENSE-MIT"
install -Dm644 LICENSE-APACHE "$tempdir/usr/share/doc/$PROJECT_NAME/LICENSE-APACHE"
# completions
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/$PROJECT_NAME.bash "$tempdir/usr/share/bash-completion/completions/${PROJECT_NAME}"
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/$PROJECT_NAME.fish "$tempdir/usr/share/fish/completions/$PROJECT_NAME.fish"
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/_$PROJECT_NAME "$tempdir/usr/share/zsh/vendor-completions/_$PROJECT_NAME"
# Control file
mkdir "$tempdir/DEBIAN"
cat > "$tempdir/DEBIAN/control" <<EOF
Package: $dpkgname
Version: $version
Section: utils
Priority: optional
Maintainer: David Peter <mail@david-peter.de>
Architecture: $architecture
Provides: $PROJECT_NAME
Conflicts: $conflictname
Description: Simple, fast and user-friendly alternative to find
While fd does not seek to mirror all of find's powerful functionality, it
provides sensible (opinionated) defaults for 80% of the use cases.
EOF
fakeroot dpkg-deb --build "$tempdir" "${dpkgname}_${version}_${architecture}.deb"
}
main() {
build
pack
if [[ $TARGET = *linux* ]]; then
make_deb
fi
}
main
ci/before_install.bash Executable file
@ -0,0 +1,26 @@
#!/usr/bin/env bash
set -ex
if [ "$TRAVIS_OS_NAME" != linux ]; then
exit 0
fi
sudo apt-get update
# needed to build deb packages
sudo apt-get install -y fakeroot
# needed for i686 linux gnu target
if [[ $TARGET == i686-unknown-linux-gnu ]]; then
sudo apt-get install -y gcc-multilib
fi
# needed for cross-compiling for arm
if [[ $TARGET == arm-unknown-linux-gnueabihf ]]; then
sudo apt-get install -y \
gcc-4.8-arm-linux-gnueabihf \
binutils-arm-linux-gnueabihf \
libc6-armhf-cross \
libc6-dev-armhf-cross
fi
ci/script.bash Executable file
@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -ex
# Incorporate TARGET env var to the build and test process
cargo build --target "$TARGET" --verbose
# We cannot run arm executables on linux
if [[ $TARGET != arm-unknown-linux-gnueabihf ]]; then
cargo test --target "$TARGET" --verbose
fi
@ -1,285 +0,0 @@
#compdef fd
##
# zsh completion function for fd
#
# Based on ripgrep completion function.
# Originally based on code from the zsh-users project — see copyright notice
# below.
autoload -U is-at-least
_fd() {
local curcontext="$curcontext" no='!' ret=1
local -a context line state state_descr _arguments_options fd_types fd_args
local -A opt_args
if is-at-least 5.2; then
_arguments_options=( -s -S )
else
_arguments_options=( -s )
fi
fd_types=(
{f,file}'\:"regular files"'
{d,directory}'\:"directories"'
{l,symlink}'\:"symbolic links"'
{e,empty}'\:"empty files or directories"'
{x,executable}'\:"executable (files)"'
{b,block-device}'\:"block devices"'
{c,char-device}'\:"character devices"'
{s,socket}'\:"sockets"'
{p,pipe}'\:"named pipes (FIFOs)"'
)
# Do not complete rare options unless either the current prefix
# matches one of those options or the user has the `complete-all`
# style set. Note that this prefix check has to be updated manually to account
# for all of the potential negation options listed below!
if
# (--[bpsu]* => match all options marked with '$no')
[[ $PREFIX$SUFFIX == --[bopsun]* ]] ||
zstyle -t ":complete:$curcontext:*" complete-all
then
no=
fi
# We make heavy use of argument groups here to prevent the option specs from
# growing unwieldy. These aren't supported in zsh <5.4, though, so we'll strip
# them out below if necessary. This makes the exclusions inaccurate on those
# older versions, but oh well — it's not that big a deal
fd_args=(
+ '(hidden)' # hidden files
{-H,--hidden}'[search hidden files/directories]'
+ '(no-ignore-full)' # all ignore files
'(no-ignore-partial)'{-I,--no-ignore}"[don't respect .(git|fd)ignore and global ignore files]"
$no'(no-ignore-partial)*'{-u,--unrestricted}'[alias for --no-ignore, when repeated also alias for --hidden]'
+ no-ignore-partial # some ignore files
"(no-ignore-full --no-ignore-vcs)--no-ignore-vcs[don't respect .gitignore files]"
"!(no-ignore-full --no-global-ignore-file)--no-global-ignore-file[don't respect the global ignore file]"
$no'(no-ignore-full --no-ignore-parent)--no-ignore-parent[]'
+ '(case)' # case-sensitivity
{-s,--case-sensitive}'[perform a case-sensitive search]'
{-i,--ignore-case}'[perform a case-insensitive search]'
+ '(regex-pattern)' # regex-based search pattern
'(no-regex-pattern)--regex[perform a regex-based search (default)]'
+ '(no-regex-pattern)' # non-regex-based search pattern
{-g,--glob}'[perform a glob-based search]'
{-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'
+ '(no-require-git)'
"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]"
+ '(match-full)' # match against full path
{-p,--full-path}'[match the pattern against the full path instead of the basename]'
+ '(follow)' # follow symlinks
{-L,--follow}'[follow symbolic links to directories]'
+ '(abs-path)' # show absolute paths
'(long-listing)'{-a,--absolute-path}'[show absolute paths instead of relative paths]'
+ '(null-sep)' # use null separator for output
'(long-listing)'{-0,--print0}'[separate search results by the null character]'
+ '(long-listing)' # long-listing output
'(abs-path null-sep max-results exec-cmds)'{-l,--list-details}'[use a long listing format with file metadata]'
+ '(max-results)' # max number of results
'(long-listing exec-cmds)--max-results=[limit number of search results to given count and quit]:count'
'(long-listing exec-cmds)-1[limit to a single search result and quit]'
+ '(fs-errors)' # file-system errors
$no'--show-errors[enable the display of filesystem errors]'
+ '(fs-traversal)' # file-system traversal
$no"--one-file-system[don't descend into directories on other file systems]"
'!--mount'
'!--xdev'
+ dir-depth # directory depth
'(--exact-depth -d --max-depth)'{-d+,--max-depth=}'[set max directory depth to descend when searching]:depth'
'!(--exact-depth -d --max-depth)--maxdepth:depth'
'(--exact-depth --min-depth)--min-depth=[set directory depth to descend before start searching]:depth'
'(--exact-depth -d --max-depth --maxdepth --min-depth)--exact-depth=[only search at the exact given directory depth]:depth'
+ prune # pruning
"--prune[don't traverse into matching directories]"
+ filter-misc # filter search
'*'{-t+,--type=}"[filter search by type]:type:(($fd_types))"
'*'{-e+,--extension=}'[filter search by file extension]:extension'
'*'{-E+,--exclude=}'[exclude files/directories that match the given glob pattern]:glob pattern'
'*'{-S+,--size=}'[limit search by file size]:size limit:->size'
'(-o --owner)'{-o+,--owner=}'[filter by owning user and/or group]:owner and/or group:->owner'
+ ignore-file # extra ignore files
'*--ignore-file=[add a custom, low-precedence ignore-file with .gitignore format]: :_files'
+ '(filter-mtime-newer)' # filter by files modified after than
'--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'
'--changed-after=[alias for --changed-within]:date/duration'
'!--change-newer-than=:date/duration'
'!--newer=:date/duration'
+ '(filter-mtime-older)' # filter by files modified before than
'--changed-before=[limit search to files/directories modified before the given date/duration]:date or duration'
'!--change-older-than=:date/duration'
'!--older=:date/duration'
+ '(color)' # colorize output
{-c+,--color=}'[declare when to colorize search results]:when to colorize:((
auto\:"show colors if the output goes to an interactive console (default)"
never\:"do not use colorized output"
always\:"always use colorized output"
))'
'--hyperlink=-[add hyperlinks to output paths]::when:(auto never always)'
+ '(threads)'
{-j+,--threads=}'[set the number of threads for searching and executing]:number of threads'
+ '(exec-cmds)' # execute command
'(long-listing max-results)'{-x+,--exec=}'[execute command for each search result]:command: _command_names -e:*\;::program arguments: _normal'
'(long-listing max-results)'{-X+,--exec-batch=}'[execute command for all search results at once]:command: _command_names -e:*\;::program arguments: _normal'
'(long-listing max-results)--batch-size=[max number of args for each -X call]:size'
+ other
'!(--max-buffer-time)--max-buffer-time=[set amount of time to buffer before showing output]:time (ms)'
+ '(about)' # about flags
'(: * -)'{-h,--help}'[display help message]'
'(: * -)'{-V,--version}'[display version information]'
+ path-sep # set path separator for output
$no'(--path-separator)--path-separator=[set the path separator to use when printing file paths]:path separator'
+ search-path
$no'(--base-directory)--base-directory=[change the current working directory to the given path]:directory:_files -/'
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
+ strip-cwd-prefix
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=-[When to strip ./]::when:(always never auto)'
+ and
'--and=[additional required search path]:pattern'
+ args # positional arguments
'1: :_guard "^-*" pattern'
'(--search-path)*:directory:_files -/'
)
# Strip out argument groups where unsupported (see above)
is-at-least 5.4 ||
fd_args=( ${(@)args:#(#i)(+|[a-z0-9][a-z0-9_-]#|\([a-z0-9][a-z0-9_-]#\))} )
_arguments $_arguments_options : $fd_args && ret=0
case ${state} in
owner)
compset -P '(\\|)\!'
if compset -P '*:'; then
_groups && ret=0
else
if
compset -S ':*' ||
# Do not add the colon suffix when completing "!user<TAB>
# (with a starting double-quote) otherwise pressing tab again
# after the inserted colon "!user:<TAB> will complete history modifiers
[[ $IPREFIX == (\\|\!)* && ($QIPREFIX == \"* && -z $QISUFFIX) ]]
then
_users && ret=0
else
local q
# Since quotes are needed when using the negation prefix !,
# automatically remove the colon suffix also when closing the quote
if [[ $QIPREFIX == [\'\"]* ]]; then
q=${QIPREFIX:0:1}
fi
_users -r ": \t\n\-$q" -S : && ret=0
fi
fi
;;
size)
if compset -P '[-+][0-9]##'; then
local -a suff=(
'B:bytes'
'K:kilobytes (10^3 = 1000 bytes)'
'M:megabytes (10^6 = 1000^2 bytes)'
'G:gigabytes (10^9 = 1000^3 bytes)'
'T:terabytes (10^12 = 1000^4 bytes)'
'Ki:kibibytes ( 2^10 = 1024 bytes)'
'Mi:mebibytes ( 2^20 = 1024^2 bytes)'
'Gi:gigibytes ( 2^30 = 1024^3 bytes)'
'Ti:tebibytes ( 2^40 = 1024^4 bytes)'
)
_describe -t units 'size limit units' suff -V 'units'
elif compset -P '[-+]'; then
_message -e 'size limit number (full format: <+-><number><unit>)'
else
_values 'size limit prefix (full format: <prefix><number><unit>)' \
'\+[file size must be greater or equal to]'\
'-[file size must be less than or equal to]' && ret=0
fi
;;
esac
return ret
}
_fd "$@"
# ------------------------------------------------------------------------------
# Copyright (c) 2011 GitHub zsh-users - http://github.com/zsh-users
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the zsh-users nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for fd
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * smancill (https://github.com/smancill)
#
# ------------------------------------------------------------------------------
# Local Variables:
# mode: shell-script
# coding: utf-8-unix
# indent-tabs-mode: nil
# sh-indentation: 2
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et
doc/.gitattributes vendored
@ -1 +0,0 @@
* linguist-vendored
doc/fd.1
@ -24,155 +24,45 @@ fd \- find entries in the filesystem
.B fd
is a simple, fast and user-friendly alternative to
.BR find (1).
.P
By default
.B fd
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
with the '\-\-glob' option.
.P
By default
.B fd
will exclude hidden files and directories, as well as any files that match gitignore rules
or ignore rules in .ignore or .fdignore files.
.SH OPTIONS
.TP
.B \-H, \-\-hidden
Include hidden files and directories in the search results
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
.IP
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
is also used.
.TP
.B \-I, \-\-no\-ignore
Show search results from files and directories that would otherwise be ignored by
.RS
.IP \[bu] 2
.I .gitignore
.IP \[bu]
.I .git/info/exclude
.IP \[bu]
The global gitignore configuration (by default
.IR $HOME/.config/git/ignore )
.IP \[bu]
.I .ignore
.IP \[bu]
.I .fdignore
.IP \[bu]
The global fd ignore file (usually
.I $HOME/.config/fd/ignore
)
.RE
.IP
The flag can be overridden with '--ignore'.
.TP
.B \-u, \-\-unrestricted
Perform an unrestricted search, including ignored and hidden files. This is an alias for '--hidden --no-ignore'.
.TP
.B \-\-no\-ignore\-vcs
Show search results from files and directories that would otherwise be ignored by gitignore files
including
.IR .gitignore ,
.IR .git/info/exclude ,
and the global gitignore configuration
.RI ( core.excludesFile
git setting, which defaults to
.IR $HOME/.config/git/ignore ).
The flag can be overridden with '--ignore-vcs'.
.TP
.B \-\-no\-require\-git
Do not require a git repository to respect gitignores. By default, fd will only
respect global gitignore rules, .gitignore rules and local exclude rules if fd
detects that you are searching inside a git repository. This flag allows you to
relax this restriction such that fd will respect all git related ignore rules
regardless of whether you're searching in a git repository or not. The flag can
be overridden with '--require-git'.
.TP
.B \-\-no\-ignore\-parent
Show search results from files and directories that would otherwise be ignored by gitignore files in
parent directories.
.TP
.B \-s, \-\-case\-sensitive
Perform a case-sensitive search. By default, fd uses case-insensitive searches, unless the
pattern contains an uppercase character (smart case).
.TP
.B \-i, \-\-ignore\-case
Perform a case-insensitive search. By default, fd uses case-insensitive searches, unless the
pattern contains an uppercase character (smart case).
.TP
.B \-g, \-\-glob
Perform a glob-based search instead of a regular expression search.
If combined with the '\-\-full-path' option, '**' can be used to match multiple path components.
.TP
.B \-\-regex
Perform a regular-expression based search (default). This can be used to override --glob.
.TP
.B \-F, \-\-fixed\-strings
Treat the pattern as a literal string instead of a regular expression. Note that this also
performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'.
.TP
.BI "\-\-and " pattern
Add additional required search patterns, all of which must be matched. Multiple additional
patterns can be specified. The patterns are regular expressions, unless '\-\-glob'
or '\-\-fixed\-strings' is used.
.TP
.B \-a, \-\-absolute\-path
Shows the full path starting from the root as opposed to relative paths.
The flag can be overridden with '--relative-path'.
.TP
.B \-l, \-\-list\-details
Use a detailed listing format like 'ls -l'. This is basically an alias
for '--exec-batch ls -l' with some additional 'ls' options. This can be used
to see more metadata, to show symlink targets and to achieve a deterministic
sort order.
.TP
.B \-L, \-\-follow
By default, fd does not descend into symlinked directories. Using this flag, symbolic links are
also traversed. The flag can be overridden with '--no-follow'.
.TP
.B \-p, \-\-full\-path
By default, the search pattern is only matched against the filename (or directory name). Using
this flag, the
.I pattern
is matched against the full path.
.TP
.B \-0, \-\-print0
Separate search results by the null character (instead of newlines). Useful for piping results to
.IR xargs .
.TP
.B \-\-max\-results count
Limit the number of search results to 'count' and quit immediately.
.TP
.B \-1
Limit the search to a single result and quit immediately. This is an alias for '--max-results=1'.
.TP
.B \-q, \-\-quiet
When the flag is present, the program does not print anything and will instead exit with a code of 0 if there is at least one search result.
Otherwise, the exit code will be 1.
This is mainly for usage in scripts and can be faster than checking for output because the search can be stopped early after the first match.
.B \-\-has\-results
can be used as an alias.
.TP
.B \-\-show-errors
Enable the display of filesystem errors for situations such as insufficient
permissions or dead symlinks.
.TP
.B \-\-strip-cwd-prefix [when]
By default, relative paths are prefixed with './' when -x/--exec,
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
path starting with '-' being treated as a command line option. Use
this flag to change this behavior. If this flag is used without a value,
it is equivalent to passing "always". Possible values are:
.RS
.IP never
Never strip the ./ at the beginning of paths
.IP always
Always strip the ./ at the beginning of paths
.IP auto
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
.RE
.TP
.B \-\-one\-file\-system, \-\-mount, \-\-xdev
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
.TP
.B \-h, \-\-help
Print help information.
@ -185,32 +75,15 @@ Limit directory traversal to at most
.I d
levels of depth. By default, there is no limit on the search depth.
.TP
.BI "\-\-min\-depth " d
Only show search results starting at the given depth. See also: '--max-depth' and '--exact-depth'.
.TP
.BI "\-\-exact\-depth " d
Only show search results at the exact given depth. This is an alias for '--min-depth <depth> --max-depth <depth>'.
.TP
.B \-\-prune
Do not traverse into matching directories.
.TP
.BI "\-t, \-\-type " filetype .BI "\-t, \-\-type " filetype
Filter search by type: Filter search by type:
.RS .RS
.IP "f, file" .IP "f, file"
regular files regular files
.IP "d, dir, directory" .IP "d, directories"
directories directories
.IP "l, symlink" .IP "l, symlink"
symbolic links symbolic links
.IP "b, block-device"
block devices
.IP "c, char-device"
character devices
.IP "s, socket"
sockets
.IP "p, pipe"
named pipes (FIFOs)
.IP "x, executable" .IP "x, executable"
executable (files) executable (files)
.IP "e, empty" .IP "e, empty"
@ -218,46 +91,18 @@ empty files or directories
.RE .RE
.RS .RS
This option can be specified more than once to include multiple file types. This option can be used repeatedly to allow for multiple file types.
Searching for '--type file --type symlink' will show both regular files as well as
symlinks. Note that the 'executable' and 'empty' filters work differently: '--type
executable' implies '--type file' by default. And '--type empty' searches for
empty files and directories, unless either '--type file' or '--type directory' is
specified in addition.
Examples:
- Only search for files:
fd --type file …
fd -tf …
- Find both files and symlinks
fd --type file --type symlink …
fd -tf -tl …
- Find executable files:
fd --type executable
fd -tx
- Find empty files:
fd --type empty --type file
fd -te -tf
- Find empty directories:
fd --type empty --type directory
fd -te -td
.RE
.TP
.BI "\-e, \-\-extension " ext
Filter search results by file extension
.IR ext .
This option can be used repeatedly to allow for multiple possible file extensions.
If you want to search for files without extension, you can use the regex '^[^.]+$'
as a normal search pattern.
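For example (illustrative), to restrict that search to regular files:
    fd --type file '^[^.]+$'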
.TP
.BI "\-E, \-\-exclude " pattern
Exclude files/directories that match the given glob pattern.
This overrides any other ignore logic.
Multiple exclude patterns can be specified.
Examples:
\-\-exclude '*.pyc'
\-\-exclude node_modules
.TP
.BI "\-\-ignore-file " path
Add a custom ignore-file in '.gitignore' format.
@ -276,26 +121,8 @@ Do not colorize output.
Always colorize output.
.RE
.TP .TP
.B "\-\-hyperlink
Specify whether the output should use terminal escape codes to indicate a hyperlink to a
file url pointing to the path.
The value can be auto, always, or never.
Currently, the default is "never", and if the option is used without an argument "auto" is
used. In the future this may be changed to "auto" and "always".
.RS
.IP auto
Only output hyperlinks if color is also enabled, as a proxy for whether terminal escape
codes are acceptable.
.IP never
Never output hyperlink escapes.
.IP always
Always output hyperlink escapes, regardless of color settings.
.RE
.TP
.BI "\-j, \-\-threads " num .BI "\-j, \-\-threads " num
Set number of threads to use for searching & executing (default: number of available CPU cores). Number of threads to use for searching (default: number of available CPUs).
.TP
.BI "\-S, \-\-size " size
Limit results based on the size of files using the format
@ -305,8 +132,6 @@ Limit results based on the size of files using the format
file size must be greater than or equal to this
.IP '-'
file size must be less than or equal to this
.P
If neither '+' nor '-' is specified, file size must be exactly equal to this.
.IP 'NUM'
The numeric size (e.g. 500)
.IP 'UNIT'
@ -316,7 +141,7 @@ Allowed unit values:
.IP 'b'
bytes
.IP 'k'
kilobytes (base ten, 10^3 = 1000 bytes)
.IP 'm'
megabytes
.IP 'g'
@ -324,7 +149,7 @@ gigabytes
.IP 't'
terabytes
.IP 'ki'
kibibytes (base two, 2^10 = 1024 bytes)
.IP 'mi'
mebibytes
.IP 'gi'
@ -334,72 +159,13 @@ tebibytes
.RE
.RE
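Illustrative examples (the sizes are arbitrary): find files of at least 100 megabytes, or combine two filters to find files between 1 and 10 kibibytes:
    fd --size +100m
    fd --size +1ki --size -10ki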
.TP
.BI "\-\-changed-within " date|duration
Filter results based on the file modification time.
Files with modification times greater than the argument will be returned.
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
in time as full RFC3339 format with time zone, as a date or datetime in the
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
followed by the number of seconds since the Unix epoch (@[0-9]+).
\fB\-\-change-newer-than\fR,
.B --newer
or
.B --changed-after
can be used as aliases.
Examples:
\-\-changed-within 2weeks
\-\-change-newer-than "2018-10-27 10:00:00"
\-\-newer 2018-10-27
\-\-changed-after @1704067200
.TP
.BI "\-\-changed-before " date|duration
Filter results based on the file modification time.
Files with modification times less than the argument will be returned.
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
in time as full RFC3339 format with time zone, as a date or datetime in the
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
followed by the number of seconds since the Unix epoch (@[0-9]+).
.B --change-older-than
or
.B --older
can be used as aliases.
Examples:
\-\-changed-before "2018-10-27 10:00:00"
\-\-change-older-than 2weeks
\-\-older @1704067200
.TP
.BI "-o, \-\-owner " [user][:group]
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
is optional. Precede either side with a '!' to exclude files instead.
Examples:
\-\-owner john
\-\-owner :students
\-\-owner "!john:students"
.TP
.BI "\-\-base\-directory " path
Change the current working directory of fd to the provided path. This means that search results will
be shown with respect to the given base path. Note that relative paths which are passed to fd via the
positional \fIpath\fR argument or the \fB\-\-search\-path\fR option will also be resolved relative to
this directory.
.TP
.BI "\-\-path\-separator " separator
Set the path separator to use when printing file paths. The default is the OS-specific separator
('/' on Unix, '\\' on Windows).
.TP
.BI "\-\-search\-path " search\-path
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
.TP
.BI "\-\-format " fmt
Specify a template string that is used for printing a line for each file found.
The following placeholders are substituted into the string for each file before printing:
.RS
.IP {}
path (of the current search result)
.IP {/}
basename
.IP {//}
@ -408,126 +174,12 @@ parent directory
.IP {.}
path without file extension
.IP {/.}
basename without file extension
.IP {{
literal '{' (an escape sequence)
.IP }}
literal '}' (an escape sequence)
.P
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
useful if you need to include the literal text of one of the above placeholders.
.RE
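For example (illustrative), print only the base names of all Python files, one per line:
    fd -e py --format '{/}'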
.TP
.BI "\-x, \-\-exec " command
.RS
Execute
.I command
for each search result in parallel (use --threads=1 for sequential command execution).
Note that all subsequent positional arguments are considered to be arguments to the
.I command
- not to fd.
It is therefore recommended to place the \-x/\-\-exec option last. Alternatively, you can supply
a ';' argument to end the argument list and continue with more fd options.
Most shells require ';' to be escaped: '\\;'.
This option can be specified multiple times, in which case all commands are run for each
file found, in the order they are provided. In that case, you must supply a ';' argument for
all but the last commands.
If parallelism is enabled, the order in which the commands are executed is non-deterministic. Even with
--threads=1, the order is determined by the operating system and may not be what you expect. It is therefore
recommended that you do not rely on any particular ordering of the results.
Before executing the command, any placeholder patterns in the command are replaced with the
corresponding values for the current file. The same placeholders are used as in the "\-\-format"
option.
If no placeholder is present, an implicit "{}" at the end is assumed.
Examples:
- find all *.zip files and unzip them:
fd -e zip -x unzip
- find *.h and *.cpp files and run "clang-format -i .." for each of them:
fd -e h -e cpp -x clang-format -i
- Convert all *.jpg files to *.png files:
fd -e jpg -x convert {} {.}.png
.RE
.TP
.BI "\-X, \-\-exec-batch " command
.RS
Execute
.I command
once, with all search results as arguments.
The order of the arguments is non-deterministic and should not be relied upon.
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of being expanded
once per command invocation, each argument containing a placeholder is expanded for every
file in the batch and passed as a separate argument.
If no placeholder is present, an implicit "{}" at the end is assumed.
Like \-\-exec, this can be used multiple times, in which case each command will be run in
the order given.
Examples:
- Find all test_*.py files and open them in your favorite editor:
fd -g 'test_*.py' -X vim
Note that this executes a single "vim" process with all search results as arguments.
- Find all *.rs files and count the lines with "wc -l ...":
fd -e rs -X wc -l
.RE
.TP
.BI "\-\-batch-size " size
Maximum number of arguments to pass to the command given with -X. If the number of results is
greater than the given size, the command given with -X is run again with remaining arguments. A
batch size of zero means there is no limit (default), but note that batching might still happen
due to OS restrictions on the maximum length of command lines.
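For example (illustrative), remove a large number of object files in batches of at most 1000 arguments per 'rm' invocation:
    fd -e o --batch-size=1000 -X rm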
.SH PATTERN SYNTAX
The regular expression syntax used by fd is documented here:
https://docs.rs/regex/1.0.0/regex/#syntax
The glob syntax is documented here:
https://docs.rs/globset/#syntax
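As an illustration of the difference, the following two commands match the same test files, first with a regular expression and then with the equivalent glob:
    fd '^test_.*\.py$'
    fd -g 'test_*.py'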
.SH ENVIRONMENT
.TP
.B LS_COLORS
Determines how to colorize search results, see
.BR dircolors (1) .
.TP
.B NO_COLOR
Disables colorized output.
.TP
.B XDG_CONFIG_HOME, HOME
Used to locate the global ignore file. If
.B XDG_CONFIG_HOME
is set, use
.IR $XDG_CONFIG_HOME/fd/ignore .
Otherwise, use
.IR $HOME/.config/fd/ignore .
.SH FILES
.TP
.B .fdignore
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
that should be excluded from the search. However, this file is specific to fd, and will be used even
if the --no-ignore-vcs option is used.
.TP
.B $XDG_CONFIG_HOME/fd/ignore
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore),
the entries in this file are respected, as if it were an .fdignore file in the
current directory.
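A sketch of what such a global ignore file could contain (the entries are only examples):
    # $XDG_CONFIG_HOME/fd/ignore
    .git/
    node_modules/
    *.bak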
.SH EXAMPLES
.TP
.RI "Find files and directories that match the pattern '" needle "':"
@ -536,22 +188,7 @@ $ fd needle
.RI "Start a search in a given directory (" /var/log "):"
$ fd nginx /var/log
.TP
.RI "Find all Python files (all files with the extension " .py ") in the current directory:"
$ fd -e py
.TP
.RI "Open all search results with vim:"
$ fd pattern -X vim
.SH Tips and Tricks
.IP \[bu]
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
".git" folders will be ignored by default, even when the --hidden option is used.
.IP \[bu]
You can use a shell alias or a wrapper script in order to pass desired flags to fd
by default. For example if you do not like the default behavior of respecting gitignore,
you can use
`alias fd="/usr/bin/fd --no-ignore-vcs"`
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
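Alternatively, a minimal wrapper script can be used; the path and file name below are just an example:
    #!/bin/bash
    # ~/bin/fd: forward all arguments to the real fd, adding the desired default flags
    exec /usr/bin/fd --no-ignore-vcs "$@"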
.SH BUGS
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
.SH SEE ALSO
.BR find (1)

Binary file not shown.


@ -1,161 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="66mm"
height="66mm"
viewBox="0 0 66 66.000001"
version="1.1"
id="svg5"
inkscape:version="1.1 (c4e8f9ed74, 2021-05-24)"
sodipodi:docname="logo.svg"
inkscape:export-filename="/home/shark/Informatik/rust/fd/doc/logo.png"
inkscape:export-xdpi="192.42"
inkscape:export-ydpi="192.42"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview7"
pagecolor="#ffffff"
bordercolor="#999999"
borderopacity="1"
inkscape:pageshadow="0"
inkscape:pageopacity="1"
inkscape:pagecheckerboard="0"
inkscape:document-units="mm"
showgrid="false"
inkscape:zoom="2.1795515"
inkscape:cx="114.47309"
inkscape:cy="176.18304"
inkscape:window-width="1920"
inkscape:window-height="1175"
inkscape:window-x="1920"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="layer1"
showguides="false"
inkscape:guide-bbox="true"
inkscape:snap-global="false"
fit-margin-top="2"
fit-margin-left="2"
fit-margin-right="2"
fit-margin-bottom="2"
lock-margins="true">
<sodipodi:guide
position="26.228232,26.126763"
orientation="0,-1"
id="guide47826" />
<sodipodi:guide
position="25.799494,2.3628924"
orientation="0,-1"
id="guide47828" />
</sodipodi:namedview>
<defs
id="defs2" />
<g
inkscape:label="Ebene 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-21.358009,-148.28012)">
<g
id="g66267"
transform="matrix(0.84959471,0,0,0.84959471,7.9920783,43.351816)">
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:29.7126px;line-height:0;font-family:'Fira Sans Condensed';-inkscape-font-specification:'Fira Sans Condensed, ';white-space:pre;inline-size:37.3715;fill:#e5e5e5;fill-opacity:1;stroke-width:0.742816"
x="50.647034"
y="173.19841"
id="text50653"
transform="matrix(1.0604862,0,0,1.0604862,-3.3101428,-10.150043)"><tspan
x="50.647034"
y="173.19841"
id="tspan66635"><tspan
style="font-family:'Source Code Pro';-inkscape-font-specification:'Source Code Pro'"
id="tspan66633">fd</tspan></tspan></text>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:29.7126px;line-height:0;font-family:'Fira Sans Condensed';-inkscape-font-specification:'Fira Sans Condensed, ';white-space:pre;inline-size:37.3715;fill:#00ccff;fill-opacity:0.996078;stroke-width:0.742816"
x="50.647034"
y="173.19841"
id="text1244"
transform="matrix(1.0604862,0,0,1.0604862,-2.8008599,-9.6407599)"><tspan
x="50.647034"
y="173.19841"
id="tspan66639"><tspan
style="font-family:'Source Code Pro';-inkscape-font-specification:'Source Code Pro'"
id="tspan66637">fd</tspan></tspan></text>
<g
id="g47824"
transform="translate(0.1724878,-0.35338542)">
<g
id="g42041">
<path
style="fill:none;stroke:#939dac;stroke-width:1.065;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 28.209616,155.51329 v 15.68758 H 40.83001"
id="path39763"
sodipodi:nodetypes="ccc" />
<path
style="fill:#b7bec8;stroke:#939dac;stroke-width:1.065;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 27.869464,161.83767 H 40.261291"
id="path39765" />
</g>
<g
id="g41945"
transform="translate(-1.0583333)">
<path
style="fill:#0088aa;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
id="path40006"
sodipodi:nodetypes="ccccc" />
<rect
style="fill:#01ccff;fill-opacity:1;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
id="rect39949"
width="9.4925022"
height="6.2080379"
x="37.492516"
y="158.82776"
ry="0.90871465" />
</g>
<g
id="g41951"
transform="translate(-1.0583334,9.3665773)">
<path
style="fill:#373e48;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
id="path41947"
sodipodi:nodetypes="ccccc" />
<rect
style="fill:#535d6c;fill-opacity:0.993797;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
id="rect41949"
width="9.4925022"
height="6.2080379"
x="37.492516"
y="158.82776"
ry="0.90871465" />
</g>
<g
id="g41957"
transform="translate(-14.306994,-6.8962642)">
<path
style="fill:#373e48;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
id="path41953"
sodipodi:nodetypes="ccccc" />
<rect
style="fill:#535d6c;fill-opacity:0.993797;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
id="rect41955"
width="9.4925022"
height="6.2080379"
x="37.492516"
y="158.82776"
ry="0.90871465" />
</g>
</g>
<g
id="g65006"
transform="matrix(0.55302761,0,0,0.55302761,66.463548,117.45819)" />
</g>
</g>
</svg>


@ -1,66 +0,0 @@
# Release checklist
This file can be used as-is, or copied into the GitHub PR description which includes
necessary changes for the upcoming release.
## Version bump
- [ ] Create a new branch for the required changes for this release.
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
Make sure to `git add` the `Cargo.lock` changes as well.
- [ ] Find the current min. supported Rust version by running
`grep rust-version Cargo.toml`.
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
to the version of this release.
## Pre-release checks and updates
- [ ] Install the latest version (`cargo install --locked -f --path .`) and make
sure that it is available on the `PATH` (`fd --version` should show the
new version).
- [ ] Review `-h`, `--help`, and the `man` page.
- [ ] Run `fd -h` and copy the output to the *"Command-line options"* section in
the README
- [ ] Push all changes and wait for CI to succeed (before continuing with the
next section).
- [ ] Optional: manually test the new features and command-line options described
in the `CHANGELOG.md`.
- [ ] Run `cargo publish --dry-run` to make sure that it will succeed later
(after creating the GitHub release).
## Release
- [ ] Merge your release branch (should be a fast-forward merge).
- [ ] Create a tag and push it: `git tag vX.Y.Z; git push origin tag vX.Y.Z`.
This will trigger the deployment via GitHub Actions.
REMINDER: If your `origin` is a fork, don't forget to push to e.g. `upstream`
instead.
- [ ] Go to https://github.com/sharkdp/fd/releases/new to create the new
release. Select the new tag and also use it as the release title. For the
release notes, copy the corresponding section from `CHANGELOG.md` and
possibly add additional remarks for package maintainers.
Publish the release.
- [ ] Check if the binary deployment works (archives and Debian packages should
appear when the CI run *for the Git tag* has finished).
- [ ] Publish to crates.io by running `cargo publish` in a *clean* repository.
One way to do this is to clone a fresh copy.
## Post-release
- [ ] Prepare a new *"Upcoming release"* section at the top of `CHANGELOG.md`.
Put this at the top:
# Upcoming release
## Features
## Bugfixes
## Changes
## Other


@ -1,8 +1,6 @@
#!/bin/bash
# Designed to be executed via svg-term from the fd root directory:
# svg-term --command="bash doc/screencast.sh" --out doc/screencast.svg --padding=10
# Then run this (workaround for #1003):
# sed -i '' 's/<text/<text font-size="1.67"/g' doc/screencast.svg
set -e
set -u
}
prompt() {
printf '%b ' "$PROMPT" | pv -q
}
type() {
type() { type() {
@ -36,11 +34,9 @@ main() {
enter "fd app" enter "fd app"
enter "fd fi" enter "fd sh"
enter "fd fi --type f" enter "fd sh --type f"
enter "fd --type d"
enter "fd -e md" enter "fd -e md"

File diff suppressed because one or more lines are too long


@ -1,12 +0,0 @@
## Sponsors
`fd` development is sponsored by many individuals and companies. Thank you very much!
Please note, that being sponsored does not affect the individuality of the `fd`
project or affect the maintainers' actions in any way.
We remain impartial and continue to assess pull requests solely on merit - the
features added, bugs solved, and effect on the overall complexity of the code.
No issue will have a different priority based on sponsorship status of the
reporter.
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.

File diff suppressed because one or more lines are too long


@ -1 +0,0 @@
# Defaults are used


@ -1,134 +0,0 @@
#!/bin/bash
COPYRIGHT_YEARS="2018 - "$(date "+%Y")
MAINTAINER="David Peter <mail@david-peter.de>"
REPO="https://github.com/sharkdp/fd"
DPKG_STAGING="${CICD_INTERMEDIATES_DIR:-.}/debian-package"
DPKG_DIR="${DPKG_STAGING}/dpkg"
mkdir -p "${DPKG_DIR}"
if [[ -z "$TARGET" ]]; then
TARGET="$(rustc -vV | sed -n 's|host: \(.*\)|\1|p')"
fi
case "$TARGET" in
*-musl*)
DPKG_BASENAME=fd-musl
DPKG_CONFLICTS="fd, fd-find"
;;
*)
DPKG_BASENAME=fd
DPKG_CONFLICTS="fd-musl, fd-find"
;;
esac
if [[ -z "$DPKG_VERSION" ]]; then
DPKG_VERSION=$(cargo metadata --no-deps --format-version 1 | jq -r .packages[0].version)
fi
unset DPKG_ARCH
case "${TARGET}" in
aarch64-*-linux-*) DPKG_ARCH=arm64 ;;
arm-*-linux-*hf) DPKG_ARCH=armhf ;;
i686-*-linux-*) DPKG_ARCH=i686 ;;
x86_64-*-linux-*) DPKG_ARCH=amd64 ;;
*) DPKG_ARCH=notset ;;
esac;
DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb"
BIN_PATH=${BIN_PATH:-target/${TARGET}/release/fd}
# Binary
install -Dm755 "${BIN_PATH}" "${DPKG_DIR}/usr/bin/fd"
# Man page
install -Dm644 'doc/fd.1' "${DPKG_DIR}/usr/share/man/man1/fd.1"
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/fd.1"
# Autocompletion files
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/fd"
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/fd.fish"
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_fd"
# README and LICENSE
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT"
install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE"
install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
# Create symlinks so fdfind can be used as well:
ln -s "/usr/bin/fd" "${DPKG_DIR}/usr/bin/fdfind"
ln -s './fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/fdfind"
ln -s './fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/fdfind.fish"
ln -s './_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_fdfind"
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: fd
Source: ${REPO}
Files: *
Copyright: ${MAINTAINER}
Copyright: $COPYRIGHT_YEARS ${MAINTAINER}
License: Apache-2.0 or MIT
License: Apache-2.0
On Debian systems, the complete text of the Apache-2.0 can be found in the
file /usr/share/common-licenses/Apache-2.0.
License: MIT
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
.
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
EOF
chmod 644 "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright"
# control file
mkdir -p "${DPKG_DIR}/DEBIAN"
cat > "${DPKG_DIR}/DEBIAN/control" <<EOF
Package: ${DPKG_BASENAME}
Version: ${DPKG_VERSION}
Section: utils
Priority: optional
Maintainer: ${MAINTAINER}
Homepage: ${REPO}
Architecture: ${DPKG_ARCH}
Provides: fd
Conflicts: ${DPKG_CONFLICTS}
Description: simple, fast and user-friendly alternative to find
fd is a program to find entries in your filesystem.
It is a simple, fast and user-friendly alternative to find.
While it does not aim to support all of find's powerful functionality, it provides
sensible (opinionated) defaults for a majority of use cases.
EOF
DPKG_PATH="${DPKG_STAGING}/${DPKG_NAME}"
if [[ -n $GITHUB_OUTPUT ]]; then
echo "DPKG_NAME=${DPKG_NAME}" >> "$GITHUB_OUTPUT"
echo "DPKG_PATH=${DPKG_PATH}" >> "$GITHUB_OUTPUT"
fi
# build dpkg
fakeroot dpkg-deb --build "${DPKG_DIR}" "${DPKG_PATH}"
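The script is driven by environment variables (TARGET, DPKG_VERSION, BIN_PATH), all of which fall back to sensible defaults; an illustrative invocation (the script file name is assumed here) could be:
    TARGET=x86_64-unknown-linux-musl bash package-deb.sh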


@ -1,22 +0,0 @@
#!/usr/bin/bash
set -eu
# This script automates the "Version bump" section
version="$1"
if [[ -z $version ]]; then
echo "Usage: must supply version as first argument" >&2
exit 1
fi
git switch -C "release-$version"
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md

src/app.rs Normal file

@ -0,0 +1,272 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::collections::HashMap;
use clap::{App, AppSettings, Arg};
struct Help {
short: &'static str,
long: &'static str,
}
macro_rules! doc {
($map:expr, $name:expr, $short:expr) => {
doc!($map, $name, $short, $short)
};
($map:expr, $name:expr, $short:expr, $long:expr) => {
$map.insert(
$name,
Help {
short: $short,
long: concat!($long, "\n "),
},
);
};
}
pub fn build_app() -> App<'static, 'static> {
let helps = usage();
let arg = |name| {
Arg::with_name(name)
.help(helps[name].short)
.long_help(helps[name].long)
};
App::new("fd")
.version(crate_version!())
.usage("fd [FLAGS/OPTIONS] [<pattern>] [<path>...]")
.setting(AppSettings::ColoredHelp)
.setting(AppSettings::DeriveDisplayOrder)
.arg(arg("hidden").long("hidden").short("H"))
.arg(arg("no-ignore").long("no-ignore").short("I"))
.arg(arg("no-ignore-vcs").long("no-ignore-vcs"))
.arg(
arg("rg-alias-hidden-ignore")
.short("u")
.multiple(true)
.hidden(true),
).arg(
arg("case-sensitive")
.long("case-sensitive")
.short("s")
.overrides_with("ignore-case"),
).arg(
arg("ignore-case")
.long("ignore-case")
.short("i")
.overrides_with("case-sensitive"),
).arg(
arg("fixed-strings")
.long("fixed-strings")
.short("F")
.alias("literal"),
).arg(arg("absolute-path").long("absolute-path").short("a"))
.arg(arg("follow").long("follow").short("L").alias("dereference"))
.arg(arg("full-path").long("full-path").short("p"))
.arg(arg("null_separator").long("print0").short("0"))
.arg(arg("depth").long("max-depth").short("d").takes_value(true))
.arg(
arg("file-type")
.long("type")
.short("t")
.multiple(true)
.number_of_values(1)
.takes_value(true)
.value_name("filetype")
.possible_values(&[
"f",
"file",
"d",
"directory",
"l",
"symlink",
"x",
"executable",
"e",
"empty",
]).hide_possible_values(true),
).arg(
arg("extension")
.long("extension")
.short("e")
.multiple(true)
.number_of_values(1)
.takes_value(true)
.value_name("ext"),
).arg(
arg("exec")
.long("exec")
.short("x")
.min_values(1)
.allow_hyphen_values(true)
.value_terminator(";")
.value_name("cmd"),
).arg(
arg("exclude")
.long("exclude")
.short("E")
.takes_value(true)
.value_name("pattern")
.number_of_values(1)
.multiple(true),
).arg(
arg("ignore-file")
.long("ignore-file")
.takes_value(true)
.value_name("path")
.number_of_values(1)
.multiple(true),
).arg(
arg("color")
.long("color")
.short("c")
.takes_value(true)
.value_name("when")
.possible_values(&["never", "auto", "always"])
.hide_possible_values(true),
).arg(
arg("threads")
.long("threads")
.short("j")
.takes_value(true)
.value_name("num"),
).arg(
arg("size")
.long("size")
.short("S")
.takes_value(true)
.number_of_values(1)
.allow_hyphen_values(true)
.multiple(true),
).arg(
arg("max-buffer-time")
.long("max-buffer-time")
.takes_value(true)
.hidden(true),
).arg(arg("pattern"))
.arg(arg("path").multiple(true))
}
#[cfg_attr(rustfmt, rustfmt_skip)]
fn usage() -> HashMap<&'static str, Help> {
let mut h = HashMap::new();
doc!(h, "hidden"
, "Search hidden files and directories"
, "Include hidden directories and files in the search results (default: hidden files \
and directories are skipped).");
doc!(h, "no-ignore"
, "Do not respect .(git|fd)ignore files"
, "Show search results from files and directories that would otherwise be ignored by \
'.gitignore' or '.fdignore' files.");
doc!(h, "no-ignore-vcs"
, "Do not respect .gitignore files"
, "Show search results from files and directories that would otherwise be ignored by \
'.gitignore' files.");
doc!(h, "case-sensitive"
, "Case-sensitive search (default: smart case)"
, "Perform a case-sensitive search. By default, fd uses case-insensitive searches, \
unless the pattern contains an uppercase character (smart case).");
doc!(h, "ignore-case"
, "Case-insensitive search (default: smart case)"
, "Perform a case-insensitive search. By default, fd uses case-insensitive searches, \
unless the pattern contains an uppercase character (smart case).");
doc!(h, "fixed-strings"
, "Treat the pattern as a literal string"
, "Treat the pattern as a literal string instead of a regular expression.");
doc!(h, "absolute-path"
, "Show absolute instead of relative paths"
, "Shows the full path starting from the root as opposed to relative paths.");
doc!(h, "follow"
, "Follow symbolic links"
, "By default, fd does not descend into symlinked directories. Using this flag, symbolic \
links are also traversed.");
doc!(h, "full-path"
, "Search full path (default: file-/dirname only)"
, "By default, the search pattern is only matched against the filename (or directory \
name). Using this flag, the pattern is matched against the full path.");
doc!(h, "null_separator"
, "Separate results by the null character"
, "Separate search results by the null character (instead of newlines). Useful for \
piping results to 'xargs'.");
doc!(h, "depth"
, "Set maximum search depth (default: none)"
, "Limit the directory traversal to a given depth. By default, there is no limit \
on the search depth.");
doc!(h, "file-type"
, "Filter by type: file (f), directory (d), symlink (l),\nexecutable (x), empty (e)"
, "Filter the search by type (multiple allowable filetypes can be specified):\n \
'f' or 'file': regular files\n \
'd' or 'directory': directories\n \
'l' or 'symlink': symbolic links\n \
'x' or 'executable': executables\n \
'e' or 'empty': empty files or directories");
doc!(h, "extension"
, "Filter by file extension"
, "(Additionally) filter search results by their file extension. Multiple allowable file \
extensions can be specified.");
doc!(h, "exec"
, "Execute a command for each search result"
, "Execute a command for each search result.\n\
All arguments following --exec are taken to be arguments to the command until the \
argument ';' is encountered.\n\
Each occurrence of the following placeholders is substituted by a path derived from the \
current search result before the command is executed:\n \
'{}': path\n \
'{/}': basename\n \
'{//}': parent directory\n \
'{.}': path without file extension\n \
'{/.}': basename without file extension");
doc!(h, "exclude"
, "Exclude entries that match the given glob pattern"
, "Exclude files/directories that match the given glob pattern. This overrides any \
other ignore logic. Multiple exclude patterns can be specified.");
doc!(h, "ignore-file"
, "Add a custom ignore-file in .gitignore format"
, "Add a custom ignore-file in '.gitignore' format. These files have a low precedence.");
doc!(h, "color"
, "When to use colors: never, *auto*, always"
, "Declare when to use color for the pattern match output:\n \
'auto': show colors if the output goes to an interactive console (default)\n \
'never': do not use colorized output\n \
'always': always use colorized output");
doc!(h, "threads"
, "Set number of threads to use for searching & executing"
, "Set number of threads to use for searching & executing (default: number of available \
CPU cores)");
doc!(h, "max-buffer-time"
, "the time (in ms) to buffer, before streaming to the console"
, "Amount of time in milliseconds to buffer, before streaming the search results to\
the console.");
doc!(h, "pattern"
, "the search pattern, a regular expression (optional)");
doc!(h, "path"
, "the root directory for the filesystem search (optional)"
, "The directory where the filesystem search is rooted (optional). \
If omitted, search the current working directory.");
doc!(h, "rg-alias-hidden-ignore"
, "Alias for no-ignore and/or hidden"
, "Alias for no-ignore ('u') and no-ignore and hidden ('uu')");
doc!(h, "size"
, "Limit results based on the size of files."
, "Limit results based on the size of files using the format <+-><NUM><UNIT>.\n \
'+': file size must be greater than or equal to this\n \
'-': file size must be less than or equal to this\n \
'NUM': The numeric size (e.g. 500)\n \
'UNIT': The units for NUM. They are not case-sensitive.\n\
Allowed unit values:\n \
'b': bytes\n \
'k': kilobytes\n \
'm': megabytes\n \
'g': gigabytes\n \
't': terabytes\n \
'ki': kibibytes\n \
'mi': mebibytes\n \
'gi': gibibytes\n \
'ti': tebibytes");
h
}


@ -1,939 +0,0 @@
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
use std::time::Duration;
use anyhow::anyhow;
use clap::{
error::ErrorKind, value_parser, Arg, ArgAction, ArgGroup, ArgMatches, Command, Parser,
ValueEnum,
};
#[cfg(feature = "completions")]
use clap_complete::Shell;
use normpath::PathExt;
use crate::error::print_error;
use crate::exec::CommandSet;
use crate::filesystem;
#[cfg(unix)]
use crate::filter::OwnerFilter;
use crate::filter::SizeFilter;
#[derive(Parser)]
#[command(
name = "fd",
version,
about = "A program to find entries in your filesystem",
after_long_help = "Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues",
max_term_width = 98,
args_override_self = true,
group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
"max_results", "quiet", "max_one_result"])),
)]
pub struct Opts {
/// Include hidden directories and files in the search results (default:
/// hidden files and directories are skipped). Files and directories are
/// considered to be hidden if their name starts with a `.` sign (dot).
/// Any files or directories that are ignored due to the rules described by
/// --no-ignore are still ignored unless otherwise specified.
/// The flag can be overridden with --no-hidden.
#[arg(
long,
short = 'H',
help = "Search hidden files and directories",
long_help
)]
pub hidden: bool,
/// Overrides --hidden
#[arg(long, overrides_with = "hidden", hide = true, action = ArgAction::SetTrue)]
no_hidden: (),
/// Show search results from files and directories that would otherwise be
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.
/// The flag can be overridden with --ignore.
#[arg(
long,
short = 'I',
help = "Do not respect .(git|fd)ignore files",
long_help
)]
pub no_ignore: bool,
/// Overrides --no-ignore
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
ignore: (),
/// Show search results from files and directories that
/// would otherwise be ignored by '.gitignore' files.
/// The flag can be overridden with --ignore-vcs.
#[arg(
long,
hide_short_help = true,
help = "Do not respect .gitignore files",
long_help
)]
pub no_ignore_vcs: bool,
/// Overrides --no-ignore-vcs
#[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
ignore_vcs: (),
/// Do not require a git repository to respect gitignores.
/// By default, fd will only respect global gitignore rules, .gitignore rules,
/// and local exclude rules if fd detects that you are searching inside a
/// git repository. This flag allows you to relax this restriction such that
/// fd will respect all git related ignore rules regardless of whether you're
/// searching in a git repository or not.
///
///
/// This flag can be disabled with --require-git.
#[arg(
long,
overrides_with = "require_git",
hide_short_help = true,
// same description as ripgrep's flag: ripgrep/crates/core/app.rs
long_help
)]
pub no_require_git: bool,
/// Overrides --no-require-git
#[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
require_git: (),
/// Show search results from files and directories that would otherwise be
/// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
#[arg(
long,
hide_short_help = true,
help = "Do not respect .(git|fd)ignore files in parent directories",
long_help
)]
pub no_ignore_parent: bool,
/// Do not respect the global ignore file
#[arg(long, hide = true)]
pub no_global_ignore_file: bool,
/// Perform an unrestricted search, including ignored and hidden files. This is
/// an alias for '--no-ignore --hidden'.
#[arg(long = "unrestricted", short = 'u', overrides_with_all(&["ignore", "no_hidden"]), action(ArgAction::Count), hide_short_help = true,
help = "Unrestricted search, alias for '--no-ignore --hidden'",
long_help,
)]
rg_alias_hidden_ignore: u8,
/// Case-sensitive search (default: smart case)
#[arg(
long,
short = 's',
overrides_with("ignore_case"),
long_help = "Perform a case-sensitive search. By default, fd uses case-insensitive \
searches, unless the pattern contains an uppercase character (smart \
case)."
)]
pub case_sensitive: bool,
/// Perform a case-insensitive search. By default, fd uses case-insensitive
/// searches, unless the pattern contains an uppercase character (smart
/// case).
#[arg(
long,
short = 'i',
overrides_with("case_sensitive"),
help = "Case-insensitive search (default: smart case)",
long_help
)]
pub ignore_case: bool,
/// Perform a glob-based search instead of a regular expression search.
#[arg(
long,
short = 'g',
conflicts_with("fixed_strings"),
help = "Glob-based search (default: regular expression)",
long_help
)]
pub glob: bool,
/// Perform a regular-expression based search (default). This can be used to
/// override --glob.
#[arg(
long,
overrides_with("glob"),
hide_short_help = true,
help = "Regular-expression based search (default)",
long_help
)]
pub regex: bool,
/// Treat the pattern as a literal string instead of a regular expression. Note
/// that this also performs substring comparison. If you want to match on an
/// exact filename, consider using '--glob'.
#[arg(
long,
short = 'F',
alias = "literal",
hide_short_help = true,
help = "Treat pattern as literal string stead of regex",
long_help
)]
pub fixed_strings: bool,
/// Add additional required search patterns, all of which must be matched. Multiple
/// additional patterns can be specified. The patterns are regular
/// expressions, unless '--glob' or '--fixed-strings' is used.
#[arg(
long = "and",
value_name = "pattern",
help = "Additional search patterns that need to be matched",
long_help,
hide_short_help = true,
allow_hyphen_values = true
)]
pub exprs: Option<Vec<String>>,
/// Shows the full path starting from the root as opposed to relative paths.
/// The flag can be overridden with --relative-path.
#[arg(
long,
short = 'a',
help = "Show absolute instead of relative paths",
long_help
)]
pub absolute_path: bool,
/// Overrides --absolute-path
#[arg(long, overrides_with = "absolute_path", hide = true, action = ArgAction::SetTrue)]
relative_path: (),
/// Use a detailed listing format like 'ls -l'. This is basically an alias
/// for '--exec-batch ls -l' with some additional 'ls' options. This can be
/// used to see more metadata, to show symlink targets and to achieve a
/// deterministic sort order.
#[arg(
long,
short = 'l',
conflicts_with("absolute_path"),
help = "Use a long listing format with file metadata",
long_help
)]
pub list_details: bool,
/// Follow symbolic links
#[arg(
long,
short = 'L',
alias = "dereference",
long_help = "By default, fd does not descend into symlinked directories. Using this \
flag, symbolic links are also traversed. \
Flag can be overridden with --no-follow."
)]
pub follow: bool,
/// Overrides --follow
#[arg(long, overrides_with = "follow", hide = true, action = ArgAction::SetTrue)]
no_follow: (),
/// By default, the search pattern is only matched against the filename (or directory name). Using this flag, the pattern is matched against the full (absolute) path. Example:
/// fd --glob -p '**/.git/config'
#[arg(
long,
short = 'p',
help = "Search full abs. path (default: filename only)",
long_help,
verbatim_doc_comment
)]
pub full_path: bool,
/// Separate search results by the null character (instead of newlines).
/// Useful for piping results to 'xargs'.
#[arg(
long = "print0",
short = '0',
conflicts_with("list_details"),
hide_short_help = true,
help = "Separate search results by the null character",
long_help
)]
pub null_separator: bool,
/// Limit the directory traversal to a given depth. By default, there is no
/// limit on the search depth.
#[arg(
long,
short = 'd',
value_name = "depth",
alias("maxdepth"),
help = "Set maximum search depth (default: none)",
long_help
)]
max_depth: Option<usize>,
/// Only show search results starting at the given depth.
/// See also: '--max-depth' and '--exact-depth'
#[arg(
long,
value_name = "depth",
hide_short_help = true,
help = "Only show search results starting at the given depth.",
long_help
)]
min_depth: Option<usize>,
/// Only show search results at the exact given depth. This is an alias for
/// '--min-depth <depth> --max-depth <depth>'.
#[arg(long, value_name = "depth", hide_short_help = true, conflicts_with_all(&["max_depth", "min_depth"]),
help = "Only show search results at the exact given depth",
long_help,
)]
exact_depth: Option<usize>,
/// Exclude files/directories that match the given glob pattern. This
/// overrides any other ignore logic. Multiple exclude patterns can be
/// specified.
///
/// Examples:
/// {n} --exclude '*.pyc'
/// {n} --exclude node_modules
#[arg(
long,
short = 'E',
value_name = "pattern",
help = "Exclude entries that match the given glob pattern",
long_help
)]
pub exclude: Vec<String>,
/// Do not traverse into directories that match the search criteria. If
/// you want to exclude specific directories, use the '--exclude=…' option.
#[arg(long, hide_short_help = true, conflicts_with_all(&["size", "exact_depth"]),
long_help,
)]
pub prune: bool,
/// Filter the search by type:
/// {n} 'f' or 'file': regular files
/// {n} 'd' or 'dir' or 'directory': directories
/// {n} 'l' or 'symlink': symbolic links
/// {n} 's' or 'socket': socket
/// {n} 'p' or 'pipe': named pipe (FIFO)
/// {n} 'b' or 'block-device': block device
/// {n} 'c' or 'char-device': character device
/// {n}{n} 'x' or 'executable': executables
/// {n} 'e' or 'empty': empty files or directories
///
/// This option can be specified more than once to include multiple file types.
/// Searching for '--type file --type symlink' will show both regular files as
/// well as symlinks. Note that the 'executable' and 'empty' filters work differently:
/// '--type executable' implies '--type file' by default. And '--type empty' searches
/// for empty files and directories, unless either '--type file' or '--type directory'
/// is specified in addition.
///
/// Examples:
/// {n} - Only search for files:
/// {n} fd --type file …
/// {n} fd -tf …
/// {n} - Find both files and symlinks
/// {n} fd --type file --type symlink …
/// {n} fd -tf -tl …
/// {n} - Find executable files:
/// {n} fd --type executable
/// {n} fd -tx
/// {n} - Find empty files:
/// {n} fd --type empty --type file
/// {n} fd -te -tf
/// {n} - Find empty directories:
/// {n} fd --type empty --type directory
/// {n} fd -te -td
#[arg(
long = "type",
short = 't',
value_name = "filetype",
hide_possible_values = true,
value_enum,
help = "Filter by type: file (f), directory (d/dir), symlink (l), \
executable (x), empty (e), socket (s), pipe (p), \
char-device (c), block-device (b)",
long_help
)]
pub filetype: Option<Vec<FileType>>,
/// (Additionally) filter search results by their file extension. Multiple
/// allowable file extensions can be specified.
///
/// If you want to search for files without extension,
/// you can use the regex '^[^.]+$' as a normal search pattern.
#[arg(
long = "extension",
short = 'e',
value_name = "ext",
help = "Filter by file extension",
long_help
)]
pub extensions: Option<Vec<String>>,
/// Limit results based on the size of files using the format <+-><NUM><UNIT>.
/// '+': file size must be greater than or equal to this
/// '-': file size must be less than or equal to this
///
/// If neither '+' nor '-' is specified, file size must be exactly equal to this.
/// 'NUM': The numeric size (e.g. 500)
/// 'UNIT': The units for NUM. They are not case-sensitive.
/// Allowed unit values:
/// 'b': bytes
/// 'k': kilobytes (base ten, 10^3 = 1000 bytes)
/// 'm': megabytes
/// 'g': gigabytes
/// 't': terabytes
/// 'ki': kibibytes (base two, 2^10 = 1024 bytes)
/// 'mi': mebibytes
/// 'gi': gibibytes
/// 'ti': tebibytes
#[arg(long, short = 'S', value_parser = SizeFilter::from_string, allow_hyphen_values = true, verbatim_doc_comment, value_name = "size",
help = "Limit results based on the size of files",
long_help,
verbatim_doc_comment,
)]
pub size: Vec<SizeFilter>,
/// Filter results based on the file modification time. Files with modification times
/// greater than the argument are returned. The argument can be provided
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
/// If the time is not specified, it defaults to 00:00:00.
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
///
/// Examples:
/// {n} --changed-within 2weeks
/// {n} --change-newer-than '2018-10-27 10:00:00'
/// {n} --newer 2018-10-27
/// {n} --changed-after 1day
#[arg(
long,
alias("change-newer-than"),
alias("newer"),
alias("changed-after"),
value_name = "date|dur",
help = "Filter by file modification time (newer than)",
long_help
)]
pub changed_within: Option<String>,
/// Filter results based on the file modification time. Files with modification times
/// less than the argument are returned. The argument can be provided
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
/// '--change-older-than' or '--older' can be used as aliases.
///
/// Examples:
/// {n} --changed-before '2018-10-27 10:00:00'
/// {n} --change-older-than 2weeks
/// {n} --older 2018-10-27
#[arg(
long,
alias("change-older-than"),
alias("older"),
value_name = "date|dur",
help = "Filter by file modification time (older than)",
long_help
)]
pub changed_before: Option<String>,
/// Filter files by their user and/or group.
/// Format: [(user|uid)][:(group|gid)]. Either side is optional.
/// Precede either side with a '!' to exclude files instead.
///
/// Examples:
/// {n} --owner john
/// {n} --owner :students
/// {n} --owner '!john:students'
#[cfg(unix)]
#[arg(long, short = 'o', value_parser = OwnerFilter::from_string, value_name = "user:group",
help = "Filter by owning user and/or group",
long_help,
)]
pub owner: Option<OwnerFilter>,
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
/// '{}': path (of the current search result)
/// '{/}': basename
/// '{//}': parent directory
/// '{.}': path without file extension
/// '{/.}': basename without file extension
#[arg(
long,
value_name = "fmt",
help = "Print results according to template",
conflicts_with = "list_details"
)]
pub format: Option<String>,
#[command(flatten)]
pub exec: Exec,
/// Maximum number of arguments to pass to the command given with -X.
/// If the number of results is greater than the given size,
/// the command given with -X is run again with remaining arguments.
/// A batch size of zero means there is no limit (default), but note
/// that batching might still happen due to OS restrictions on the
/// maximum length of command lines.
#[arg(
long,
value_name = "size",
hide_short_help = true,
requires("exec_batch"),
value_parser = value_parser!(usize),
default_value_t,
help = "Max number of arguments to run as a batch size with -X",
long_help,
)]
pub batch_size: usize,
/// Add a custom ignore-file in '.gitignore' format. These files have a low precedence.
#[arg(
long,
value_name = "path",
hide_short_help = true,
help = "Add a custom ignore-file in '.gitignore' format",
long_help
)]
pub ignore_file: Vec<PathBuf>,
/// Declare when to use color for the pattern match output
#[arg(
long,
short = 'c',
value_enum,
default_value_t = ColorWhen::Auto,
value_name = "when",
help = "When to use colors",
long_help,
)]
pub color: ColorWhen,
/// Add a terminal hyperlink to a file:// url for each path in the output.
///
/// Auto mode is used if no argument is given to this option.
///
/// This doesn't do anything for --exec and --exec-batch.
#[arg(
long,
alias = "hyper",
value_name = "when",
require_equals = true,
value_enum,
default_value_t = HyperlinkWhen::Never,
default_missing_value = "auto",
num_args = 0..=1,
help = "Add hyperlinks to output paths"
)]
pub hyperlink: HyperlinkWhen,
/// Set number of threads to use for searching & executing (default: number
/// of available CPU cores)
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
pub threads: Option<NonZeroUsize>,
/// Milliseconds to buffer before streaming search results to console
///
/// Amount of time in milliseconds to buffer, before streaming the search
/// results to the console.
#[arg(long, hide = true, value_parser = parse_millis)]
pub max_buffer_time: Option<Duration>,
/// Limit the number of search results to 'count' and quit immediately.
#[arg(
long,
value_name = "count",
hide_short_help = true,
overrides_with("max_one_result"),
help = "Limit the number of search results",
long_help
)]
max_results: Option<usize>,
/// Limit the search to a single result and quit immediately.
/// This is an alias for '--max-results=1'.
#[arg(
short = '1',
hide_short_help = true,
overrides_with("max_results"),
help = "Limit search to a single result",
long_help
)]
max_one_result: bool,
/// When the flag is present, the program does not print anything and will
/// return with an exit code of 0 if there is at least one match. Otherwise, the
/// exit code will be 1.
/// '--has-results' can be used as an alias.
#[arg(
long,
short = 'q',
alias = "has-results",
hide_short_help = true,
conflicts_with("max_results"),
help = "Print nothing, exit code 0 if match found, 1 otherwise",
long_help
)]
pub quiet: bool,
/// Enable the display of filesystem errors for situations such as
/// insufficient permissions or dead symlinks.
#[arg(
long,
hide_short_help = true,
help = "Show filesystem errors",
long_help
)]
pub show_errors: bool,
/// Change the current working directory of fd to the provided path. This
/// means that search results will be shown with respect to the given base
/// path. Note that relative paths which are passed to fd via the positional
/// <path> argument or the '--search-path' option will also be resolved
/// relative to this directory.
#[arg(
long,
value_name = "path",
hide_short_help = true,
help = "Change current working directory",
long_help
)]
pub base_directory: Option<PathBuf>,
/// the search pattern which is either a regular expression (default) or a glob
/// pattern (if --glob is used). If no pattern has been specified, every entry
/// is considered a match. If your pattern starts with a dash (-), make sure to
/// pass '--' first, or it will be considered as a flag (fd -- '-foo').
#[arg(
default_value = "",
hide_default_value = true,
value_name = "pattern",
help = "the search pattern (a regular expression, unless '--glob' is used; optional)",
long_help
)]
pub pattern: String,
/// Set the path separator to use when printing file paths. The default is
/// the OS-specific separator ('/' on Unix, '\' on Windows).
#[arg(
long,
value_name = "separator",
hide_short_help = true,
help = "Set path separator when printing file paths",
long_help
)]
pub path_separator: Option<String>,
/// The directory where the filesystem search is rooted (optional). If
/// omitted, search the current working directory.
#[arg(action = ArgAction::Append,
value_name = "path",
help = "the root directories for the filesystem search (optional)",
long_help,
)]
path: Vec<PathBuf>,
/// Provide paths to search as an alternative to the positional <path>
/// argument. Changes the usage to `fd [OPTIONS] --search-path <path>
/// --search-path <path2> [<pattern>]`
#[arg(
long,
conflicts_with("path"),
value_name = "search-path",
hide_short_help = true,
help = "Provides paths to search as an alternative to the positional <path> argument",
long_help
)]
search_path: Vec<PathBuf>,
/// By default, relative paths are prefixed with './' when -x/--exec,
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
/// path starting with '-' being treated as a command line option. Use
/// this flag to change this behavior. If this flag is used without a value,
/// it is equivalent to passing "always".
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)]
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
/// By default, fd will traverse the file system tree as far as other options
/// dictate. With this flag, fd ensures that it does not descend into a
/// different file system than the one it started in. Comparable to the -mount
/// or -xdev filters of find(1).
#[cfg(any(unix, windows))]
#[arg(long, aliases(&["mount", "xdev"]), hide_short_help = true, long_help)]
pub one_file_system: bool,
#[cfg(feature = "completions")]
#[arg(long, hide = true, exclusive = true)]
gen_completions: Option<Option<Shell>>,
}
impl Opts {
pub fn search_paths(&self) -> anyhow::Result<Vec<PathBuf>> {
// would it make sense to concatenate these?
let paths = if !self.path.is_empty() {
&self.path
} else if !self.search_path.is_empty() {
&self.search_path
} else {
let current_directory = Path::new("./");
ensure_current_directory_exists(current_directory)?;
return Ok(vec![self.normalize_path(current_directory)]);
};
Ok(paths
.iter()
.filter_map(|path| {
if filesystem::is_existing_directory(path) {
Some(self.normalize_path(path))
} else {
print_error(format!(
"Search path '{}' is not a directory.",
path.to_string_lossy()
));
None
}
})
.collect())
}
fn normalize_path(&self, path: &Path) -> PathBuf {
if self.absolute_path {
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
} else if path == Path::new(".") {
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
PathBuf::from("./")
} else {
path.to_path_buf()
}
}
pub fn no_search_paths(&self) -> bool {
self.path.is_empty() && self.search_path.is_empty()
}
#[inline]
pub fn rg_alias_ignore(&self) -> bool {
self.rg_alias_hidden_ignore > 0
}
pub fn max_depth(&self) -> Option<usize> {
self.max_depth.or(self.exact_depth)
}
pub fn min_depth(&self) -> Option<usize> {
self.min_depth.or(self.exact_depth)
}
pub fn threads(&self) -> NonZeroUsize {
self.threads.unwrap_or_else(default_num_threads)
}
pub fn max_results(&self) -> Option<usize> {
self.max_results
.filter(|&m| m > 0)
.or_else(|| self.max_one_result.then_some(1))
}
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
use self::StripCwdWhen::*;
self.no_search_paths()
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
Auto => auto_pred(),
Always => true,
Never => false,
}
}
#[cfg(feature = "completions")]
pub fn gen_completions(&self) -> anyhow::Result<Option<Shell>> {
self.gen_completions
.map(|maybe_shell| match maybe_shell {
Some(sh) => Ok(sh),
None => {
Shell::from_env().ok_or_else(|| anyhow!("Unable to get shell from environment"))
}
})
.transpose()
}
}
/// Get the default number of threads to use, if not explicitly specified.
fn default_num_threads() -> NonZeroUsize {
// If we can't get the amount of parallelism for some reason, then
// default to a single thread, because that is safe.
let fallback = NonZeroUsize::MIN;
// To limit startup overhead on massively parallel machines, don't use more
// than 64 threads.
let limit = NonZeroUsize::new(64).unwrap();
std::thread::available_parallelism()
.unwrap_or(fallback)
.min(limit)
}
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
pub enum FileType {
#[value(alias = "f")]
File,
#[value(alias = "d", alias = "dir")]
Directory,
#[value(alias = "l")]
Symlink,
#[value(alias = "b")]
BlockDevice,
#[value(alias = "c")]
CharDevice,
/// A file which is executable by the current effective user
#[value(alias = "x")]
Executable,
#[value(alias = "e")]
Empty,
#[value(alias = "s")]
Socket,
#[value(alias = "p")]
Pipe,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
pub enum ColorWhen {
/// show colors if the output goes to an interactive console (default)
Auto,
/// always use colorized output
Always,
/// do not use colorized output
Never,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
pub enum StripCwdWhen {
/// Use the default behavior
Auto,
/// Always strip the ./ at the beginning of paths
Always,
/// Never strip the ./
Never,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
pub enum HyperlinkWhen {
/// Use hyperlinks only if color is enabled
Auto,
/// Always use hyperlinks when printing file paths
Always,
/// Never use hyperlinks
Never,
}
// there isn't a derive api for getting grouped values yet,
// so we have to use hand-rolled parsing for exec and exec-batch
pub struct Exec {
pub command: Option<CommandSet>,
}
impl clap::FromArgMatches for Exec {
fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {
let command = matches
.get_occurrences::<String>("exec")
.map(CommandSet::new)
.or_else(|| {
matches
.get_occurrences::<String>("exec_batch")
.map(CommandSet::new_batch)
})
.transpose()
.map_err(|e| clap::Error::raw(ErrorKind::InvalidValue, e))?;
Ok(Exec { command })
}
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> clap::error::Result<()> {
*self = Self::from_arg_matches(matches)?;
Ok(())
}
}
impl clap::Args for Exec {
fn augment_args(cmd: Command) -> Command {
cmd.arg(Arg::new("exec")
.action(ArgAction::Append)
.long("exec")
.short('x')
.num_args(1..)
.allow_hyphen_values(true)
.value_terminator(";")
.value_name("cmd")
.conflicts_with("list_details")
.help("Execute a command for each search result")
.long_help(
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
All positional arguments following --exec are considered to be arguments to the command - not to fd. \
It is therefore recommended to place the '-x'/'--exec' option last.\n\
The following placeholders are substituted before the command is executed:\n \
'{}': path (of the current search result)\n \
'{/}': basename\n \
'{//}': parent directory\n \
'{.}': path without file extension\n \
'{/.}': basename without file extension\n \
'{{': literal '{' (for escaping)\n \
'}}': literal '}' (for escaping)\n\n\
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
Examples:\n\n \
- find all *.zip files and unzip them:\n\n \
fd -e zip -x unzip\n\n \
- find *.h and *.cpp files and run \"clang-format -i ..\" for each of them:\n\n \
fd -e h -e cpp -x clang-format -i\n\n \
- Convert all *.jpg files to *.png files:\n\n \
fd -e jpg -x convert {} {.}.png\
",
),
)
.arg(
Arg::new("exec_batch")
.action(ArgAction::Append)
.long("exec-batch")
.short('X')
.num_args(1..)
.allow_hyphen_values(true)
.value_terminator(";")
.value_name("cmd")
.conflicts_with_all(["exec", "list_details"])
.help("Execute a command with all search results at once")
.long_help(
"Execute the given command once, with all search results as arguments.\n\
The order of the arguments is non-deterministic, and should not be relied upon.\n\
One of the following placeholders is substituted before the command is executed:\n \
'{}': path (of all search results)\n \
'{/}': basename\n \
'{//}': parent directory\n \
'{.}': path without file extension\n \
'{/.}': basename without file extension\n \
'{{': literal '{' (for escaping)\n \
'}}': literal '}' (for escaping)\n\n\
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
Examples:\n\n \
- Find all test_*.py files and open them in your favorite editor:\n\n \
fd -g 'test_*.py' -X vim\n\n \
- Find all *.rs files and count the lines with \"wc -l ...\":\n\n \
fd -e rs -X wc -l\
"
),
)
}
fn augment_args_for_update(cmd: Command) -> Command {
Self::augment_args(cmd)
}
}
fn parse_millis(arg: &str) -> Result<Duration, std::num::ParseIntError> {
Ok(Duration::from_millis(arg.parse()?))
}
fn ensure_current_directory_exists(current_directory: &Path) -> anyhow::Result<()> {
if filesystem::is_existing_directory(current_directory) {
Ok(())
} else {
Err(anyhow!(
"Could not retrieve current directory (has it been deleted?)."
))
}
}
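
Not from either tree — a minimal, self-contained sketch of how the `Option<Option<StripCwdWhen>>` field above maps the three command-line forms onto a value, and how `Opts::strip_cwd_prefix()` resolves them. It assumes clap 4's derive API (as used throughout this file); the `Cli` and `resolve` names are illustrative only.

// Sketch only: `--strip-cwd-prefix` with an optional value.
use clap::{Parser, ValueEnum};

#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
enum StripCwdWhen { Auto, Always, Never }

#[derive(Parser)]
struct Cli {
    /// Omitted                    -> None
    /// `--strip-cwd-prefix`       -> Some(None)        (treated like "always")
    /// `--strip-cwd-prefix=never` -> Some(Some(Never))
    #[arg(long, value_name = "when", require_equals = true)]
    strip_cwd_prefix: Option<Option<StripCwdWhen>>,
}

// Mirrors the resolution in Opts::strip_cwd_prefix() above.
fn resolve(opt: Option<Option<StripCwdWhen>>, auto_pred: bool) -> bool {
    use StripCwdWhen::*;
    match opt.map_or(Auto, |o| o.unwrap_or(Always)) {
        Auto => auto_pred,
        Always => true,
        Never => false,
    }
}

fn main() {
    let cli = Cli::parse_from(["fd", "--strip-cwd-prefix=never"]);
    assert!(!resolve(cli.strip_cwd_prefix, true));
}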

View File

@ -1,140 +0,0 @@
use std::{path::PathBuf, sync::Arc, time::Duration};
use lscolors::LsColors;
use regex::bytes::RegexSet;
use crate::exec::CommandSet;
use crate::filetypes::FileTypes;
#[cfg(unix)]
use crate::filter::OwnerFilter;
use crate::filter::{SizeFilter, TimeFilter};
use crate::fmt::FormatTemplate;
/// Configuration options for *fd*.
pub struct Config {
/// Whether the search is case-sensitive or case-insensitive.
pub case_sensitive: bool,
/// Whether to search within the full file path or just the base name (filename or directory
/// name).
pub search_full_path: bool,
/// Whether to ignore hidden files and directories (or not).
pub ignore_hidden: bool,
/// Whether to respect `.fdignore` files or not.
pub read_fdignore: bool,
/// Whether to respect ignore files in parent directories or not.
pub read_parent_ignore: bool,
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
pub read_vcsignore: bool,
/// Whether to require a `.git` directory to respect gitignore files.
pub require_git_to_read_vcsignore: bool,
/// Whether to respect the global ignore file or not.
pub read_global_ignore: bool,
/// Whether to follow symlinks or not.
pub follow_links: bool,
/// Whether to limit the search to starting file system or not.
pub one_file_system: bool,
/// Whether elements of output should be separated by a null character
pub null_separator: bool,
/// The maximum search depth, or `None` if no maximum search depth should be set.
///
/// A depth of `1` includes all files under the current directory, a depth of `2` also includes
/// all files under subdirectories of the current directory, etc.
pub max_depth: Option<usize>,
/// The minimum depth for reported entries, or `None`.
pub min_depth: Option<usize>,
/// Whether to stop traversing into matching directories.
pub prune: bool,
/// The number of threads to use.
pub threads: usize,
/// If true, the program doesn't print anything and will instead return an exit code of 0
/// if there's at least one match. Otherwise, the exit code will be 1.
pub quiet: bool,
/// Time to buffer results internally before streaming to the console. This is useful to
/// provide a sorted output, in case the total execution time is shorter than
/// `max_buffer_time`.
pub max_buffer_time: Option<Duration>,
/// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines
/// how to style different filetypes.
pub ls_colors: Option<LsColors>,
/// Whether or not we are writing to an interactive terminal
#[cfg_attr(not(unix), allow(unused))]
pub interactive_terminal: bool,
/// The type of file to search for. If set to `None`, all file types are displayed. If
/// set to `Some(..)`, only the types that are specified are shown.
pub file_types: Option<FileTypes>,
/// The extension to search for. Only entries matching the extension will be included.
///
/// The value (if present) will be a lowercase string without leading dots.
pub extensions: Option<RegexSet>,
/// A format string to use to format results, similarly to exec
pub format: Option<FormatTemplate>,
/// If a value is supplied, each item found will be used to generate and execute commands.
pub command: Option<Arc<CommandSet>>,
/// Maximum number of search results to pass to each `command`. If zero, the number is
/// unlimited.
pub batch_size: usize,
/// A list of glob patterns that should be excluded from the search.
pub exclude_patterns: Vec<String>,
/// A list of custom ignore files.
pub ignore_files: Vec<PathBuf>,
/// The given constraints on the size of returned files
pub size_constraints: Vec<SizeFilter>,
/// Constraints on last modification time of files
pub time_constraints: Vec<TimeFilter>,
#[cfg(unix)]
/// User/group ownership constraint
pub owner_constraint: Option<OwnerFilter>,
/// Whether or not to display filesystem errors
pub show_filesystem_errors: bool,
/// The separator used to print file paths.
pub path_separator: Option<String>,
/// The actual separator, either the system default separator or `path_separator`
pub actual_path_separator: String,
/// The maximum number of search results
pub max_results: Option<usize>,
/// Whether or not to strip the './' prefix for search results
pub strip_cwd_prefix: bool,
/// Whether or not to use hyperlinks on paths
pub hyperlink: bool,
}
impl Config {
/// Check whether results are being printed.
pub fn is_printing(&self) -> bool {
self.command.is_none()
}
}

View File

@ -1,155 +0,0 @@
use std::cell::OnceCell;
use std::ffi::OsString;
use std::fs::{FileType, Metadata};
use std::path::{Path, PathBuf};
use lscolors::{Colorable, LsColors, Style};
use crate::config::Config;
use crate::filesystem::strip_current_dir;
#[derive(Debug)]
enum DirEntryInner {
Normal(ignore::DirEntry),
BrokenSymlink(PathBuf),
}
#[derive(Debug)]
pub struct DirEntry {
inner: DirEntryInner,
metadata: OnceCell<Option<Metadata>>,
style: OnceCell<Option<Style>>,
}
impl DirEntry {
#[inline]
pub fn normal(e: ignore::DirEntry) -> Self {
Self {
inner: DirEntryInner::Normal(e),
metadata: OnceCell::new(),
style: OnceCell::new(),
}
}
pub fn broken_symlink(path: PathBuf) -> Self {
Self {
inner: DirEntryInner::BrokenSymlink(path),
metadata: OnceCell::new(),
style: OnceCell::new(),
}
}
pub fn path(&self) -> &Path {
match &self.inner {
DirEntryInner::Normal(e) => e.path(),
DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(),
}
}
pub fn into_path(self) -> PathBuf {
match self.inner {
DirEntryInner::Normal(e) => e.into_path(),
DirEntryInner::BrokenSymlink(p) => p,
}
}
/// Returns the path as it should be presented to the user.
pub fn stripped_path(&self, config: &Config) -> &Path {
if config.strip_cwd_prefix {
strip_current_dir(self.path())
} else {
self.path()
}
}
/// Returns the path as it should be presented to the user.
pub fn into_stripped_path(self, config: &Config) -> PathBuf {
if config.strip_cwd_prefix {
self.stripped_path(config).to_path_buf()
} else {
self.into_path()
}
}
pub fn file_type(&self) -> Option<FileType> {
match &self.inner {
DirEntryInner::Normal(e) => e.file_type(),
DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()),
}
}
pub fn metadata(&self) -> Option<&Metadata> {
self.metadata
.get_or_init(|| match &self.inner {
DirEntryInner::Normal(e) => e.metadata().ok(),
DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(),
})
.as_ref()
}
pub fn depth(&self) -> Option<usize> {
match &self.inner {
DirEntryInner::Normal(e) => Some(e.depth()),
DirEntryInner::BrokenSymlink(_) => None,
}
}
pub fn style(&self, ls_colors: &LsColors) -> Option<&Style> {
self.style
.get_or_init(|| ls_colors.style_for(self).cloned())
.as_ref()
}
}
impl PartialEq for DirEntry {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.path() == other.path()
}
}
impl Eq for DirEntry {}
impl PartialOrd for DirEntry {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for DirEntry {
#[inline]
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.path().cmp(other.path())
}
}
impl Colorable for DirEntry {
fn path(&self) -> PathBuf {
self.path().to_owned()
}
fn file_name(&self) -> OsString {
let name = match &self.inner {
DirEntryInner::Normal(e) => e.file_name(),
DirEntryInner::BrokenSymlink(path) => {
// Path::file_name() only works if the last component is Normal,
// but we want it for all component types, so we open code it.
// Copied from LsColors::style_for_path_with_metadata().
path.components()
.last()
.map(|c| c.as_os_str())
.unwrap_or_else(|| path.as_os_str())
}
};
name.to_owned()
}
fn file_type(&self) -> Option<FileType> {
self.file_type()
}
fn metadata(&self) -> Option<Metadata> {
self.metadata().cloned()
}
}
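
Not from either tree — `DirEntry` above caches `Metadata` and `Style` behind `OnceCell` so each is computed at most once per entry. A minimal sketch of that caching pattern (std only; `Entry` is a made-up name, not part of fd):

use std::cell::OnceCell;

struct Entry {
    path: std::path::PathBuf,
    metadata: OnceCell<Option<std::fs::Metadata>>,
}

impl Entry {
    fn metadata(&self) -> Option<&std::fs::Metadata> {
        // The closure runs only on the first call; later calls reuse the cached result.
        self.metadata
            .get_or_init(|| std::fs::metadata(&self.path).ok())
            .as_ref()
    }
}

fn main() {
    let e = Entry { path: ".".into(), metadata: OnceCell::new() };
    // fs::metadata runs at most once, even across repeated calls.
    assert_eq!(e.metadata().is_some(), e.metadata().is_some());
}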

View File

@ -1,3 +0,0 @@
pub fn print_error(msg: impl Into<String>) {
eprintln!("[fd error]: {}", msg.into());
}

View File

@ -1,110 +1,40 @@
+++ added (v7.1.0):

// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

use std::io;
use std::io::Write;
use std::process::Command;
use std::sync::{Arc, Mutex};

/// Executes a command.
pub fn execute_command(mut cmd: Command, out_perm: Arc<Mutex<()>>) {
    // Spawn the supplied command.
    let output = cmd.output();

    // Then wait for the command to exit, if it was spawned.
    match output {
        Ok(output) => {
            // While this lock is active, this thread will be the only thread allowed
            // to write its outputs.
            let _lock = out_perm.lock().unwrap();

            let stdout = io::stdout();
            let stderr = io::stderr();

            let _ = stdout.lock().write_all(&output.stdout);
            let _ = stderr.lock().write_all(&output.stderr);
        }
        Err(why) => {
            if why.kind() == io::ErrorKind::NotFound {
                eprintln!("fd: execution error: command not found");
            } else {
                eprintln!("fd: execution error: {}", why);
            }
        }
    }
}

--- removed (master):

use std::io;
use std::io::Write;
use std::sync::Mutex;

use argmax::Command;

use crate::error::print_error;
use crate::exit_codes::ExitCode;
struct Outputs {
stdout: Vec<u8>,
stderr: Vec<u8>,
}
struct OutputBuffer<'a> {
output_permission: &'a Mutex<()>,
outputs: Vec<Outputs>,
}
impl<'a> OutputBuffer<'a> {
fn new(output_permission: &'a Mutex<()>) -> Self {
Self {
output_permission,
outputs: Vec::new(),
}
}
fn push(&mut self, stdout: Vec<u8>, stderr: Vec<u8>) {
self.outputs.push(Outputs { stdout, stderr });
}
fn write(self) {
// avoid taking the lock if there is nothing to do
if self.outputs.is_empty() {
return;
}
// While this lock is active, this thread will be the only thread allowed
// to write its outputs.
let _lock = self.output_permission.lock().unwrap();
let stdout = io::stdout();
let stderr = io::stderr();
let mut stdout = stdout.lock();
let mut stderr = stderr.lock();
for output in self.outputs.iter() {
let _ = stdout.write_all(&output.stdout);
let _ = stderr.write_all(&output.stderr);
}
}
}
/// Executes a command.
pub fn execute_commands<I: Iterator<Item = io::Result<Command>>>(
    cmds: I,
    out_perm: &Mutex<()>,
    enable_output_buffering: bool,
) -> ExitCode {
    let mut output_buffer = OutputBuffer::new(out_perm);
    for result in cmds {
        let mut cmd = match result {
            Ok(cmd) => cmd,
            Err(e) => return handle_cmd_error(None, e),
        };

        // Spawn the supplied command.
        let output = if enable_output_buffering {
            cmd.output()
        } else {
            // If running on only one thread, don't buffer output
            // Allows for viewing and interacting with intermediate command output
            cmd.spawn().and_then(|c| c.wait_with_output())
        };

        // Then wait for the command to exit, if it was spawned.
        match output {
            Ok(output) => {
                if enable_output_buffering {
                    output_buffer.push(output.stdout, output.stderr);
                }
                if output.status.code() != Some(0) {
                    output_buffer.write();
                    return ExitCode::GeneralError;
                }
            }
            Err(why) => {
                output_buffer.write();
                return handle_cmd_error(Some(&cmd), why);
            }
        }
    }
    output_buffer.write();
    ExitCode::Success
}

pub fn handle_cmd_error(cmd: Option<&Command>, err: io::Error) -> ExitCode {
    match (cmd, err) {
        (Some(cmd), err) if err.kind() == io::ErrorKind::NotFound => {
            print_error(format!(
                "Command not found: {}",
                cmd.get_program().to_string_lossy()
            ));
            ExitCode::GeneralError
        }
        (_, err) => {
            print_error(format!("Problem while executing command: {}", err));
            ExitCode::GeneralError
        }
    }
}
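
Not from either tree — the removed `OutputBuffer` above exists so that, when several commands run in parallel, each command's stdout/stderr is written as one uninterrupted chunk. A small illustrative sketch of that idea (std only):

use std::io::{self, Write};
use std::sync::Mutex;
use std::thread;

fn main() {
    let permission = Mutex::new(());
    let permission = &permission;
    thread::scope(|s| {
        for id in 0..4 {
            s.spawn(move || {
                // "Run" a command and buffer what it printed.
                let buffered = format!("result from worker {id}\n").into_bytes();
                // Only one thread writes at a time, so buffered chunks never interleave.
                let _lock = permission.lock().unwrap();
                let _ = io::stdout().lock().write_all(&buffered);
            });
        }
    });
}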

165
src/exec/input.rs Normal file
View File

@ -0,0 +1,165 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::path::MAIN_SEPARATOR;
/// Removes the parent component of the path
pub fn basename(path: &str) -> &str {
let mut index = 0;
for (id, character) in path.char_indices() {
if character == MAIN_SEPARATOR {
index = id;
}
}
// FIXME: On Windows, should return what for C:file.txt D:file.txt and \\server\share ?
if index != 0 {
return &path[index + 1..];
}
path
}
/// Removes the extension from the path
pub fn remove_extension(path: &str) -> &str {
let mut has_dir = false;
let mut dir_index = 0;
let mut ext_index = 0;
for (id, character) in path.char_indices() {
if character == MAIN_SEPARATOR {
has_dir = true;
dir_index = id;
}
if character == '.' {
ext_index = id;
}
}
// Account for hidden files and directories
if ext_index != 0 && (!has_dir || dir_index + 2 <= ext_index) {
return &path[0..ext_index];
}
path
}
/// Removes the basename from the path.
pub fn dirname(path: &str) -> &str {
let mut has_dir = false;
let mut index = 0;
for (id, character) in path.char_indices() {
if character == MAIN_SEPARATOR {
has_dir = true;
index = id;
}
}
// FIXME: On Windows, return what for C:file.txt D:file.txt and \\server\share ?
if !has_dir {
"."
} else if index == 0 {
&path[..1]
} else {
&path[0..index]
}
}
#[cfg(test)]
mod tests {
use super::{basename, dirname, remove_extension, MAIN_SEPARATOR};
fn correct(input: &str) -> String {
input.replace('/', &MAIN_SEPARATOR.to_string())
}
#[test]
fn path_remove_ext_simple() {
assert_eq!(remove_extension("foo.txt"), "foo");
}
#[test]
fn path_remove_ext_dir() {
assert_eq!(
remove_extension(&correct("dir/foo.txt")),
correct("dir/foo")
);
}
#[test]
fn path_hidden() {
assert_eq!(remove_extension(".foo"), ".foo")
}
#[test]
fn path_remove_ext_utf8() {
assert_eq!(remove_extension("💖.txt"), "💖");
}
#[test]
fn path_remove_ext_empty() {
assert_eq!(remove_extension(""), "");
}
#[test]
fn path_basename_simple() {
assert_eq!(basename("foo.txt"), "foo.txt");
}
#[test]
fn path_basename_no_ext() {
assert_eq!(remove_extension(basename("foo.txt")), "foo");
}
#[test]
fn path_basename_dir() {
assert_eq!(basename(&correct("dir/foo.txt")), "foo.txt");
}
#[test]
fn path_basename_empty() {
assert_eq!(basename(""), "");
}
#[test]
fn path_basename_utf8() {
assert_eq!(basename(&correct("💖/foo.txt")), "foo.txt");
assert_eq!(basename(&correct("dir/💖.txt")), "💖.txt");
}
#[test]
fn path_dirname_simple() {
assert_eq!(dirname("foo.txt"), ".");
}
#[test]
fn path_dirname_dir() {
assert_eq!(dirname(&correct("dir/foo.txt")), "dir");
}
#[test]
fn path_dirname_utf8() {
assert_eq!(dirname(&correct("💖/foo.txt")), "💖");
assert_eq!(dirname(&correct("dir/💖.txt")), "dir");
}
#[test]
fn path_dirname_empty() {
assert_eq!(dirname(""), ".");
}
#[test]
fn path_dirname_root() {
#[cfg(windows)]
assert_eq!(dirname("C:\\"), "C:");
#[cfg(windows)]
assert_eq!(dirname("\\"), "\\");
#[cfg(not(windows))]
assert_eq!(dirname("/"), "/");
}
}

View File

@ -1,67 +1,35 @@
+++ added (v7.1.0):

// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

use std::path::PathBuf;
use std::sync::mpsc::Receiver;
use std::sync::{Arc, Mutex};

use super::CommandTemplate;

/// An event loop that listens for inputs from the `rx` receiver. Each received input will
/// generate a command with the supplied command template. The generated command will then
/// be executed, and this process will continue until the receiver's sender has closed.
pub fn job(rx: Arc<Mutex<Receiver<PathBuf>>>, cmd: Arc<CommandTemplate>, out_perm: Arc<Mutex<()>>) {
    loop {
        // Create a lock on the shared receiver for this thread.
        let lock = rx.lock().unwrap();

        // Obtain the next path from the receiver, else if the channel
        // has closed, exit from the loop
        let value: PathBuf = match lock.recv() {
            Ok(value) => value,
            Err(_) => break,
        };

        // Drop the lock so that other threads can read from the receiver.
        drop(lock);

        // Generate a command and execute it.
        cmd.generate_and_execute(&value, Arc::clone(&out_perm));
    }
}

--- removed (master):

use std::sync::Mutex;

use crate::config::Config;
use crate::error::print_error;
use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::walk::WorkerResult;

use super::CommandSet;

/// An event loop that listens for inputs from the `rx` receiver. Each received input will
/// generate a command with the supplied command template. The generated command will then
/// be executed, and this process will continue until the receiver's sender has closed.
pub fn job(
    results: impl IntoIterator<Item = WorkerResult>,
    cmd: &CommandSet,
    out_perm: &Mutex<()>,
    config: &Config,
) -> ExitCode {
    // Output only needs to be buffered when running multiple threads
    let buffer_output: bool = config.threads > 1;

    let mut ret = ExitCode::Success;
    for result in results {
        // Obtain the next result from the receiver, else if the channel
        // has closed, exit from the loop
        let dir_entry = match result {
            WorkerResult::Entry(dir_entry) => dir_entry,
            WorkerResult::Error(err) => {
                if config.show_filesystem_errors {
                    print_error(err.to_string());
                }
                continue;
            }
        };

        // Generate a command, execute it and store its exit code.
        let code = cmd.execute(
            dir_entry.stripped_path(config),
            config.path_separator.as_deref(),
            out_perm,
            buffer_output,
        );
        ret = merge_exitcodes([ret, code]);
    }
    // Returns error in case of any error.
    ret
}

pub fn batch(
    results: impl IntoIterator<Item = WorkerResult>,
    cmd: &CommandSet,
    config: &Config,
) -> ExitCode {
    let paths = results
        .into_iter()
        .filter_map(|worker_result| match worker_result {
            WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),
            WorkerResult::Error(err) => {
                if config.show_filesystem_errors {
                    print_error(err.to_string());
                }
                None
            }
        });
    cmd.execute_batch(paths, config.batch_size, config.path_separator.as_deref())
}
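
Not from either tree — the v7.1.0 `job` above distributes work by sharing one `Receiver` behind an `Arc<Mutex<..>>`. A small self-contained sketch of that pattern (std only; names are illustrative):

use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let (tx, rx) = channel::<String>();
    let rx = Arc::new(Mutex::new(rx));

    let workers: Vec<_> = (0..2)
        .map(|_| {
            let rx = Arc::clone(&rx);
            thread::spawn(move || loop {
                // Lock only long enough to receive; the guard is released before the work runs.
                let item = match rx.lock().unwrap().recv() {
                    Ok(item) => item,
                    Err(_) => break, // channel closed
                };
                println!("processing {item}");
            })
        })
        .collect();

    for p in ["a.txt", "b.txt", "c.txt"] {
        tx.send(p.to_string()).unwrap();
    }
    drop(tx); // close the channel so the workers exit

    for w in workers {
        w.join().unwrap();
    }
}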

View File

@ -1,473 +1,177 @@
+++ added (v7.1.0):

// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

// TODO: Possible optimization could avoid pushing characters on a buffer.
mod command;
mod input;
mod job;
mod token;

use std::borrow::Cow;
use std::path::Path;
use std::process::Command;
use std::sync::{Arc, Mutex};

use regex::Regex;

use self::command::execute_command;
use self::input::{basename, dirname, remove_extension};
pub use self::job::job;
use self::token::Token;

--- removed (master):

mod command;
mod job;

use std::ffi::OsString;
use std::io;
use std::iter;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::sync::Mutex;

use anyhow::{bail, Result};
use argmax::Command;

use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::fmt::{FormatTemplate, Token};

use self::command::{execute_commands, handle_cmd_error};
pub use self::job::{batch, job};
/// Execution mode of the command
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExecutionMode {
/// Command is executed for each search result
OneByOne,
/// Command is run for a batch of results at once
Batch,
}
#[derive(Debug, Clone, PartialEq)]
pub struct CommandSet {
mode: ExecutionMode,
commands: Vec<CommandTemplate>,
}
impl CommandSet {
pub fn new<I, T, S>(input: I) -> Result<CommandSet>
where
I: IntoIterator<Item = T>,
T: IntoIterator<Item = S>,
S: AsRef<str>,
{
Ok(CommandSet {
mode: ExecutionMode::OneByOne,
commands: input
.into_iter()
.map(CommandTemplate::new)
.collect::<Result<_>>()?,
})
}
pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet>
where
I: IntoIterator<Item = T>,
T: IntoIterator<Item = S>,
S: AsRef<str>,
{
Ok(CommandSet {
mode: ExecutionMode::Batch,
commands: input
.into_iter()
.map(|args| {
let cmd = CommandTemplate::new(args)?;
if cmd.number_of_tokens() > 1 {
bail!("Only one placeholder allowed for batch commands");
}
if cmd.args[0].has_tokens() {
bail!("First argument of exec-batch is expected to be a fixed executable");
}
Ok(cmd)
})
.collect::<Result<Vec<_>>>()?,
})
}
pub fn in_batch_mode(&self) -> bool {
self.mode == ExecutionMode::Batch
}
pub fn execute(
&self,
input: &Path,
path_separator: Option<&str>,
out_perm: &Mutex<()>,
buffer_output: bool,
) -> ExitCode {
let commands = self
.commands
.iter()
.map(|c| c.generate(input, path_separator));
execute_commands(commands, out_perm, buffer_output)
}
pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode
where
I: Iterator<Item = PathBuf>,
{
let builders: io::Result<Vec<_>> = self
.commands
.iter()
.map(|c| CommandBuilder::new(c, limit))
.collect();
match builders {
Ok(mut builders) => {
for path in paths {
for builder in &mut builders {
if let Err(e) = builder.push(&path, path_separator) {
return handle_cmd_error(Some(&builder.cmd), e);
}
}
}
for builder in &mut builders {
if let Err(e) = builder.finish() {
return handle_cmd_error(Some(&builder.cmd), e);
}
}
merge_exitcodes(builders.iter().map(|b| b.exit_code()))
}
Err(e) => handle_cmd_error(None, e),
}
}
}
/// Represents a multi-exec command as it is built.
#[derive(Debug)]
struct CommandBuilder {
pre_args: Vec<OsString>,
path_arg: FormatTemplate,
post_args: Vec<OsString>,
cmd: Command,
count: usize,
limit: usize,
exit_code: ExitCode,
}
impl CommandBuilder {
fn new(template: &CommandTemplate, limit: usize) -> io::Result<Self> {
let mut pre_args = vec![];
let mut path_arg = None;
let mut post_args = vec![];
for arg in &template.args {
if arg.has_tokens() {
path_arg = Some(arg.clone());
} else if path_arg.is_none() {
pre_args.push(arg.generate("", None));
} else {
post_args.push(arg.generate("", None));
}
}
let cmd = Self::new_command(&pre_args)?;
Ok(Self {
pre_args,
path_arg: path_arg.unwrap(),
post_args,
cmd,
count: 0,
limit,
exit_code: ExitCode::Success,
})
}
fn new_command(pre_args: &[OsString]) -> io::Result<Command> {
let mut cmd = Command::new(&pre_args[0]);
cmd.stdin(Stdio::inherit());
cmd.stdout(Stdio::inherit());
cmd.stderr(Stdio::inherit());
cmd.try_args(&pre_args[1..])?;
Ok(cmd)
}
fn push(&mut self, path: &Path, separator: Option<&str>) -> io::Result<()> {
if self.limit > 0 && self.count >= self.limit {
self.finish()?;
}
let arg = self.path_arg.generate(path, separator);
if !self
.cmd
.args_would_fit(iter::once(&arg).chain(&self.post_args))
{
self.finish()?;
}
self.cmd.try_arg(arg)?;
self.count += 1;
Ok(())
}
fn finish(&mut self) -> io::Result<()> {
if self.count > 0 {
self.cmd.try_args(&self.post_args)?;
if !self.cmd.status()?.success() {
self.exit_code = ExitCode::GeneralError;
}
self.cmd = Self::new_command(&self.pre_args)?;
self.count = 0;
}
Ok(())
}
fn exit_code(&self) -> ExitCode {
self.exit_code
}
}
+++ added (v7.1.0):

/// Represents a template that is utilized to generate command strings.
///
/// The template is meant to be coupled with an input in order to generate a command. The
/// `generate_and_execute()` method will be used to generate a command and execute it.
#[derive(Debug, Clone, PartialEq)]
pub struct CommandTemplate {
    args: Vec<ArgumentTemplate>,
}

impl CommandTemplate {
    pub fn new<I, S>(input: I) -> CommandTemplate
    where
        I: IntoIterator<Item = S>,
        S: AsRef<str>,
    {
        lazy_static! {
            static ref PLACEHOLDER_PATTERN: Regex = Regex::new(r"\{(/?\.?|//)\}").unwrap();
        }

        let mut args = Vec::new();
        let mut has_placeholder = false;

        for arg in input {
            let arg = arg.as_ref();

            let mut tokens = Vec::new();
            let mut start = 0;

            for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) {
                // Leading text before the placeholder.
                if placeholder.start() > start {
                    tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
                }

                start = placeholder.end();

                match placeholder.as_str() {
                    "{}" => tokens.push(Token::Placeholder),
                    "{.}" => tokens.push(Token::NoExt),
                    "{/}" => tokens.push(Token::Basename),
                    "{//}" => tokens.push(Token::Parent),
                    "{/.}" => tokens.push(Token::BasenameNoExt),
                    _ => panic!("Unhandled placeholder"),
                }

                has_placeholder = true;
            }

            // Without a placeholder, the argument is just fixed text.
            if tokens.is_empty() {
                args.push(ArgumentTemplate::Text(arg.to_owned()));
                continue;
            }

            if start < arg.len() {
                // Trailing text after last placeholder.
                tokens.push(Token::Text(arg[start..].to_owned()));
            }

            args.push(ArgumentTemplate::Tokens(tokens));
        }

        // If a placeholder token was not supplied, append one at the end of the command.
        if !has_placeholder {
            args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
        }

        CommandTemplate { args: args }
    }

    /// Generates and executes a command.
    ///
    /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be
    /// built. Once all arguments have been processed, the command is executed.
    pub fn generate_and_execute(&self, input: &Path, out_perm: Arc<Mutex<()>>) {
        let input = input
            .strip_prefix(".")
            .unwrap_or(input)
            .to_string_lossy()
            .into_owned();

        let mut cmd = Command::new(self.args[0].generate(&input).as_ref());
        for arg in &self.args[1..] {
            cmd.arg(arg.generate(&input).as_ref());
        }

        execute_command(cmd, out_perm)
    }
}

/// Represents a template for a single command argument.
///
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
/// a fixed text.
#[derive(Clone, Debug, PartialEq)]
enum ArgumentTemplate {
    Tokens(Vec<Token>),
    Text(String),
}

impl ArgumentTemplate {
    pub fn generate<'a>(&'a self, path: &str) -> Cow<'a, str> {
        use self::Token::*;

        match *self {
            ArgumentTemplate::Tokens(ref tokens) => {
                let mut s = String::new();
                for token in tokens {
                    match *token {
                        Basename => s += basename(path),
                        BasenameNoExt => s += remove_extension(basename(path)),
                        NoExt => s += remove_extension(path),
                        Parent => s += dirname(path),
                        Placeholder => s += path,
                        Text(ref string) => s += string,
                    }
                }
                Cow::Owned(s)
            }
            ArgumentTemplate::Text(ref text) => Cow::Borrowed(text),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{ArgumentTemplate, CommandTemplate, Token};

    #[test]
    fn tokens() {
        let expected = CommandTemplate {
            args: vec![
                ArgumentTemplate::Text("echo".into()),
                ArgumentTemplate::Text("${SHELL}:".into()),
                ArgumentTemplate::Tokens(vec![Token::Placeholder]),
            ],
        };

        assert_eq!(CommandTemplate::new(&[&"echo", &"${SHELL}:"]), expected);

        assert_eq!(
            CommandTemplate::new(&["echo", "{.}"]),
            CommandTemplate {
                args: vec![
                    ArgumentTemplate::Text("echo".into()),
                    ArgumentTemplate::Tokens(vec![Token::NoExt]),
                ],
            }
        );
    }
}

--- removed (master):

/// Represents a template that is utilized to generate command strings.
///
/// The template is meant to be coupled with an input in order to generate a command. The
/// `generate_and_execute()` method will be used to generate a command and execute it.
#[derive(Debug, Clone, PartialEq)]
struct CommandTemplate {
    args: Vec<FormatTemplate>,
}

impl CommandTemplate {
    fn new<I, S>(input: I) -> Result<CommandTemplate>
    where
        I: IntoIterator<Item = S>,
        S: AsRef<str>,
    {
        let mut args = Vec::new();
        let mut has_placeholder = false;

        for arg in input {
            let arg = arg.as_ref();
            let tmpl = FormatTemplate::parse(arg);
            has_placeholder |= tmpl.has_tokens();
            args.push(tmpl);
        }

        // We need to check that we have at least one argument, because if not
        // it will try to execute each file and directory it finds.
        //
        // Sadly, clap can't currently handle this for us, see
        // https://github.com/clap-rs/clap/issues/3542
        if args.is_empty() {
            bail!("No executable provided for --exec or --exec-batch");
        }

        // If a placeholder token was not supplied, append one at the end of the command.
        if !has_placeholder {
            args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));
        }

        Ok(CommandTemplate { args })
    }

    fn number_of_tokens(&self) -> usize {
        self.args.iter().filter(|arg| arg.has_tokens()).count()
    }

    /// Generates and executes a command.
    ///
    /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be
    /// built.
    fn generate(&self, input: &Path, path_separator: Option<&str>) -> io::Result<Command> {
        let mut cmd = Command::new(self.args[0].generate(input, path_separator));
        for arg in &self.args[1..] {
            cmd.try_arg(arg.generate(input, path_separator))?;
        }
        Ok(cmd)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
        template
            .args
            .iter()
            .map(|arg| arg.generate(input, None).into_string().unwrap())
            .collect()
    }

    #[test]
    fn tokens_with_placeholder() {
        assert_eq!(
            CommandSet::new(vec![vec![&"echo", &"${SHELL}:"]]).unwrap(),
            CommandSet {
                commands: vec![CommandTemplate {
                    args: vec![
                        FormatTemplate::Text("echo".into()),
                        FormatTemplate::Text("${SHELL}:".into()),
                        FormatTemplate::Tokens(vec![Token::Placeholder]),
                    ]
                }],
                mode: ExecutionMode::OneByOne,
            }
        );
    }
#[test]
fn tokens_with_no_extension() {
assert_eq!(
CommandSet::new(vec![vec!["echo", "{.}"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::NoExt]),
],
}],
mode: ExecutionMode::OneByOne,
}
);
}
#[test]
fn tokens_with_basename() {
assert_eq!(
CommandSet::new(vec![vec!["echo", "{/}"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::Basename]),
],
}],
mode: ExecutionMode::OneByOne,
}
);
}
#[test]
fn tokens_with_parent() {
assert_eq!(
CommandSet::new(vec![vec!["echo", "{//}"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::Parent]),
],
}],
mode: ExecutionMode::OneByOne,
}
);
}
#[test]
fn tokens_with_basename_no_extension() {
assert_eq!(
CommandSet::new(vec![vec!["echo", "{/.}"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::BasenameNoExt]),
],
}],
mode: ExecutionMode::OneByOne,
}
);
}
#[test]
fn tokens_with_literal_braces() {
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
assert_eq!(
generate_str(&template, "foo"),
vec!["{}", "{", "{.}", "foo"]
);
}
#[test]
fn tokens_with_literal_braces_and_placeholder() {
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
}
#[test]
fn tokens_multiple() {
assert_eq!(
CommandSet::new(vec![vec!["cp", "{}", "{/.}.ext"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("cp".into()),
FormatTemplate::Tokens(vec![Token::Placeholder]),
FormatTemplate::Tokens(vec![
Token::BasenameNoExt,
Token::Text(".ext".into())
]),
],
}],
mode: ExecutionMode::OneByOne,
}
);
}
#[test]
fn tokens_single_batch() {
assert_eq!(
CommandSet::new_batch(vec![vec!["echo", "{.}"]]).unwrap(),
CommandSet {
commands: vec![CommandTemplate {
args: vec![
FormatTemplate::Text("echo".into()),
FormatTemplate::Tokens(vec![Token::NoExt]),
],
}],
mode: ExecutionMode::Batch,
}
);
}
#[test]
fn tokens_multiple_batch() {
assert!(CommandSet::new_batch(vec![vec!["echo", "{.}", "{}"]]).is_err());
}
#[test]
fn template_no_args() {
assert!(CommandTemplate::new::<Vec<_>, &'static str>(vec![]).is_err());
}
#[test]
fn command_set_no_args() {
assert!(CommandSet::new(vec![vec!["echo"], vec![]]).is_err());
}
#[test]
fn generate_custom_path_separator() {
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
macro_rules! check {
($input:expr, $expected:expr) => {
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
};
}
check!("foo", "foo");
check!("foo/bar", "foo#bar");
check!("/foo/bar/baz", "#foo#bar#baz");
}
#[cfg(windows)]
#[test]
fn generate_custom_path_separator_windows() {
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
macro_rules! check {
($input:expr, $expected:expr) => {
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
};
}
// path starting with a drive letter
check!(r"C:\foo\bar", "C:#foo#bar");
// UNC path
check!(r"\\server\share\path", "##server#share#path");
// Drive Relative path - no separator after the colon omits the RootDir path component.
// This is uncommon, but valid
check!(r"C:foo\bar", "C:foo#bar");
// forward slashes should get normalized and interpreted as separators
check!("C:/foo/bar", "C:#foo#bar");
check!("C:foo/bar", "C:foo#bar");
// Rust does not interpret "//server/share" as a UNC path, but rather as a normal
// absolute path that begins with RootDir, and the two slashes get combined together as
// a single path separator during normalization.
//check!("//server/share/path", "##server#share#path");
}
}

37
src/exec/token.rs Normal file
View File

@ -0,0 +1,37 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::fmt::{self, Display, Formatter};
/// Designates what should be written to a buffer
///
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
/// commands after all tokens for a given command template have been collected.
#[derive(Clone, Debug, PartialEq)]
pub enum Token {
Placeholder,
Basename,
Parent,
NoExt,
BasenameNoExt,
Text(String),
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
Token::Placeholder => f.write_str("{}")?,
Token::Basename => f.write_str("{/}")?,
Token::Parent => f.write_str("{//}")?,
Token::NoExt => f.write_str("{.}")?,
Token::BasenameNoExt => f.write_str("{/.}")?,
Token::Text(ref string) => f.write_str(string)?,
}
Ok(())
}
}
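
Not from either tree — the `Display` impl above means a token prints back as the placeholder syntax it was parsed from. An illustrative test, assuming it sits alongside this module:

#[cfg(test)]
mod display_tests {
    use super::Token;

    #[test]
    fn token_display_round_trips_placeholder_syntax() {
        // Each placeholder variant formats back to its command-line spelling.
        assert_eq!(Token::Basename.to_string(), "{/}");
        assert_eq!(
            format!("{}{}", Token::BasenameNoExt, Token::Text(".png".into())),
            "{/.}.png"
        );
    }
}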

View File

@ -1,94 +1,4 @@
+++ added (v7.1.0):

/// exit code 1 represents a general error
pub const ERROR: i32 = 1;
/// exit code 130 represents a process killed by signal SIGINT
pub const SIGINT: i32 = 130;

--- removed (master):

use std::process;

#[cfg(unix)]
use nix::sys::signal::{raise, signal, SigHandler, Signal};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExitCode {
Success,
HasResults(bool),
GeneralError,
KilledBySigint,
}
impl From<ExitCode> for i32 {
fn from(code: ExitCode) -> Self {
match code {
ExitCode::Success => 0,
ExitCode::HasResults(has_results) => !has_results as i32,
ExitCode::GeneralError => 1,
ExitCode::KilledBySigint => 130,
}
}
}
impl ExitCode {
fn is_error(self) -> bool {
i32::from(self) != 0
}
/// Exit the process with the appropriate code.
pub fn exit(self) -> ! {
#[cfg(unix)]
if self == ExitCode::KilledBySigint {
// Get rid of the SIGINT handler, if present, and raise SIGINT
unsafe {
if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() {
let _ = raise(Signal::SIGINT);
}
}
}
process::exit(self.into())
}
}
pub fn merge_exitcodes(results: impl IntoIterator<Item = ExitCode>) -> ExitCode {
if results.into_iter().any(ExitCode::is_error) {
return ExitCode::GeneralError;
}
ExitCode::Success
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn success_when_no_results() {
assert_eq!(merge_exitcodes([]), ExitCode::Success);
}
#[test]
fn general_error_if_at_least_one_error() {
assert_eq!(
merge_exitcodes([ExitCode::GeneralError]),
ExitCode::GeneralError
);
assert_eq!(
merge_exitcodes([ExitCode::KilledBySigint]),
ExitCode::GeneralError
);
assert_eq!(
merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]),
ExitCode::GeneralError
);
assert_eq!(
merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]),
ExitCode::GeneralError
);
assert_eq!(
merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]),
ExitCode::GeneralError
);
}
#[test]
fn success_if_no_error() {
assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success);
assert_eq!(
merge_exitcodes([ExitCode::Success, ExitCode::Success]),
ExitCode::Success
);
}
}

View File

@ -1,156 +0,0 @@
use std::borrow::Cow;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::io;
#[cfg(any(unix, target_os = "redox"))]
use std::os::unix::fs::FileTypeExt;
use std::path::{Path, PathBuf};
use normpath::PathExt;
use crate::dir_entry;
pub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> {
if path.is_absolute() {
return Ok(path.to_path_buf());
}
let path = path.strip_prefix(".").unwrap_or(path);
env::current_dir().map(|path_buf| path_buf.join(path))
}
pub fn absolute_path(path: &Path) -> io::Result<PathBuf> {
let path_buf = path_absolute_form(path)?;
#[cfg(windows)]
let path_buf = Path::new(
path_buf
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
Ok(path_buf)
}
pub fn is_existing_directory(path: &Path) -> bool {
// Note: we do not use `.exists()` here, as `.` always exists, even if
// the CWD has been deleted.
path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok())
}
pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
if let Some(file_type) = entry.file_type() {
if file_type.is_dir() {
if let Ok(mut entries) = fs::read_dir(entry.path()) {
entries.next().is_none()
} else {
false
}
} else if file_type.is_file() {
entry.metadata().map(|m| m.len() == 0).unwrap_or(false)
} else {
false
}
} else {
false
}
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_block_device(ft: fs::FileType) -> bool {
ft.is_block_device()
}
#[cfg(windows)]
pub fn is_block_device(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_char_device(ft: fs::FileType) -> bool {
ft.is_char_device()
}
#[cfg(windows)]
pub fn is_char_device(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_socket(ft: fs::FileType) -> bool {
ft.is_socket()
}
#[cfg(windows)]
pub fn is_socket(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_pipe(ft: fs::FileType) -> bool {
ft.is_fifo()
}
#[cfg(windows)]
pub fn is_pipe(_: fs::FileType) -> bool {
false
}
#[cfg(any(unix, target_os = "redox"))]
pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> {
use std::os::unix::ffi::OsStrExt;
Cow::Borrowed(input.as_bytes())
}
#[cfg(windows)]
pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> {
let string = input.to_string_lossy();
match string {
Cow::Owned(string) => Cow::Owned(string.into_bytes()),
Cow::Borrowed(string) => Cow::Borrowed(string.as_bytes()),
}
}
/// Remove the `./` prefix from a path.
pub fn strip_current_dir(path: &Path) -> &Path {
path.strip_prefix(".").unwrap_or(path)
}
/// Default value for the path separator. On MSYS/MSYS2 (detected via the MSYSTEM environment
/// variable), fd uses '/' as its path separator rather than Rust's default of '\'.
///
/// Returns Some to use a nonstandard path separator, or None to use Rust's default on the target
/// platform.
pub fn default_path_separator() -> Option<String> {
if cfg!(windows) {
let msystem = env::var("MSYSTEM").ok()?;
if !msystem.is_empty() {
return Some("/".to_owned());
}
}
None
}
#[cfg(test)]
mod tests {
use super::strip_current_dir;
use std::path::Path;
#[test]
fn strip_current_dir_basic() {
assert_eq!(strip_current_dir(Path::new("./foo")), Path::new("foo"));
assert_eq!(strip_current_dir(Path::new("foo")), Path::new("foo"));
assert_eq!(
strip_current_dir(Path::new("./foo/bar/baz")),
Path::new("foo/bar/baz")
);
assert_eq!(
strip_current_dir(Path::new("foo/bar/baz")),
Path::new("foo/bar/baz")
);
}
}

View File

@ -1,43 +0,0 @@
use crate::dir_entry;
use crate::filesystem;
use faccess::PathExt;
/// Whether or not to show entries of each file type.
#[derive(Default)]
pub struct FileTypes {
pub files: bool,
pub directories: bool,
pub symlinks: bool,
pub block_devices: bool,
pub char_devices: bool,
pub sockets: bool,
pub pipes: bool,
pub executables_only: bool,
pub empty_only: bool,
}
impl FileTypes {
pub fn should_ignore(&self, entry: &dir_entry::DirEntry) -> bool {
if let Some(ref entry_type) = entry.file_type() {
(!self.files && entry_type.is_file())
|| (!self.directories && entry_type.is_dir())
|| (!self.symlinks && entry_type.is_symlink())
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|| (!self.sockets && filesystem::is_socket(*entry_type))
|| (!self.pipes && filesystem::is_pipe(*entry_type))
|| (self.executables_only && !entry.path().executable())
|| (self.empty_only && !filesystem::is_empty(entry))
|| !(entry_type.is_file()
|| entry_type.is_dir()
|| entry_type.is_symlink()
|| filesystem::is_block_device(*entry_type)
|| filesystem::is_char_device(*entry_type)
|| filesystem::is_socket(*entry_type)
|| filesystem::is_pipe(*entry_type))
} else {
true
}
}
}

View File

@ -1,11 +0,0 @@
pub use self::size::SizeFilter;
pub use self::time::TimeFilter;
#[cfg(unix)]
pub use self::owner::OwnerFilter;
mod size;
mod time;
#[cfg(unix)]
mod owner;

View File

@ -1,140 +0,0 @@
use anyhow::{anyhow, Result};
use nix::unistd::{Group, User};
use std::fs;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct OwnerFilter {
uid: Check<u32>,
gid: Check<u32>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Check<T> {
Equal(T),
NotEq(T),
Ignore,
}
impl OwnerFilter {
const IGNORE: Self = OwnerFilter {
uid: Check::Ignore,
gid: Check::Ignore,
};
/// Parses an owner constraint.
/// Returns an error if the string is invalid.
/// Inputs that are acceptable but a no-op (such as "" or ":") parse to `OwnerFilter::IGNORE`.
pub fn from_string(input: &str) -> Result<Self> {
let mut it = input.split(':');
let (fst, snd) = (it.next(), it.next());
if it.next().is_some() {
return Err(anyhow!(
"more than one ':' present in owner string '{}'. See 'fd --help'.",
input
));
}
let uid = Check::parse(fst, |s| {
if let Ok(uid) = s.parse() {
Ok(uid)
} else {
User::from_name(s)?
.map(|user| user.uid.as_raw())
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
}
})?;
let gid = Check::parse(snd, |s| {
if let Ok(gid) = s.parse() {
Ok(gid)
} else {
Group::from_name(s)?
.map(|group| group.gid.as_raw())
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
}
})?;
Ok(OwnerFilter { uid, gid })
}
/// If self is a no-op (ignore both uid and gid) then return `None`, otherwise wrap in a `Some`
pub fn filter_ignore(self) -> Option<Self> {
if self == Self::IGNORE {
None
} else {
Some(self)
}
}
pub fn matches(&self, md: &fs::Metadata) -> bool {
use std::os::unix::fs::MetadataExt;
self.uid.check(md.uid()) && self.gid.check(md.gid())
}
}
impl<T: PartialEq> Check<T> {
fn check(&self, v: T) -> bool {
match self {
Check::Equal(x) => v == *x,
Check::NotEq(x) => v != *x,
Check::Ignore => true,
}
}
fn parse<F>(s: Option<&str>, f: F) -> Result<Self>
where
F: Fn(&str) -> Result<T>,
{
let (s, equality) = match s {
Some("") | None => return Ok(Check::Ignore),
Some(s) if s.starts_with('!') => (&s[1..], false),
Some(s) => (s, true),
};
f(s).map(|x| {
if equality {
Check::Equal(x)
} else {
Check::NotEq(x)
}
})
}
}
#[cfg(test)]
mod owner_parsing {
use super::OwnerFilter;
macro_rules! owner_tests {
($($name:ident: $value:expr => $result:pat,)*) => {
$(
#[test]
fn $name() {
let o = OwnerFilter::from_string($value);
match o {
$result => {},
_ => panic!("{:?} does not match {}", o, stringify!($result)),
}
}
)*
};
}
use super::Check::*;
owner_tests! {
empty: "" => Ok(OwnerFilter::IGNORE),
uid_only: "5" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }),
uid_gid: "9:3" => Ok(OwnerFilter { uid: Equal(9), gid: Equal(3) }),
gid_only: ":8" => Ok(OwnerFilter { uid: Ignore, gid: Equal(8) }),
colon_only: ":" => Ok(OwnerFilter::IGNORE),
trailing: "5:" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }),
uid_negate: "!5" => Ok(OwnerFilter { uid: NotEq(5), gid: Ignore }),
both_negate:"!4:!3" => Ok(OwnerFilter { uid: NotEq(4), gid: NotEq(3) }),
uid_not_gid:"6:!8" => Ok(OwnerFilter { uid: Equal(6), gid: NotEq(8) }),
more_colons:"3:5:" => Err(_),
only_colons:"::" => Err(_),
}
}

View File

@ -1,219 +0,0 @@
use std::sync::OnceLock;
use anyhow::anyhow;
use regex::Regex;
static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new();
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SizeFilter {
Max(u64),
Min(u64),
Equals(u64),
}
// SI prefixes (powers of 10)
const KILO: u64 = 1000;
const MEGA: u64 = KILO * 1000;
const GIGA: u64 = MEGA * 1000;
const TERA: u64 = GIGA * 1000;
// Binary prefixes (powers of 2)
const KIBI: u64 = 1024;
const MEBI: u64 = KIBI * 1024;
const GIBI: u64 = MEBI * 1024;
const TEBI: u64 = GIBI * 1024;
impl SizeFilter {
pub fn from_string(s: &str) -> anyhow::Result<Self> {
SizeFilter::parse_opt(s)
.ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", s))
}
fn parse_opt(s: &str) -> Option<Self> {
let pattern =
SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
if !pattern.is_match(s) {
return None;
}
let captures = pattern.captures(s)?;
let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
let quantity = captures
.get(2)
.and_then(|v| v.as_str().parse::<u64>().ok())?;
let multiplier = match &captures.get(3).map_or("b", |m| m.as_str()).to_lowercase()[..] {
v if v.starts_with("ki") => KIBI,
v if v.starts_with('k') => KILO,
v if v.starts_with("mi") => MEBI,
v if v.starts_with('m') => MEGA,
v if v.starts_with("gi") => GIBI,
v if v.starts_with('g') => GIGA,
v if v.starts_with("ti") => TEBI,
v if v.starts_with('t') => TERA,
"b" => 1,
_ => return None,
};
let size = quantity * multiplier;
match limit_kind {
"+" => Some(SizeFilter::Min(size)),
"-" => Some(SizeFilter::Max(size)),
"" => Some(SizeFilter::Equals(size)),
_ => None,
}
}
pub fn is_within(&self, size: u64) -> bool {
match *self {
SizeFilter::Max(limit) => size <= limit,
SizeFilter::Min(limit) => size >= limit,
SizeFilter::Equals(limit) => size == limit,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
macro_rules! gen_size_filter_parse_test {
($($name: ident: $val: expr,)*) => {
$(
#[test]
fn $name() {
let (txt, expected) = $val;
let actual = SizeFilter::from_string(txt).unwrap();
assert_eq!(actual, expected);
}
)*
};
}
// Parsing and size conversion tests data. Ensure that each type gets properly interpreted.
// Call with higher base values to ensure expected multiplication (only need a couple)
gen_size_filter_parse_test! {
byte_plus: ("+1b", SizeFilter::Min(1)),
byte_plus_multiplier: ("+10b", SizeFilter::Min(10)),
byte_minus: ("-1b", SizeFilter::Max(1)),
kilo_plus: ("+1k", SizeFilter::Min(1000)),
kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)),
kilo_minus: ("-1k", SizeFilter::Max(1000)),
kilo_minus_multiplier: ("-100k", SizeFilter::Max(100_000)),
kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)),
kilo_plus_upper: ("+1K", SizeFilter::Min(1000)),
kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)),
kilo_minus_upper: ("-1K", SizeFilter::Max(1000)),
kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)),
kibi_plus: ("+1ki", SizeFilter::Min(1024)),
kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10_240)),
kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)),
kibi_minus: ("-1ki", SizeFilter::Max(1024)),
kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102_400)),
kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)),
kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)),
kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)),
kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)),
kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)),
mega_plus: ("+1m", SizeFilter::Min(1_000_000)),
mega_plus_suffix: ("+1mb", SizeFilter::Min(1_000_000)),
mega_minus: ("-1m", SizeFilter::Max(1_000_000)),
mega_minus_suffix: ("-1mb", SizeFilter::Max(1_000_000)),
mega_plus_upper: ("+1M", SizeFilter::Min(1_000_000)),
mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1_000_000)),
mega_minus_upper: ("-1M", SizeFilter::Max(1_000_000)),
mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1_000_000)),
mebi_plus: ("+1mi", SizeFilter::Min(1_048_576)),
mebi_plus_suffix: ("+1mib", SizeFilter::Min(1_048_576)),
mebi_minus: ("-1mi", SizeFilter::Max(1_048_576)),
mebi_minus_suffix: ("-1mib", SizeFilter::Max(1_048_576)),
mebi_plus_upper: ("+1MI", SizeFilter::Min(1_048_576)),
mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1_048_576)),
mebi_minus_upper: ("-1Mi", SizeFilter::Max(1_048_576)),
mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1_048_576)),
giga_plus: ("+1g", SizeFilter::Min(1_000_000_000)),
giga_plus_suffix: ("+1gb", SizeFilter::Min(1_000_000_000)),
giga_minus: ("-1g", SizeFilter::Max(1_000_000_000)),
giga_minus_suffix: ("-1gb", SizeFilter::Max(1_000_000_000)),
giga_plus_upper: ("+1G", SizeFilter::Min(1_000_000_000)),
giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1_000_000_000)),
giga_minus_upper: ("-1G", SizeFilter::Max(1_000_000_000)),
giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1_000_000_000)),
gibi_plus: ("+1gi", SizeFilter::Min(1_073_741_824)),
gibi_plus_suffix: ("+1gib", SizeFilter::Min(1_073_741_824)),
gibi_minus: ("-1gi", SizeFilter::Max(1_073_741_824)),
gibi_minus_suffix: ("-1gib", SizeFilter::Max(1_073_741_824)),
gibi_plus_upper: ("+1GI", SizeFilter::Min(1_073_741_824)),
gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1_073_741_824)),
gibi_minus_upper: ("-1Gi", SizeFilter::Max(1_073_741_824)),
gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1_073_741_824)),
tera_plus: ("+1t", SizeFilter::Min(1_000_000_000_000)),
tera_plus_suffix: ("+1tb", SizeFilter::Min(1_000_000_000_000)),
tera_minus: ("-1t", SizeFilter::Max(1_000_000_000_000)),
tera_minus_suffix: ("-1tb", SizeFilter::Max(1_000_000_000_000)),
tera_plus_upper: ("+1T", SizeFilter::Min(1_000_000_000_000)),
tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1_000_000_000_000)),
tera_minus_upper: ("-1T", SizeFilter::Max(1_000_000_000_000)),
tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1_000_000_000_000)),
tebi_plus: ("+1ti", SizeFilter::Min(1_099_511_627_776)),
tebi_plus_suffix: ("+1tib", SizeFilter::Min(1_099_511_627_776)),
tebi_minus: ("-1ti", SizeFilter::Max(1_099_511_627_776)),
tebi_minus_suffix: ("-1tib", SizeFilter::Max(1_099_511_627_776)),
tebi_plus_upper: ("+1TI", SizeFilter::Min(1_099_511_627_776)),
tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1_099_511_627_776)),
tebi_minus_upper: ("-1Ti", SizeFilter::Max(1_099_511_627_776)),
tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1_099_511_627_776)),
}
/// Invalid parse testing
macro_rules! gen_size_filter_failure {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let i = SizeFilter::from_string($value);
assert!(i.is_err());
}
)*
};
}
// Invalid parse data
gen_size_filter_failure! {
ensure_missing_number_returns_none: "+g",
ensure_missing_unit_returns_none: "+18",
ensure_bad_format_returns_none_1: "$10M",
ensure_bad_format_returns_none_2: "badval",
ensure_bad_format_returns_none_3: "9999",
ensure_invalid_unit_returns_none_1: "+50a",
ensure_invalid_unit_returns_none_2: "-10v",
ensure_invalid_unit_returns_none_3: "+1Mv",
ensure_bib_format_returns_none: "+1bib",
ensure_bb_format_returns_none: "+1bb",
}
#[test]
fn is_within_less_than() {
let f = SizeFilter::from_string("-1k").unwrap();
assert!(f.is_within(999));
}
#[test]
fn is_within_less_than_equal() {
let f = SizeFilter::from_string("-1k").unwrap();
assert!(f.is_within(1000));
}
#[test]
fn is_within_greater_than() {
let f = SizeFilter::from_string("+1k").unwrap();
assert!(f.is_within(1001));
}
#[test]
fn is_within_greater_than_equal() {
let f = SizeFilter::from_string("+1K").unwrap();
assert!(f.is_within(1000));
}
}

View File

@ -1,170 +0,0 @@
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime};
use std::time::SystemTime;
/// Filter based on time ranges.
#[derive(Debug, PartialEq, Eq)]
pub enum TimeFilter {
Before(SystemTime),
After(SystemTime),
}
impl TimeFilter {
fn from_str(ref_time: &SystemTime, s: &str) -> Option<SystemTime> {
humantime::parse_duration(s)
.map(|duration| *ref_time - duration)
.ok()
.or_else(|| {
DateTime::parse_from_rfc3339(s)
.map(|dt| dt.into())
.ok()
.or_else(|| {
NaiveDate::parse_from_str(s, "%F")
.ok()?
.and_hms_opt(0, 0, 0)?
.and_local_timezone(Local)
.latest()
})
.or_else(|| {
NaiveDateTime::parse_from_str(s, "%F %T")
.ok()?
.and_local_timezone(Local)
.latest()
})
.or_else(|| {
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
})
.map(|dt| dt.into())
})
}
pub fn before(ref_time: &SystemTime, s: &str) -> Option<TimeFilter> {
TimeFilter::from_str(ref_time, s).map(TimeFilter::Before)
}
pub fn after(ref_time: &SystemTime, s: &str) -> Option<TimeFilter> {
TimeFilter::from_str(ref_time, s).map(TimeFilter::After)
}
pub fn applies_to(&self, t: &SystemTime) -> bool {
match self {
TimeFilter::Before(limit) => t < limit,
TimeFilter::After(limit) => t > limit,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
#[test]
fn is_time_filter_applicable() {
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T")
.unwrap()
.and_local_timezone(Local)
.latest()
.unwrap()
.into();
assert!(TimeFilter::after(&ref_time, "1min")
.unwrap()
.applies_to(&ref_time));
assert!(!TimeFilter::before(&ref_time, "1min")
.unwrap()
.applies_to(&ref_time));
let t1m_ago = ref_time - Duration::from_secs(60);
assert!(!TimeFilter::after(&ref_time, "30sec")
.unwrap()
.applies_to(&t1m_ago));
assert!(TimeFilter::after(&ref_time, "2min")
.unwrap()
.applies_to(&t1m_ago));
assert!(TimeFilter::before(&ref_time, "30sec")
.unwrap()
.applies_to(&t1m_ago));
assert!(!TimeFilter::before(&ref_time, "2min")
.unwrap()
.applies_to(&t1m_ago));
let t10s_before = "2010-10-10 10:10:00";
assert!(!TimeFilter::before(&ref_time, t10s_before)
.unwrap()
.applies_to(&ref_time));
assert!(TimeFilter::before(&ref_time, t10s_before)
.unwrap()
.applies_to(&t1m_ago));
assert!(TimeFilter::after(&ref_time, t10s_before)
.unwrap()
.applies_to(&ref_time));
assert!(!TimeFilter::after(&ref_time, t10s_before)
.unwrap()
.applies_to(&t1m_ago));
let same_day = "2010-10-10";
assert!(!TimeFilter::before(&ref_time, same_day)
.unwrap()
.applies_to(&ref_time));
assert!(!TimeFilter::before(&ref_time, same_day)
.unwrap()
.applies_to(&t1m_ago));
assert!(TimeFilter::after(&ref_time, same_day)
.unwrap()
.applies_to(&ref_time));
assert!(TimeFilter::after(&ref_time, same_day)
.unwrap()
.applies_to(&t1m_ago));
let ref_time = DateTime::parse_from_rfc3339("2010-10-10T10:10:10+00:00")
.unwrap()
.into();
let t1m_ago = ref_time - Duration::from_secs(60);
let t10s_before = "2010-10-10T10:10:00+00:00";
assert!(!TimeFilter::before(&ref_time, t10s_before)
.unwrap()
.applies_to(&ref_time));
assert!(TimeFilter::before(&ref_time, t10s_before)
.unwrap()
.applies_to(&t1m_ago));
assert!(TimeFilter::after(&ref_time, t10s_before)
.unwrap()
.applies_to(&ref_time));
assert!(!TimeFilter::after(&ref_time, t10s_before)
.unwrap()
.applies_to(&t1m_ago));
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
.unwrap()
.into();
let t1m_ago = ref_time - Duration::from_secs(60);
let t1s_later = ref_time + Duration::from_secs(1);
// Timestamp only supported via '@' prefix
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
assert!(
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1m_ago)
);
assert!(
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1s_later)
);
assert!(
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1m_ago)
);
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
.unwrap()
.applies_to(&t1s_later));
}
}
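A note on the removed time filter above: TimeFilter::from_str accepts relative durations (via humantime), RFC 3339 timestamps, bare dates ("%F"), date-times ("%F %T"), and '@'-prefixed Unix timestamps. A minimal usage sketch, assuming the TimeFilter type from the listing above is in scope and the same humantime/chrono dependencies are available:

    use std::time::SystemTime;

    // Hypothetical helper mirroring a `--changed-within 1d` style check.
    fn modified_within_one_day(modified: SystemTime) -> bool {
        let now = SystemTime::now();
        // "1d" is parsed by humantime; the filter keeps times after (now - 1 day).
        let filter = TimeFilter::after(&now, "1d").expect("humantime accepts '1d'");
        filter.applies_to(&modified)
    }
    // Other accepted reference strings, per from_str above:
    //   "2010-10-10", "2010-10-10 10:10:10", "2010-10-10T10:10:10+00:00", "@1707723412"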

View File

@ -1,87 +0,0 @@
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use crate::filesystem::strip_current_dir;
/// Removes the parent component of the path
pub fn basename(path: &Path) -> &OsStr {
path.file_name().unwrap_or(path.as_os_str())
}
/// Removes the extension from the path
pub fn remove_extension(path: &Path) -> OsString {
let dirname = dirname(path);
let stem = path.file_stem().unwrap_or(path.as_os_str());
let path = PathBuf::from(dirname).join(stem);
strip_current_dir(&path).to_owned().into_os_string()
}
/// Removes the basename from the path.
pub fn dirname(path: &Path) -> OsString {
path.parent()
.map(|p| {
if p == OsStr::new("") {
OsString::from(".")
} else {
p.as_os_str().to_owned()
}
})
.unwrap_or_else(|| path.as_os_str().to_owned())
}
#[cfg(test)]
mod path_tests {
use super::*;
use std::path::MAIN_SEPARATOR_STR;
fn correct(input: &str) -> String {
input.replace('/', MAIN_SEPARATOR_STR)
}
macro_rules! func_tests {
($($name:ident: $func:ident for $input:expr => $output:expr)+) => {
$(
#[test]
fn $name() {
let input_path = PathBuf::from(&correct($input));
let output_string = OsString::from(correct($output));
assert_eq!($func(&input_path), output_string);
}
)+
}
}
func_tests! {
remove_ext_simple: remove_extension for "foo.txt" => "foo"
remove_ext_dir: remove_extension for "dir/foo.txt" => "dir/foo"
hidden: remove_extension for ".foo" => ".foo"
remove_ext_utf8: remove_extension for "💖.txt" => "💖"
remove_ext_empty: remove_extension for "" => ""
basename_simple: basename for "foo.txt" => "foo.txt"
basename_dir: basename for "dir/foo.txt" => "foo.txt"
basename_empty: basename for "" => ""
basename_utf8_0: basename for "💖/foo.txt" => "foo.txt"
basename_utf8_1: basename for "dir/💖.txt" => "💖.txt"
dirname_simple: dirname for "foo.txt" => "."
dirname_dir: dirname for "dir/foo.txt" => "dir"
dirname_utf8_0: dirname for "💖/foo.txt" => "💖"
dirname_utf8_1: dirname for "dir/💖.txt" => "dir"
}
#[test]
#[cfg(windows)]
fn dirname_root() {
assert_eq!(dirname(&PathBuf::from("C:")), OsString::from("C:"));
assert_eq!(dirname(&PathBuf::from("\\")), OsString::from("\\"));
}
#[test]
#[cfg(not(windows))]
fn dirname_root() {
assert_eq!(dirname(&PathBuf::from("/")), OsString::from("/"));
}
}

View File

@ -1,281 +0,0 @@
mod input;
use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display, Formatter};
use std::path::{Component, Path, Prefix};
use std::sync::OnceLock;
use aho_corasick::AhoCorasick;
use self::input::{basename, dirname, remove_extension};
/// Designates what should be written to a buffer
///
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
/// commands after all tokens for a given command template have been collected.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
Placeholder,
Basename,
Parent,
NoExt,
BasenameNoExt,
Text(String),
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
Token::Placeholder => f.write_str("{}")?,
Token::Basename => f.write_str("{/}")?,
Token::Parent => f.write_str("{//}")?,
Token::NoExt => f.write_str("{.}")?,
Token::BasenameNoExt => f.write_str("{/.}")?,
Token::Text(ref string) => f.write_str(string)?,
}
Ok(())
}
}
/// A parsed format string
///
/// This is either a collection of `Token`s including at least one placeholder variant,
/// or a fixed text.
#[derive(Clone, Debug, PartialEq)]
pub enum FormatTemplate {
Tokens(Vec<Token>),
Text(String),
}
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
impl FormatTemplate {
pub fn has_tokens(&self) -> bool {
matches!(self, FormatTemplate::Tokens(_))
}
pub fn parse(fmt: &str) -> Self {
// NOTE: we assume that { and } have the same length
const BRACE_LEN: usize = '{'.len_utf8();
let mut tokens = Vec::new();
let mut remaining = fmt;
let mut buf = String::new();
let placeholders = PLACEHOLDERS.get_or_init(|| {
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
});
while let Some(m) = placeholders.find(remaining) {
match m.pattern().as_u32() {
0 | 1 => {
// we found an escaped {{ or }}, so add
// everything up to the first char to the buffer
// then skip the second one.
buf += &remaining[..m.start() + BRACE_LEN];
remaining = &remaining[m.end()..];
}
id if !remaining[m.end()..].starts_with('}') => {
buf += &remaining[..m.start()];
if !buf.is_empty() {
tokens.push(Token::Text(std::mem::take(&mut buf)));
}
tokens.push(token_from_pattern_id(id));
remaining = &remaining[m.end()..];
}
_ => {
// We got a normal pattern, but the final "}"
// is escaped, so add up to that to the buffer, then
// skip the final }
buf += &remaining[..m.end()];
remaining = &remaining[m.end() + BRACE_LEN..];
}
}
}
// Add the rest of the string to the buffer, and add the final buffer to the tokens
if !remaining.is_empty() {
buf += remaining;
}
if tokens.is_empty() {
// No placeholders were found, so just return the text
return FormatTemplate::Text(buf);
}
// Add final text segment
if !buf.is_empty() {
tokens.push(Token::Text(buf));
}
debug_assert!(!tokens.is_empty());
FormatTemplate::Tokens(tokens)
}
/// Generate a result string from this template. If path_separator is Some, then it will replace
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
/// path separator substitution.
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
use Token::*;
let path = path.as_ref();
match *self {
Self::Tokens(ref tokens) => {
let mut s = OsString::new();
for token in tokens {
match token {
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
BasenameNoExt => s.push(Self::replace_separator(
&remove_extension(basename(path).as_ref()),
path_separator,
)),
NoExt => s.push(Self::replace_separator(
&remove_extension(path),
path_separator,
)),
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
Placeholder => {
s.push(Self::replace_separator(path.as_ref(), path_separator))
}
Text(ref string) => s.push(string),
}
}
s
}
Self::Text(ref text) => OsString::from(text),
}
}
/// Replace the path separator in the input with the custom separator string. If path_separator
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
/// interpreted as a Path and its components are iterated through and re-joined into a new
/// OsString.
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
// fast-path - no replacement necessary
if path_separator.is_none() {
return Cow::Borrowed(path);
}
let path_separator = path_separator.unwrap();
let mut out = OsString::with_capacity(path.len());
let mut components = Path::new(path).components().peekable();
while let Some(comp) = components.next() {
match comp {
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
// letter or UNC path, and is usually followed by RootDir. There are also
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
// ignore verbatim path prefixes here because they're very rare, might be
// impossible to reach here, and there's no good way to deal with them. If users
// are doing something advanced involving verbatim windows paths, they can do their
// own output filtering with a tool like sed.
Component::Prefix(prefix) => {
if let Prefix::UNC(server, share) = prefix.kind() {
// Prefix::UNC is a parsed version of '\\server\share'
out.push(path_separator);
out.push(path_separator);
out.push(server);
out.push(path_separator);
out.push(share);
} else {
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
// but they're not returned by directories fd can search anyway so we don't worry
// about them.
out.push(comp.as_os_str());
}
}
// Root directory is always replaced with the custom separator.
Component::RootDir => out.push(path_separator),
// Everything else is joined normally, with a trailing separator if we're not last
_ => {
out.push(comp.as_os_str());
if components.peek().is_some() {
out.push(path_separator);
}
}
}
}
Cow::Owned(out)
}
}
// Convert the id from an aho-corasick match to the
// appropriate token
fn token_from_pattern_id(id: u32) -> Token {
use Token::*;
match id {
2 => Placeholder,
3 => Basename,
4 => Parent,
5 => NoExt,
6 => BasenameNoExt,
_ => unreachable!(),
}
}
#[cfg(test)]
mod fmt_tests {
use super::*;
use std::path::PathBuf;
#[test]
fn parse_no_placeholders() {
let templ = FormatTemplate::parse("This string has no placeholders");
assert_eq!(
templ,
FormatTemplate::Text("This string has no placeholders".into())
);
}
#[test]
fn parse_only_brace_escapes() {
let templ = FormatTemplate::parse("This string only has escapes like {{ and }}");
assert_eq!(
templ,
FormatTemplate::Text("This string only has escapes like { and }".into())
);
}
#[test]
fn all_placeholders() {
use Token::*;
let templ = FormatTemplate::parse(
"{{path={} \
basename={/} \
parent={//} \
noExt={.} \
basenameNoExt={/.} \
}}",
);
assert_eq!(
templ,
FormatTemplate::Tokens(vec![
Text("{path=".into()),
Placeholder,
Text(" basename=".into()),
Basename,
Text(" parent=".into()),
Parent,
Text(" noExt=".into()),
NoExt,
Text(" basenameNoExt=".into()),
BasenameNoExt,
Text(" }".into()),
])
);
let mut path = PathBuf::new();
path.push("a");
path.push("folder");
path.push("file.txt");
let expanded = templ.generate(&path, Some("/")).into_string().unwrap();
assert_eq!(
expanded,
"{path=a/folder/file.txt \
basename=file.txt \
parent=a/folder \
noExt=a/folder/file \
basenameNoExt=file }"
);
}
}
}
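As a quick illustration of the template type above (a sketch of a hypothetical call site; FormatTemplate from this listing is assumed to be in scope):

    use std::ffi::OsString;
    use std::path::Path;

    // "{/.}" expands to the basename without its extension, so "dir/file.txt"
    // becomes "file.bak" -- mirroring a `--format '{/.}.bak'` style invocation.
    fn backup_name_for(path: &Path) -> OsString {
        let template = FormatTemplate::parse("{/.}.bak");
        template.generate(path, None)
    }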

77
src/fshelper/mod.rs Normal file
View File

@ -0,0 +1,77 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::env::current_dir;
use std::fs;
use std::io;
#[cfg(any(unix, target_os = "redox"))]
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use ignore::DirEntry;
pub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> {
if path.is_absolute() {
Ok(path.to_path_buf())
} else {
let path = path.strip_prefix(".").unwrap_or(path);
current_dir().map(|path_buf| path_buf.join(path))
}
}
pub fn absolute_path(path: &Path) -> io::Result<PathBuf> {
let path_buf = path_absolute_form(path)?;
#[cfg(windows)]
let path_buf = Path::new(
path_buf
.as_path()
.to_string_lossy()
.trim_left_matches(r"\\?\"),
).to_path_buf();
Ok(path_buf)
}
// Path::is_dir() is not guaranteed to be intuitively correct for "." and ".."
// See: https://github.com/rust-lang/rust/issues/45302
pub fn is_dir(path: &Path) -> bool {
if path.file_name().is_some() {
path.is_dir()
} else {
path.is_dir() && path.canonicalize().is_ok()
}
}
#[cfg(any(unix, target_os = "redox"))]
pub fn is_executable(md: &fs::Metadata) -> bool {
md.permissions().mode() & 0o111 != 0
}
#[cfg(windows)]
pub fn is_executable(_: &fs::Metadata) -> bool {
false
}
pub fn is_empty(entry: &DirEntry) -> bool {
if let Some(file_type) = entry.file_type() {
if file_type.is_dir() {
if let Ok(mut entries) = fs::read_dir(entry.path()) {
entries.next().is_none()
} else {
false
}
} else if file_type.is_file() {
entry.metadata().map(|m| m.len() == 0).unwrap_or(false)
} else {
false
}
} else {
false
}
}
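A tiny illustration of how the helpers above compose (hypothetical wrapper, not part of the diff):

    use std::fs;
    use std::path::Path;

    // On Unix, is_executable checks the 0o111 permission bits; on Windows it is
    // always false, so this wrapper simply reports false there as well.
    fn path_is_executable(path: &Path) -> bool {
        fs::metadata(path)
            .map(|md| is_executable(&md))
            .unwrap_or(false)
    }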

View File

@ -1,87 +0,0 @@
use crate::filesystem::absolute_path;
use std::fmt::{self, Formatter, Write};
use std::path::{Path, PathBuf};
pub(crate) struct PathUrl(PathBuf);
impl PathUrl {
pub(crate) fn new(path: &Path) -> Option<PathUrl> {
Some(PathUrl(absolute_path(path).ok()?))
}
}
impl fmt::Display for PathUrl {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "file://{}", host())?;
let bytes = self.0.as_os_str().as_encoded_bytes();
for &byte in bytes.iter() {
encode(f, byte)?;
}
Ok(())
}
}
fn encode(f: &mut Formatter, byte: u8) -> fmt::Result {
// NOTE:
// Most terminals can handle non-ascii unicode characters in a file url fine. But on some OSes (notably
// windows), the encoded bytes of the path may not be valid UTF-8. Since we don't know if a
// byte >= 128 is part of a valid UTF-8 encoding or not, we just percent encode any non-ascii
// byte.
// Percent encoding these bytes is probably safer anyway.
match byte {
b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z' | b'/' | b':' | b'-' | b'.' | b'_' | b'~' => {
f.write_char(byte.into())
}
#[cfg(windows)]
b'\\' => f.write_char('/'),
_ => {
write!(f, "%{:02X}", byte)
}
}
}
#[cfg(unix)]
fn host() -> &'static str {
use std::sync::OnceLock;
static HOSTNAME: OnceLock<String> = OnceLock::new();
HOSTNAME
.get_or_init(|| {
nix::unistd::gethostname()
.ok()
.and_then(|h| h.into_string().ok())
.unwrap_or_default()
})
.as_ref()
}
#[cfg(not(unix))]
const fn host() -> &'static str {
""
}
#[cfg(test)]
mod test {
use super::*;
// This allows us to test the encoding without having to worry about the host, or absolute path
struct Encoded(&'static str);
impl fmt::Display for Encoded {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
for byte in self.0.bytes() {
encode(f, byte)?;
}
Ok(())
}
}
#[test]
fn test_unicode_encoding() {
assert_eq!(
Encoded("$*\x1bßé/∫😃\x07").to_string(),
"%24%2A%1B%C3%9F%C3%A9/%E2%88%AB%F0%9F%98%83%07",
);
}
}
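PathUrl above exists so that output can wrap paths in terminal hyperlinks. A rough sketch of that wrapping with the OSC 8 escape sequence (the real integration lives in the output code, which is not part of this hunk):

    use std::path::Path;

    // Wrap `path` in an OSC 8 hyperlink: ESC ]8;;<uri> ESC \ <text> ESC ]8;; ESC \
    fn hyperlinked(path: &Path) -> Option<String> {
        let url = PathUrl::new(path)?;
        Some(format!(
            "\u{1b}]8;;{}\u{1b}\\{}\u{1b}]8;;\u{1b}\\",
            url,
            path.display()
        ))
    }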

481
src/internal.rs Normal file
View File

@ -0,0 +1,481 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::ffi::OsString;
use std::io::Write;
use std::path::PathBuf;
use std::process;
use std::time;
use exec::CommandTemplate;
use lscolors::LsColors;
use regex::{Regex, RegexSet};
use regex_syntax::hir::Hir;
use regex_syntax::Parser;
lazy_static! {
static ref SIZE_CAPTURES: Regex = { Regex::new(r"(?i)^([+-])(\d+)(b|[kmgt]i?b?)$").unwrap() };
}
/// Whether or not to show a particular type of file system entry.
pub struct FileTypes {
pub files: bool,
pub directories: bool,
pub symlinks: bool,
pub executables_only: bool,
pub empty_only: bool,
}
impl Default for FileTypes {
fn default() -> FileTypes {
FileTypes {
files: false,
directories: false,
symlinks: false,
executables_only: false,
empty_only: false,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SizeFilter {
Max(u64),
Min(u64),
}
// SI prefixes (powers of 10)
const KILO: u64 = 1000;
const MEGA: u64 = KILO * 1000;
const GIGA: u64 = MEGA * 1000;
const TERA: u64 = GIGA * 1000;
// Binary prefixes (powers of 2)
const KIBI: u64 = 1024;
const MEBI: u64 = KIBI * 1024;
const GIBI: u64 = MEBI * 1024;
const TEBI: u64 = GIBI * 1024;
impl SizeFilter {
pub fn from_string<'a>(s: &str) -> Option<Self> {
if !SIZE_CAPTURES.is_match(s) {
return None;
}
let captures = match SIZE_CAPTURES.captures(s) {
Some(cap) => cap,
None => return None,
};
let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
let quantity = match captures.get(2) {
None => return None,
Some(v) => match v.as_str().parse::<u64>() {
Ok(val) => val,
_ => return None,
},
};
let multiplier = match &captures.get(3).map_or("b", |m| m.as_str()).to_lowercase()[..] {
v if v.starts_with("ki") => KIBI,
v if v.starts_with("k") => KILO,
v if v.starts_with("mi") => MEBI,
v if v.starts_with("m") => MEGA,
v if v.starts_with("gi") => GIBI,
v if v.starts_with("g") => GIGA,
v if v.starts_with("ti") => TEBI,
v if v.starts_with("t") => TERA,
"b" => 1,
_ => return None,
};
let size = quantity * multiplier;
Some(match limit_kind {
"+" => SizeFilter::Min(size),
_ => SizeFilter::Max(size),
})
}
pub fn is_within(&self, size: u64) -> bool {
match self {
&SizeFilter::Max(limit) => size <= limit,
&SizeFilter::Min(limit) => size >= limit,
}
}
}
/// Configuration options for *fd*.
pub struct FdOptions {
/// Whether the search is case-sensitive or case-insensitive.
pub case_sensitive: bool,
/// Whether to search within the full file path or just the base name (filename or directory
/// name).
pub search_full_path: bool,
/// Whether to ignore hidden files and directories (or not).
pub ignore_hidden: bool,
/// Whether to respect `.fdignore` files or not.
pub read_fdignore: bool,
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
pub read_vcsignore: bool,
/// Whether to follow symlinks or not.
pub follow_links: bool,
/// Whether elements of output should be separated by a null character
pub null_separator: bool,
/// The maximum search depth, or `None` if no maximum search depth should be set.
///
/// A depth of `1` includes all files under the current directory, a depth of `2` also includes
/// all files under subdirectories of the current directory, etc.
pub max_depth: Option<usize>,
/// The number of threads to use.
pub threads: usize,
/// Time to buffer results internally before streaming to the console. This is useful to
/// provide a sorted output, in case the total execution time is shorter than
/// `max_buffer_time`.
pub max_buffer_time: Option<time::Duration>,
/// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines
/// how to style different filetypes.
pub ls_colors: Option<LsColors>,
/// The type of file to search for. If set to `None`, all file types are displayed. If
/// set to `Some(..)`, only the types that are specified are shown.
pub file_types: Option<FileTypes>,
/// The extension to search for. Only entries matching the extension will be included.
///
/// The value (if present) will be a lowercase string without leading dots.
pub extensions: Option<RegexSet>,
/// If a value is supplied, each item found will be used to generate and execute commands.
pub command: Option<CommandTemplate>,
/// A list of glob patterns that should be excluded from the search.
pub exclude_patterns: Vec<String>,
/// A list of custom ignore files.
pub ignore_files: Vec<PathBuf>,
/// The given constraints on the size of returned files
pub size_constraints: Vec<SizeFilter>,
}
/// Print error message to stderr and exit with status `1`.
pub fn error(message: &str) -> ! {
writeln!(&mut ::std::io::stderr(), "{}", message).expect("Failed writing to stderr");
process::exit(1);
}
/// Determine if a regex pattern contains a literal uppercase character.
pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
Parser::new()
.parse(pattern)
.map(|hir| hir_has_uppercase_char(&hir))
.unwrap_or(false)
}
/// Determine if a regex expression contains a literal uppercase character.
fn hir_has_uppercase_char(hir: &Hir) -> bool {
use regex_syntax::hir::*;
match *hir.kind() {
HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(),
HirKind::Class(Class::Unicode(ref ranges)) => ranges
.iter()
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
HirKind::Group(Group { ref hir, .. }) | HirKind::Repetition(Repetition { ref hir, .. }) => {
hir_has_uppercase_char(hir)
}
HirKind::Concat(ref hirs) | HirKind::Alternation(ref hirs) => {
hirs.iter().any(hir_has_uppercase_char)
}
_ => false,
}
}
/// Maximum size of the output buffer before flushing results to the console
pub const MAX_BUFFER_LENGTH: usize = 1000;
/// Exit code representing a general error
pub const EXITCODE_ERROR: i32 = 1;
/// Exit code representing that the process was killed by SIGINT
pub const EXITCODE_SIGINT: i32 = 130;
/// Traverse args_os, looking for -exec and replacing it with --exec.
///
/// # Returns
///
/// * The args, with substitution if required
pub fn transform_args_with_exec<I>(original: I) -> Vec<OsString>
where
I: Iterator<Item = OsString>,
{
let mut in_exec_opt = false;
let target = OsString::from("-exec");
let long_start = OsString::from("--exec");
let short_start = OsString::from("-x");
let exec_end = OsString::from(";");
original.fold(vec![], |mut args, curr| {
if in_exec_opt {
if curr == exec_end {
in_exec_opt = false;
}
args.push(curr);
return args;
}
if curr == target || curr == long_start || curr == short_start {
args.push(if curr == target {
OsString::from("--exec")
} else {
curr
});
in_exec_opt = true;
} else {
args.push(curr);
}
args
})
}
#[cfg(test)]
fn oss(v: &str) -> OsString {
OsString::from(v)
}
/// Ensure that -exec gets transformed into --exec
#[test]
fn normal_exec_substitution() {
let original = vec![oss("fd"), oss("foo"), oss("-exec"), oss("cmd")];
let expected = vec![oss("fd"), oss("foo"), oss("--exec"), oss("cmd")];
let actual = transform_args_with_exec(original.into_iter());
assert_eq!(expected, actual);
}
/// Ensure that --exec is not touched
#[test]
fn passthru_of_original_exec() {
let original = vec![oss("fd"), oss("foo"), oss("--exec"), oss("cmd")];
let expected = vec![oss("fd"), oss("foo"), oss("--exec"), oss("cmd")];
let actual = transform_args_with_exec(original.into_iter());
assert_eq!(expected, actual);
}
#[test]
fn temp_check_that_exec_context_observed() {
let original = vec![
oss("fd"),
oss("foo"),
oss("-exec"),
oss("cmd"),
oss("-exec"),
oss("ls"),
oss(";"),
oss("-exec"),
oss("rm"),
oss(";"),
oss("--exec"),
oss("find"),
oss("-exec"),
oss("rm"),
oss(";"),
oss("-x"),
oss("foo"),
oss("-exec"),
oss("something"),
oss(";"),
oss("-exec"),
];
let expected = vec![
oss("fd"),
oss("foo"),
oss("--exec"),
oss("cmd"),
oss("-exec"),
oss("ls"),
oss(";"),
oss("--exec"),
oss("rm"),
oss(";"),
oss("--exec"),
oss("find"),
oss("-exec"),
oss("rm"),
oss(";"),
oss("-x"),
oss("foo"),
oss("-exec"),
oss("something"),
oss(";"),
oss("--exec"),
];
let actual = transform_args_with_exec(original.into_iter());
assert_eq!(expected, actual);
}
/// Parsing and size conversion tests
#[cfg(test)]
mod size_parsing {
use super::*;
macro_rules! gen_size_filter_parse_test {
($($name: ident: $val: expr,)*) => {
$(
#[test]
fn $name() {
let (txt, expected) = $val;
let actual = SizeFilter::from_string(txt).unwrap();
assert_eq!(actual, expected);
}
)*
};
}
/// Parsing and size conversion tests data. Ensure that each type gets properly interpreted.
/// Call with higher base values to ensure expected multiplication (only need a couple)
gen_size_filter_parse_test! {
byte_plus: ("+1b", SizeFilter::Min(1)),
byte_plus_multiplier: ("+10b", SizeFilter::Min(10)),
byte_minus: ("-1b", SizeFilter::Max(1)),
kilo_plus: ("+1k", SizeFilter::Min(1000)),
kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)),
kilo_minus: ("-1k", SizeFilter::Max(1000)),
kilo_minus_multiplier: ("-100k", SizeFilter::Max(100000)),
kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)),
kilo_plus_upper: ("+1K", SizeFilter::Min(1000)),
kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)),
kilo_minus_upper: ("-1K", SizeFilter::Max(1000)),
kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)),
kibi_plus: ("+1ki", SizeFilter::Min(1024)),
kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10240)),
kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)),
kibi_minus: ("-1ki", SizeFilter::Max(1024)),
kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102400)),
kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)),
kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)),
kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)),
kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)),
kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)),
mega_plus: ("+1m", SizeFilter::Min(1000000)),
mega_plus_suffix: ("+1mb", SizeFilter::Min(1000000)),
mega_minus: ("-1m", SizeFilter::Max(1000000)),
mega_minus_suffix: ("-1mb", SizeFilter::Max(1000000)),
mega_plus_upper: ("+1M", SizeFilter::Min(1000000)),
mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1000000)),
mega_minus_upper: ("-1M", SizeFilter::Max(1000000)),
mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1000000)),
mebi_plus: ("+1mi", SizeFilter::Min(1048576)),
mebi_plus_suffix: ("+1mib", SizeFilter::Min(1048576)),
mebi_minus: ("-1mi", SizeFilter::Max(1048576)),
mebi_minus_suffix: ("-1mib", SizeFilter::Max(1048576)),
mebi_plus_upper: ("+1MI", SizeFilter::Min(1048576)),
mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1048576)),
mebi_minus_upper: ("-1Mi", SizeFilter::Max(1048576)),
mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1048576)),
giga_plus: ("+1g", SizeFilter::Min(1000000000)),
giga_plus_suffix: ("+1gb", SizeFilter::Min(1000000000)),
giga_minus: ("-1g", SizeFilter::Max(1000000000)),
giga_minus_suffix: ("-1gb", SizeFilter::Max(1000000000)),
giga_plus_upper: ("+1G", SizeFilter::Min(1000000000)),
giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1000000000)),
giga_minus_upper: ("-1G", SizeFilter::Max(1000000000)),
giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1000000000)),
gibi_plus: ("+1gi", SizeFilter::Min(1073741824)),
gibi_plus_suffix: ("+1gib", SizeFilter::Min(1073741824)),
gibi_minus: ("-1gi", SizeFilter::Max(1073741824)),
gibi_minus_suffix: ("-1gib", SizeFilter::Max(1073741824)),
gibi_plus_upper: ("+1GI", SizeFilter::Min(1073741824)),
gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1073741824)),
gibi_minus_upper: ("-1Gi", SizeFilter::Max(1073741824)),
gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1073741824)),
tera_plus: ("+1t", SizeFilter::Min(1000000000000)),
tera_plus_suffix: ("+1tb", SizeFilter::Min(1000000000000)),
tera_minus: ("-1t", SizeFilter::Max(1000000000000)),
tera_minus_suffix: ("-1tb", SizeFilter::Max(1000000000000)),
tera_plus_upper: ("+1T", SizeFilter::Min(1000000000000)),
tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1000000000000)),
tera_minus_upper: ("-1T", SizeFilter::Max(1000000000000)),
tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1000000000000)),
tebi_plus: ("+1ti", SizeFilter::Min(1099511627776)),
tebi_plus_suffix: ("+1tib", SizeFilter::Min(1099511627776)),
tebi_minus: ("-1ti", SizeFilter::Max(1099511627776)),
tebi_minus_suffix: ("-1tib", SizeFilter::Max(1099511627776)),
tebi_plus_upper: ("+1TI", SizeFilter::Min(1099511627776)),
tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1099511627776)),
tebi_minus_upper: ("-1Ti", SizeFilter::Max(1099511627776)),
tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1099511627776)),
}
}
/// Invalid parse testing
#[cfg(test)]
macro_rules! gen_size_filter_failure {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let i = SizeFilter::from_string($value);
assert!(i.is_none());
}
)*
};
}
/// Invalid parse data
#[cfg(test)]
gen_size_filter_failure! {
ensure_missing_symbol_returns_none: "10M",
ensure_missing_number_returns_none: "+g",
ensure_missing_unit_returns_none: "+18",
ensure_bad_format_returns_none_1: "$10M",
ensure_bad_format_returns_none_2: "badval",
ensure_bad_format_returns_none_3: "9999",
ensure_invalid_unit_returns_none_1: "+50a",
ensure_invalid_unit_returns_none_2: "-10v",
ensure_invalid_unit_returns_none_3: "+1Mv",
ensure_bib_format_returns_none: "+1bib",
ensure_bb_format_returns_none: "+1bb",
}
#[test]
fn is_within_less_than() {
let f = SizeFilter::from_string("-1k").unwrap();
assert!(f.is_within(999));
}
#[test]
fn is_within_less_than_equal() {
let f = SizeFilter::from_string("-1k").unwrap();
assert!(f.is_within(1000));
}
#[test]
fn is_within_greater_than() {
let f = SizeFilter::from_string("+1k").unwrap();
assert!(f.is_within(1001));
}
#[test]
fn is_within_greater_than_equal() {
let f = SizeFilter::from_string("+1K").unwrap();
assert!(f.is_within(1000));
}
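The size_constraints field of FdOptions above collects every --size argument, and a candidate file has to satisfy all of them. A minimal sketch of that conjunction (the real check happens during the directory walk, outside this hunk):

    use std::fs;
    use std::path::Path;

    // No constraints means everything passes; otherwise every filter must match,
    // so `--size +1k --size -1M` can express a range.
    fn passes_size_filters(path: &Path, constraints: &[SizeFilter]) -> bool {
        match fs::metadata(path) {
            Ok(md) => constraints.iter().all(|f| f.is_within(md.len())),
            Err(_) => false,
        }
    }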

231
src/lscolors/mod.rs Normal file
View File

@ -0,0 +1,231 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use ansi_term::{Colour, Style};
/// A parser for the `LS_COLORS` environment variable.
use std::collections::HashMap;
/// Maps file extensions to ANSI colors / styles.
pub type ExtensionStyles = HashMap<String, Style>;
/// Maps filenames to ANSI colors / styles.
pub type FilenameStyles = HashMap<String, Style>;
const LS_CODES: &[&str] = &[
"no", "no", "fi", "rs", "di", "ln", "ln", "ln", "or", "mi", "pi", "pi", "so", "bd", "bd", "cd",
"cd", "do", "ex", "lc", "lc", "rc", "rc", "ec", "ec", "su", "su", "sg", "sg", "st", "ow", "ow",
"tw", "tw", "ca", "mh", "cl",
];
/// Defines how different file system entries should be colorized / styled.
#[derive(Debug, PartialEq)]
pub struct LsColors {
/// ANSI Style for directories.
pub directory: Style,
/// ANSI style for symbolic links.
pub symlink: Style,
/// ANSI style for executable files.
pub executable: Style,
/// A map that defines ANSI styles for different file extensions.
pub extensions: ExtensionStyles,
/// A map that defines ANSI styles for different specific filenames.
pub filenames: FilenameStyles,
}
impl Default for LsColors {
/// Get a default LsColors structure.
fn default() -> LsColors {
LsColors {
directory: Colour::Blue.bold(),
symlink: Colour::Cyan.normal(),
executable: Colour::Red.bold(),
extensions: HashMap::new(),
filenames: HashMap::new(),
}
}
}
impl LsColors {
/// Parse a single text-decoration code (normal, bold, italic, ...).
fn parse_decoration(code: &str) -> Option<fn(Colour) -> Style> {
match code {
"0" | "00" => Some(Colour::normal),
"1" | "01" => Some(Colour::bold),
"3" | "03" => Some(Colour::italic),
"4" | "04" => Some(Colour::underline),
_ => None,
}
}
/// Parse ANSI escape sequences like `38;5;10;1`.
fn parse_style(code: &str) -> Option<Style> {
let mut split = code.split(';');
if let Some(first) = split.next() {
// Try to match the first part as a text-decoration argument
let mut decoration = LsColors::parse_decoration(first);
let c1 = if decoration.is_none() {
Some(first)
} else {
split.next()
};
let c2 = split.next();
let c3 = split.next();
let color = if c1 == Some("38") && c2 == Some("5") {
let n_white = 7;
let n = if let Some(num) = c3 {
u8::from_str_radix(num, 10).unwrap_or(n_white)
} else {
n_white
};
Colour::Fixed(n)
} else if let Some(color_s) = c1 {
match color_s {
"30" => Colour::Black,
"31" => Colour::Red,
"32" => Colour::Green,
"33" => Colour::Yellow,
"34" => Colour::Blue,
"35" => Colour::Purple,
"36" => Colour::Cyan,
_ => Colour::White,
}
} else {
Colour::White
};
if decoration.is_none() {
// Try to find a decoration somewhere in the sequence
decoration = code.split(';').flat_map(LsColors::parse_decoration).next();
}
let ansi_style = decoration.unwrap_or(Colour::normal)(color);
Some(ansi_style)
} else {
None
}
}
/// Add a new `LS_COLORS` entry.
fn add_entry(&mut self, input: &str) {
let mut parts = input.trim().split('=');
if let Some(pattern) = parts.next() {
if let Some(style_code) = parts.next() {
// Ensure that the input was split into exactly two parts:
if !parts.next().is_none() {
return;
}
if let Some(style) = LsColors::parse_style(style_code) {
// Try to match against one of the known codes
let res = LS_CODES.iter().find(|&&c| c == pattern);
if let Some(code) = res {
match code.as_ref() {
"di" => self.directory = style,
"ln" => self.symlink = style,
"ex" => self.executable = style,
_ => return,
}
} else if pattern.starts_with("*.") {
let extension = String::from(pattern).split_off(2);
self.extensions.insert(extension, style);
} else if pattern.starts_with('*') {
let filename = String::from(pattern).split_off(1);
self.filenames.insert(filename, style);
} else {
// Unknown/corrupt pattern
return;
}
}
}
}
}
/// Generate a `LsColors` structure from a string.
pub fn from_string(input: &str) -> LsColors {
let mut lscolors = LsColors::default();
for s in input.split(':') {
lscolors.add_entry(s);
}
lscolors
}
}
#[test]
fn test_parse_simple() {
assert_eq!(Some(Colour::Red.normal()), LsColors::parse_style("31"));
}
#[test]
fn test_parse_decoration() {
assert_eq!(Some(Colour::Red.normal()), LsColors::parse_style("00;31"));
assert_eq!(Some(Colour::Blue.italic()), LsColors::parse_style("03;34"));
assert_eq!(Some(Colour::Cyan.bold()), LsColors::parse_style("01;36"));
}
#[test]
fn test_parse_decoration_backwards() {
assert_eq!(Some(Colour::Blue.italic()), LsColors::parse_style("34;03"));
assert_eq!(Some(Colour::Cyan.bold()), LsColors::parse_style("36;01"));
assert_eq!(Some(Colour::Red.normal()), LsColors::parse_style("31;00"));
}
#[test]
fn test_parse_256() {
assert_eq!(
Some(Colour::Fixed(115).normal()),
LsColors::parse_style("38;5;115")
);
assert_eq!(
Some(Colour::Fixed(115).normal()),
LsColors::parse_style("00;38;5;115")
);
assert_eq!(
Some(Colour::Fixed(119).bold()),
LsColors::parse_style("01;38;5;119")
);
assert_eq!(
Some(Colour::Fixed(119).bold()),
LsColors::parse_style("38;5;119;01")
);
}
#[test]
fn test_from_string() {
assert_eq!(LsColors::default(), LsColors::from_string(&String::new()));
let result = LsColors::from_string(&String::from(
"rs=0:di=03;34:ln=01;36:*.foo=01;35:*README=33",
));
assert_eq!(Colour::Blue.italic(), result.directory);
assert_eq!(Colour::Cyan.bold(), result.symlink);
assert_eq!(Some(&Colour::Purple.bold()), result.extensions.get("foo"));
assert_eq!(
Some(&Colour::Yellow.normal()),
result.filenames.get("README")
);
}
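The struct above only stores styles; picking one for a given entry is left to the caller. One plausible lookup order is sketched below (an illustration, not the lookup fd actually shipped in its output module):

    use ansi_term::Style;
    use std::path::Path;

    // Symlink and directory flags win over name-based rules; otherwise try an
    // exact filename match, then the extension map. (Executable styling and
    // case-insensitive extension lookup are omitted for brevity.)
    fn style_for(colors: &LsColors, path: &Path, is_dir: bool, is_symlink: bool) -> Style {
        if is_symlink {
            colors.symlink
        } else if is_dir {
            colors.directory
        } else if let Some(style) = path
            .file_name()
            .and_then(|n| n.to_str())
            .and_then(|n| colors.filenames.get(n))
        {
            *style
        } else if let Some(style) = path
            .extension()
            .and_then(|e| e.to_str())
            .and_then(|e| colors.extensions.get(e))
        {
            *style
        } else {
            Style::default()
        }
    }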

View File

@ -1,288 +1,191 @@
mod cli; // Copyright (c) 2017 fd developers
mod config; // Licensed under the Apache License, Version 2.0
mod dir_entry; // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
mod error; // or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
extern crate ansi_term;
extern crate atty;
#[macro_use]
extern crate clap;
extern crate ignore;
#[macro_use]
extern crate lazy_static;
#[cfg(all(unix, not(target_os = "redox")))]
extern crate libc;
extern crate num_cpus;
extern crate regex;
extern crate regex_syntax;
mod app;
mod exec; mod exec;
mod exit_codes; pub mod fshelper;
mod filesystem; mod internal;
mod filetypes; pub mod lscolors;
mod filter;
mod fmt;
mod hyperlink;
mod output; mod output;
mod regex_helper;
mod walk; mod walk;
use std::env; use std::env;
use std::io::IsTerminal; use std::error::Error;
use std::path::Path; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use std::time; use std::time;
use anyhow::{anyhow, bail, Context, Result}; use atty::Stream;
use clap::{CommandFactory, Parser}; use regex::{RegexBuilder, RegexSetBuilder};
use globset::GlobBuilder;
use exec::CommandTemplate;
use internal::{
error, pattern_has_uppercase_char, transform_args_with_exec, FdOptions, FileTypes, SizeFilter,
};
use lscolors::LsColors; use lscolors::LsColors;
use regex::bytes::{Regex, RegexBuilder, RegexSetBuilder};
use crate::cli::{ColorWhen, HyperlinkWhen, Opts};
use crate::config::Config;
use crate::exec::CommandSet;
use crate::exit_codes::ExitCode;
use crate::filetypes::FileTypes;
#[cfg(unix)]
use crate::filter::OwnerFilter;
use crate::filter::TimeFilter;
use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot};
// We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481
// FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos
#[cfg(all(
not(windows),
not(target_os = "android"),
not(target_os = "macos"),
not(target_os = "freebsd"),
not(target_os = "openbsd"),
not(all(target_env = "musl", target_pointer_width = "32")),
not(target_arch = "riscv64"),
feature = "use-jemalloc"
))]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
// vivid --color-mode 8-bit generate molokai
const DEFAULT_LS_COLORS: &str = "
ow=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;38;5;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.mp4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0;
38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243
";
fn main() { fn main() {
let result = run(); let checked_args = transform_args_with_exec(env::args_os());
match result { let matches = app::build_app().get_matches_from(checked_args);
Ok(exit_code) => {
exit_code.exit();
}
Err(err) => {
eprintln!("[fd error]: {:#}", err);
ExitCode::GeneralError.exit();
}
}
}
fn run() -> Result<ExitCode> { // Get the search pattern
let opts = Opts::parse(); let pattern = matches.value_of("pattern").unwrap_or("");
#[cfg(feature = "completions")] // Get the current working directory
if let Some(shell) = opts.gen_completions()? { let current_dir = Path::new(".");
return print_completions(shell); if !fshelper::is_dir(current_dir) {
error("Error: could not get current directory.");
} }
set_working_dir(&opts)?; // Get one or more root directories to search.
let search_paths = opts.search_paths()?; let mut dir_vec: Vec<_> = match matches.values_of("path") {
if search_paths.is_empty() { Some(paths) => paths
bail!("No valid search paths given."); .map(|path| {
let path_buffer = PathBuf::from(path);
if !fshelper::is_dir(&path_buffer) {
error(&format!(
"Error: '{}' is not a directory.",
path_buffer.to_string_lossy()
));
}
path_buffer
}).collect::<Vec<_>>(),
None => vec![current_dir.to_path_buf()],
};
if matches.is_present("absolute-path") {
dir_vec = dir_vec
.iter()
.map(|path_buffer| {
path_buffer
.canonicalize()
.and_then(|pb| fshelper::absolute_path(pb.as_path()))
.unwrap()
}).collect();
} }
ensure_search_pattern_is_not_a_path(&opts)?; // Detect if the user accidentally supplied a path instead of a search pattern
let pattern = &opts.pattern; if !matches.is_present("full-path")
let exprs = &opts.exprs; && pattern.contains(std::path::MAIN_SEPARATOR)
let empty = Vec::new(); && fshelper::is_dir(Path::new(pattern))
let pattern_regexps = exprs
.as_ref()
.unwrap_or(&empty)
.iter()
.chain([pattern])
.map(|pat| build_pattern_regex(pat, &opts))
.collect::<Result<Vec<String>>>()?;
let config = construct_config(opts, &pattern_regexps)?;
ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regexps)?;
let regexps = pattern_regexps
.into_iter()
.map(|pat| build_regex(pat, &config))
.collect::<Result<Vec<Regex>>>()?;
walk::scan(&search_paths, regexps, config)
}
#[cfg(feature = "completions")]
#[cold]
fn print_completions(shell: clap_complete::Shell) -> Result<ExitCode> {
// The program name is the first argument.
let first_arg = env::args().next();
let program_name = first_arg
.as_ref()
.map(Path::new)
.and_then(|path| path.file_stem())
.and_then(|file| file.to_str())
.unwrap_or("fd");
let mut cmd = Opts::command();
cmd.build();
clap_complete::generate(shell, &mut cmd, program_name, &mut std::io::stdout());
Ok(ExitCode::Success)
}
fn set_working_dir(opts: &Opts) -> Result<()> {
if let Some(ref base_directory) = opts.base_directory {
if !filesystem::is_existing_directory(base_directory) {
return Err(anyhow!(
"The '--base-directory' path '{}' is not a directory.",
base_directory.to_string_lossy()
));
}
env::set_current_dir(base_directory).with_context(|| {
format!(
"Could not set '{}' as the current working directory",
base_directory.to_string_lossy()
)
})?;
}
Ok(())
}
/// Detect if the user accidentally supplied a path instead of a search pattern
fn ensure_search_pattern_is_not_a_path(opts: &Opts) -> Result<()> {
if !opts.full_path
&& opts.pattern.contains(std::path::MAIN_SEPARATOR)
&& Path::new(&opts.pattern).is_dir()
{ {
Err(anyhow!( error(&format!(
"The search pattern '{pattern}' contains a path-separation character ('{sep}') \ "Error: The search pattern '{pattern}' contains a path-separation character ('{sep}') \
and will not lead to any search results.\n\n\ and will not lead to any search results.\n\n\
If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \ If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \
fd . '{pattern}'\n\n\ fd . '{pattern}'\n\n\
Instead, if you want your pattern to match the full file path, use:\n\n \ Instead, if you want to search for the pattern in the full path, use:\n\n \
fd --full-path '{pattern}'", fd --full-path '{pattern}'",
pattern = &opts.pattern, pattern = pattern,
sep = std::path::MAIN_SEPARATOR, sep = std::path::MAIN_SEPARATOR,
)) ));
} else {
Ok(())
} }
}
fn build_pattern_regex(pattern: &str, opts: &Opts) -> Result<String> { // Treat pattern as literal string if '--fixed-strings' is used
Ok(if opts.glob && !pattern.is_empty() { let pattern_regex = if matches.is_present("fixed-strings") {
let glob = GlobBuilder::new(pattern).literal_separator(true).build()?;
glob.regex().to_owned()
} else if opts.fixed_strings {
// Treat pattern as literal string if '--fixed-strings' is used
regex::escape(pattern) regex::escape(pattern)
} else { } else {
String::from(pattern) String::from(pattern)
})
}
fn check_path_separator_length(path_separator: Option<&str>) -> Result<()> {
match (cfg!(windows), path_separator) {
(true, Some(sep)) if sep.len() > 1 => Err(anyhow!(
"A path separator must be exactly one byte, but \
the given separator is {} bytes: '{}'.\n\
In some shells on Windows, '/' is automatically \
expanded. Try to use '//' instead.",
sep.len(),
sep
)),
_ => Ok(()),
}
}
fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config> {
// The search will be case-sensitive if the command line flag is set or
// if any of the patterns has an uppercase character (smart case).
let case_sensitive = !opts.ignore_case
&& (opts.case_sensitive
|| pattern_regexps
.iter()
.any(|pat| pattern_has_uppercase_char(pat)));
let path_separator = opts
.path_separator
.take()
.or_else(filesystem::default_path_separator);
let actual_path_separator = path_separator
.clone()
.unwrap_or_else(|| std::path::MAIN_SEPARATOR.to_string());
check_path_separator_length(path_separator.as_deref())?;
let size_limits = std::mem::take(&mut opts.size);
let time_constraints = extract_time_constraints(&opts)?;
#[cfg(unix)]
let owner_constraint: Option<OwnerFilter> = opts.owner.and_then(OwnerFilter::filter_ignore);
#[cfg(windows)]
let ansi_colors_support =
nu_ansi_term::enable_ansi_support().is_ok() || std::env::var_os("TERM").is_some();
#[cfg(not(windows))]
let ansi_colors_support = true;
let interactive_terminal = std::io::stdout().is_terminal();
let colored_output = match opts.color {
ColorWhen::Always => true,
ColorWhen::Never => false,
ColorWhen::Auto => {
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty());
ansi_colors_support && !no_color && interactive_terminal
}
}; };
// The search will be case-sensitive if the command line flag is set or
// if the pattern has an uppercase character (smart case).
let case_sensitive = !matches.is_present("ignore-case")
&& (matches.is_present("case-sensitive") || pattern_has_uppercase_char(&pattern_regex));
let colored_output = match matches.value_of("color") {
Some("always") => true,
Some("never") => false,
_ => atty::is(Stream::Stdout),
};
#[cfg(windows)]
let colored_output = colored_output && ansi_term::enable_ansi_support().is_ok();
let ls_colors = if colored_output { let ls_colors = if colored_output {
Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS))) Some(
env::var("LS_COLORS")
.ok()
.map(|val| LsColors::from_string(&val))
.unwrap_or_default(),
)
} else { } else {
None None
}; };
let hyperlink = match opts.hyperlink {
HyperlinkWhen::Always => true,
HyperlinkWhen::Never => false,
HyperlinkWhen::Auto => colored_output,
};
let command = extract_command(&mut opts, colored_output)?;
let has_command = command.is_some();
Ok(Config { let command = matches.values_of("exec").map(CommandTemplate::new);
let size_limits: Vec<SizeFilter> = matches
.values_of("size")
.map(|v| {
v.map(|sf| {
if let Some(f) = SizeFilter::from_string(sf) {
return f;
}
error(&format!("Error: {} is not a valid size constraint.", sf));
}).collect()
}).unwrap_or_else(|| vec![]);
let config = FdOptions {
    case_sensitive,
    search_full_path: opts.full_path,
    ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),
    read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),
    read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),
    require_git_to_read_vcsignore: !opts.no_require_git,
    read_parent_ignore: !opts.no_ignore_parent,
    read_global_ignore: !(opts.no_ignore
        || opts.rg_alias_ignore()
        || opts.no_global_ignore_file),
    follow_links: opts.follow,
    one_file_system: opts.one_file_system,
    null_separator: opts.null_separator,
    quiet: opts.quiet,
    max_depth: opts.max_depth(),
    min_depth: opts.min_depth(),
    prune: opts.prune,
    threads: opts.threads().get(),
    max_buffer_time: opts.max_buffer_time,
    ls_colors,
    hyperlink,
    interactive_terminal,
    file_types: opts.filetype.as_ref().map(|values| {
        use crate::cli::FileType::*;
        let mut file_types = FileTypes::default();
        for value in values {
            match value {
                File => file_types.files = true,
                Directory => file_types.directories = true,
                Symlink => file_types.symlinks = true,
                Executable => {
                    file_types.executables_only = true;
                    file_types.files = true;
                }
                Empty => file_types.empty_only = true,
                BlockDevice => file_types.block_devices = true,
                CharDevice => file_types.char_devices = true,
                Socket => file_types.sockets = true,
                Pipe => file_types.pipes = true,
            }
        }
    case_sensitive,
    search_full_path: matches.is_present("full-path"),
    ignore_hidden: !(matches.is_present("hidden")
        || matches.occurrences_of("rg-alias-hidden-ignore") >= 2),
    read_fdignore: !(matches.is_present("no-ignore")
        || matches.is_present("rg-alias-hidden-ignore")),
    read_vcsignore: !(matches.is_present("no-ignore")
        || matches.is_present("rg-alias-hidden-ignore")
        || matches.is_present("no-ignore-vcs")),
    follow_links: matches.is_present("follow"),
    null_separator: matches.is_present("null_separator"),
    max_depth: matches
        .value_of("depth")
        .and_then(|n| usize::from_str_radix(n, 10).ok()),
    threads: std::cmp::max(
        matches
            .value_of("threads")
            .and_then(|n| usize::from_str_radix(n, 10).ok())
            .unwrap_or_else(num_cpus::get),
        1,
    ),
    max_buffer_time: matches
        .value_of("max-buffer-time")
        .and_then(|n| u64::from_str_radix(n, 10).ok())
        .map(time::Duration::from_millis),
    ls_colors,
    file_types: matches.values_of("file-type").map(|values| {
        let mut file_types = FileTypes::default();
        for value in values {
            match value {
                "f" | "file" => file_types.files = true,
                "d" | "directory" => file_types.directories = true,
                "l" | "symlink" => file_types.symlinks = true,
                "x" | "executable" => {
                    file_types.executables_only = true;
                    file_types.files = true;
                }
                "e" | "empty" => {
                    file_types.empty_only = true;
                }
                _ => unreachable!(),
            }
        }
@ -294,197 +197,42 @@ fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config
        file_types
    }),
    extensions: opts
        .extensions
        .as_ref()
        .map(|exts| {
            let patterns = exts
                .iter()
                .map(|e| e.trim_start_matches('.'))
                .map(|e| format!(r".\.{}$", regex::escape(e)));
            RegexSetBuilder::new(patterns)
                .case_insensitive(true)
                .build()
        })
        .transpose()?,
    format: opts
        .format
        .as_deref()
        .map(crate::fmt::FormatTemplate::parse),
    command: command.map(Arc::new),
    batch_size: opts.batch_size,
    exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
    ignore_files: std::mem::take(&mut opts.ignore_file),
    size_constraints: size_limits,
        file_types
    }),
    extensions: matches.values_of("extension").map(|exts| {
        let patterns = exts
            .map(|e| e.trim_left_matches('.'))
            .map(|e| format!(r".\.{}$", regex::escape(e)));
        match RegexSetBuilder::new(patterns)
            .case_insensitive(true)
            .build()
        {
            Ok(re) => re,
            Err(err) => error(err.description()),
        }
    }),
    command,
    exclude_patterns: matches
        .values_of("exclude")
        .map(|v| v.map(|p| String::from("!") + p).collect())
        .unwrap_or_else(|| vec![]),
    ignore_files: matches
        .values_of("ignore-file")
        .map(|vs| vs.map(PathBuf::from).collect())
        .unwrap_or_else(|| vec![]),
    size_constraints: size_limits,
time_constraints,
#[cfg(unix)]
owner_constraint,
show_filesystem_errors: opts.show_errors,
path_separator,
actual_path_separator,
max_results: opts.max_results(),
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),
})
}
fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<CommandSet>> {
opts.exec
.command
.take()
.map(Ok)
.or_else(|| {
if !opts.list_details {
return None;
}
let res = determine_ls_command(colored_output)
.map(|cmd| CommandSet::new_batch([cmd]).unwrap());
Some(res)
})
.transpose()
}
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {
#[allow(unused)]
let gnu_ls = |command_name| {
let color_arg = if colored_output {
"--color=always"
} else {
"--color=never"
};
// Note: we use short options here (instead of --long-options) to support more
// platforms (like BusyBox).
vec![
command_name,
"-l", // long listing format
"-h", // human readable file sizes
"-d", // list directories themselves, not their contents
color_arg,
]
    };
    };
let cmd: Vec<&str> = if cfg!(unix) {
if !cfg!(any(
target_os = "macos",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "netbsd",
target_os = "openbsd"
)) {
// Assume ls is GNU ls
gnu_ls("ls")
} else {
// MacOS, DragonFlyBSD, FreeBSD
use std::process::{Command, Stdio};
            // Use GNU ls, if available (support for --color=auto, better LS_COLORS support)
let gnu_ls_exists = Command::new("gls")
.arg("--version")
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.is_ok();
if gnu_ls_exists {
gnu_ls("gls")
} else {
let mut cmd = vec![
"ls", // BSD version of ls
"-l", // long listing format
"-h", // '--human-readable' is not available, '-h' is
"-d", // '--directory' is not available, but '-d' is
];
if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output {
// -G is not available in NetBSD's and OpenBSD's ls
cmd.push("-G");
}
cmd
}
}
} else if cfg!(windows) {
use std::process::{Command, Stdio};
// Use GNU ls, if available
let gnu_ls_exists = Command::new("ls")
.arg("--version")
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.is_ok();
if gnu_ls_exists {
gnu_ls("ls")
} else {
return Err(anyhow!(
"'fd --list-details' is not supported on Windows unless GNU 'ls' is installed."
));
}
} else {
return Err(anyhow!(
"'fd --list-details' is not supported on this platform."
));
};
Ok(cmd)
}
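A stand-alone sketch of the probe used above when picking an ls implementation: spawn `<candidate> --version` and treat a successfully reaped status as "installed". The candidate names here are only examples:

use std::process::{Command, Stdio};

/// Returns true if `binary --version` can be spawned and waited on, which is
/// how the code above decides whether a GNU ls (e.g. `gls` on macOS) exists.
fn is_available(binary: &str) -> bool {
    Command::new(binary)
        .arg("--version")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .is_ok()
}

fn main() {
    for candidate in ["gls", "ls"] {
        println!("{}: {}", candidate, is_available(candidate));
    }
}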
fn extract_time_constraints(opts: &Opts) -> Result<Vec<TimeFilter>> {
let now = time::SystemTime::now();
let mut time_constraints: Vec<TimeFilter> = Vec::new();
if let Some(ref t) = opts.changed_within {
if let Some(f) = TimeFilter::after(&now, t) {
time_constraints.push(f);
} else {
return Err(anyhow!(
"'{}' is not a valid date or duration. See 'fd --help'.",
t
));
}
}
if let Some(ref t) = opts.changed_before {
if let Some(f) = TimeFilter::before(&now, t) {
time_constraints.push(f);
} else {
return Err(anyhow!(
"'{}' is not a valid date or duration. See 'fd --help'.",
t
));
}
}
Ok(time_constraints)
}
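For illustration, a reduced, std-only sketch of what a `--changed-within` constraint checks once the argument has been parsed into a duration (the real TimeFilter also accepts absolute dates); the file path used here is just an example:

use std::fs;
use std::io;
use std::time::{Duration, SystemTime};

/// Check whether `path` was modified within `window` of `now`, mirroring the
/// effect of a TimeFilter built from a duration argument.
fn changed_within(path: &str, now: SystemTime, window: Duration) -> io::Result<bool> {
    let modified = fs::metadata(path)?.modified()?;
    // The file matches if its mtime is not older than `now - window`.
    Ok(match now.checked_sub(window) {
        Some(cutoff) => modified >= cutoff,
        None => true, // window reaches past the representable range: everything matches
    })
}

fn main() -> io::Result<()> {
    let now = SystemTime::now();
    let recent = changed_within("Cargo.toml", now, Duration::from_secs(14 * 24 * 3600))?;
    println!("modified within two weeks: {}", recent);
    Ok(())
}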
fn ensure_use_hidden_option_for_leading_dot_pattern(
config: &Config,
pattern_regexps: &[String],
) -> Result<()> {
if cfg!(unix)
&& config.ignore_hidden
&& pattern_regexps
.iter()
.any(|pat| pattern_matches_strings_with_leading_dot(pat))
{
Err(anyhow!(
"The pattern(s) seems to only match files with a leading dot, but hidden files are \
filtered by default. Consider adding -H/--hidden to search hidden files as well \
or adjust your search pattern(s)."
))
} else {
Ok(())
}
}
fn build_regex(pattern_regex: String, config: &Config) -> Result<regex::bytes::Regex> {
RegexBuilder::new(&pattern_regex)
        .case_insensitive(!config.case_sensitive)
        .dot_matches_new_line(true)
        .build()
        .map_err(|e| {
            anyhow!(
                "{}\n\nNote: You can use the '--fixed-strings' option to search for a \
                 literal string instead of a regular expression. Alternatively, you can \
                 also use the '--glob' option to match on a glob pattern.",
                e.to_string()
            )
        })
}
    match RegexBuilder::new(&pattern_regex)
        .case_insensitive(!config.case_sensitive)
        .dot_matches_new_line(true)
        .build()
    {
        Ok(re) => walk::scan(&dir_vec, Arc::new(re), Arc::new(config)),
        Err(err) => error(
            format!(
                "{}\nHint: You can use the '--fixed-strings' option to search for a \
                 literal string instead of a regular expression",
                err.description()
            ).as_str(),
        ),
    }
}
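Both versions above hand the pattern to regex's builder with the same two switches. A minimal sketch, assuming the str-based regex crate API (the master side actually uses regex::bytes):

use regex::RegexBuilder;

fn main() {
    let case_sensitive = false; // e.g. the smart-case result computed earlier
    let re = RegexBuilder::new(r"foo.*\.rs")
        .case_insensitive(!case_sensitive)
        .dot_matches_new_line(true)
        .build()
        .expect("pattern should compile");
    assert!(re.is_match("FOO_bar.rs"));
}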

View File

@ -1,175 +1,137 @@
use std::borrow::Cow;
use std::io::{self, Write};
use lscolors::{Indicator, LsColors, Style};
use crate::config::Config;
use crate::dir_entry::DirEntry;
use crate::fmt::FormatTemplate;
use crate::hyperlink::PathUrl;
fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
    path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
}
// TODO: this function is performance critical and can probably be optimized
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) -> io::Result<()> {
    let mut has_hyperlink = false;
    if config.hyperlink {
        if let Some(url) = PathUrl::new(entry.path()) {
            write!(stdout, "\x1B]8;;{}\x1B\\", url)?;
            has_hyperlink = true;
        }
    }
    if let Some(ref format) = config.format {
        print_entry_format(stdout, entry, config, format)?;
    } else if let Some(ref ls_colors) = config.ls_colors {
        print_entry_colorized(stdout, entry, config, ls_colors)?;
    } else {
        print_entry_uncolorized(stdout, entry, config)?;
    };
    if has_hyperlink {
        write!(stdout, "\x1B]8;;\x1B\\")?;
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use fshelper::is_executable;
use internal::{FdOptions, EXITCODE_ERROR, EXITCODE_SIGINT};
use lscolors::LsColors;
use std::io::{self, Write};
use std::ops::Deref;
use std::path::{self, Component, Path, PathBuf};
use std::process;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use ansi_term;
/// Remove the `./` prefix from a path.
fn strip_current_dir<'a>(pathbuf: &'a PathBuf) -> &'a Path {
    let mut iter = pathbuf.components();
    let mut iter_next = iter.clone();
    if iter_next.next() == Some(Component::CurDir) {
        iter.next();
    }
    iter.as_path()
}
pub fn print_entry(entry: &PathBuf, config: &FdOptions, wants_to_quit: &Arc<AtomicBool>) {
    let path = if entry.is_absolute() {
        entry.as_path()
    } else {
        strip_current_dir(entry)
    };
    let r = if let Some(ref ls_colors) = config.ls_colors {
        print_entry_colorized(path, config, ls_colors, &wants_to_quit)
    } else {
        print_entry_uncolorized(path, config)
    };
    if r.is_err() {
        // Probably a broken pipe. Exit gracefully.
        process::exit(EXITCODE_ERROR);
    }
}
fn print_entry_colorized(
path: &Path,
config: &FdOptions,
ls_colors: &LsColors,
wants_to_quit: &Arc<AtomicBool>,
) -> io::Result<()> {
let default_style = ansi_term::Style::default();
let stdout = io::stdout();
let mut handle = stdout.lock();
// Separator to use before the current component.
let mut separator = String::new();
// Full path to the current component.
let mut component_path = PathBuf::new();
// Traverse the path and colorize each component
for component in path.components() {
let comp_str = component.as_os_str().to_string_lossy();
component_path.push(Path::new(comp_str.deref()));
let style = get_path_style(&component_path, ls_colors).unwrap_or(&default_style);
write!(handle, "{}{}", separator, style.paint(comp_str))?;
// Determine separator to print before next component.
separator = match component {
// Prefix needs no separator, as it is always followed by RootDir.
Component::Prefix(_) => String::new(),
// RootDir is already a separator.
Component::RootDir => String::new(),
// Everything else uses a separator that is painted the same way as the component.
_ => style.paint(path::MAIN_SEPARATOR.to_string()).to_string(),
};
if wants_to_quit.load(Ordering::Relaxed) {
write!(handle, "\n")?;
process::exit(EXITCODE_SIGINT);
}
    }
    if config.null_separator {
        write!(stdout, "\0")
    } else {
        writeln!(stdout)
    }
}
    }
    if config.null_separator {
        write!(handle, "\0")
    } else {
        writeln!(handle, "")
    }
}
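The hyperlink handling in the newer print_entry wraps the printed path in OSC 8 escape sequences. A small sketch of just that framing, with a hard-coded file:// URL standing in for fd's PathUrl (which also percent-encodes the path and adds the hostname):

use std::io::{self, Write};

/// Write `label` as an OSC 8 terminal hyperlink pointing at `url`:
/// ESC ] 8 ; ; URL ESC \  ...text...  ESC ] 8 ; ; ESC \
fn write_hyperlink<W: Write>(out: &mut W, url: &str, label: &str) -> io::Result<()> {
    write!(out, "\x1B]8;;{}\x1B\\", url)?; // open the hyperlink
    write!(out, "{}", label)?;             // the visible text
    write!(out, "\x1B]8;;\x1B\\")          // close the hyperlink
}

fn main() -> io::Result<()> {
    let mut stdout = io::stdout();
    write_hyperlink(&mut stdout, "file:///tmp/example.txt", "example.txt")?;
    writeln!(stdout)
}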
fn print_entry_uncolorized(path: &Path, config: &FdOptions) -> io::Result<()> {
    let separator = if config.null_separator { "\0" } else { "\n" };
// Display a trailing slash if the path is a directory and the config option is enabled.
// If the path_separator option is set, display that instead.
// The trailing slash will not be colored.
#[inline]
fn print_trailing_slash<W: Write>(
stdout: &mut W,
entry: &DirEntry,
config: &Config,
style: Option<&Style>,
) -> io::Result<()> {
if entry.file_type().map_or(false, |ft| ft.is_dir()) {
write!(
stdout,
"{}",
style
.map(Style::to_nu_ansi_term_style)
.unwrap_or_default()
.paint(&config.actual_path_separator)
)?;
}
Ok(())
}
// TODO: this function is performance critical and can probably be optimized
fn print_entry_format<W: Write>(
stdout: &mut W,
entry: &DirEntry,
config: &Config,
format: &FormatTemplate,
) -> io::Result<()> {
let output = format.generate(
entry.stripped_path(config),
config.path_separator.as_deref(),
);
// TODO: support writing raw bytes on unix?
write!(stdout, "{}", output.to_string_lossy())
}
// TODO: this function is performance critical and can probably be optimized
fn print_entry_colorized<W: Write>(
stdout: &mut W,
entry: &DirEntry,
config: &Config,
ls_colors: &LsColors,
) -> io::Result<()> {
// Split the path between the parent and the last component
let mut offset = 0;
let path = entry.stripped_path(config);
    let path_str = path.to_string_lossy();
write!(&mut io::stdout(), "{}{}", path_str, separator)
if let Some(parent) = path.parent() {
offset = parent.to_string_lossy().len();
for c in path_str[offset..].chars() {
if std::path::is_separator(c) {
offset += c.len_utf8();
} else {
break;
}
}
}
if offset > 0 {
let mut parent_str = Cow::from(&path_str[..offset]);
if let Some(ref separator) = config.path_separator {
*parent_str.to_mut() = replace_path_separator(&parent_str, separator);
}
let style = ls_colors
.style_for_indicator(Indicator::Directory)
.map(Style::to_nu_ansi_term_style)
.unwrap_or_default();
write!(stdout, "{}", style.paint(parent_str))?;
}
let style = entry
.style(ls_colors)
.map(Style::to_nu_ansi_term_style)
.unwrap_or_default();
write!(stdout, "{}", style.paint(&path_str[offset..]))?;
print_trailing_slash(
stdout,
entry,
config,
ls_colors.style_for_indicator(Indicator::Directory),
)?;
Ok(())
}
// TODO: this function is performance critical and can probably be optimized
fn print_entry_uncolorized_base<W: Write>(
    stdout: &mut W,
    entry: &DirEntry,
    config: &Config,
) -> io::Result<()> {
    let path = entry.stripped_path(config);
    let mut path_string = path.to_string_lossy();
    if let Some(ref separator) = config.path_separator {
        *path_string.to_mut() = replace_path_separator(&path_string, separator);
    }
    write!(stdout, "{}", path_string)?;
    print_trailing_slash(stdout, entry, config, None)
}
#[cfg(not(unix))]
fn print_entry_uncolorized<W: Write>(
    stdout: &mut W,
    entry: &DirEntry,
    config: &Config,
) -> io::Result<()> {
    print_entry_uncolorized_base(stdout, entry, config)
}
#[cfg(unix)]
fn print_entry_uncolorized<W: Write>(
    stdout: &mut W,
    entry: &DirEntry,
    config: &Config,
) -> io::Result<()> {
    use std::os::unix::ffi::OsStrExt;
    if config.interactive_terminal || config.path_separator.is_some() {
        // Fall back to the base implementation
        print_entry_uncolorized_base(stdout, entry, config)
    } else {
        // Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes
        stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?;
        print_trailing_slash(stdout, entry, config, None)
    }
}
}
fn get_path_style<'a>(path: &Path, ls_colors: &'a LsColors) -> Option<&'a ansi_term::Style> {
    if path
        .symlink_metadata()
        .map(|md| md.file_type().is_symlink())
        .unwrap_or(false)
    {
        return Some(&ls_colors.symlink);
    }
    let metadata = path.metadata();
    if metadata.as_ref().map(|md| md.is_dir()).unwrap_or(false) {
        Some(&ls_colors.directory)
    } else if metadata.map(|md| is_executable(&md)).unwrap_or(false) {
        Some(&ls_colors.executable)
    } else if let Some(filename_style) = path
        .file_name()
        .and_then(|n| n.to_str())
        .and_then(|n| ls_colors.filenames.get(n))
    {
        Some(filename_style)
    } else if let Some(extension_style) = path
        .extension()
        .and_then(|e| e.to_str())
        .and_then(|e| ls_colors.extensions.get(e))
    {
        Some(extension_style)
    } else {
        None
    }
}
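The colorized printer above styles the parent directory and the final path component separately. A reduced sketch of that split using raw ANSI codes in place of LS_COLORS styles:

use std::path::Path;

/// Split a path into (parent including trailing separator, file name), the way
/// the colorized printer separates the two parts before styling them.
fn split_for_coloring(path: &Path) -> (String, String) {
    let name = path
        .file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_default();
    let parent = match path.parent() {
        Some(p) if !p.as_os_str().is_empty() => {
            format!("{}{}", p.to_string_lossy(), std::path::MAIN_SEPARATOR)
        }
        _ => String::new(),
    };
    (parent, name)
}

fn main() {
    let (parent, name) = split_for_coloring(Path::new("src/output.rs"));
    // Dim the directory part, bold the file name (raw ANSI, a stand-in for LS_COLORS styles).
    println!("\x1B[2m{}\x1B[0m\x1B[1m{}\x1B[0m", parent, name);
}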

View File

@ -1,105 +0,0 @@
use regex_syntax::hir::Hir;
use regex_syntax::ParserBuilder;
/// Determine if a regex pattern contains a literal uppercase character.
pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
let mut parser = ParserBuilder::new().utf8(false).build();
parser
.parse(pattern)
.map(|hir| hir_has_uppercase_char(&hir))
.unwrap_or(false)
}
/// Determine if a regex expression contains a literal uppercase character.
fn hir_has_uppercase_char(hir: &Hir) -> bool {
use regex_syntax::hir::*;
match hir.kind() {
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {
Ok(s) => s.chars().any(|c| c.is_uppercase()),
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
},
HirKind::Class(Class::Unicode(ranges)) => ranges
.iter()
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
HirKind::Class(Class::Bytes(ranges)) => ranges
.iter()
.any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),
HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => {
hir_has_uppercase_char(sub)
}
HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {
hirs.iter().any(hir_has_uppercase_char)
}
_ => false,
}
}
/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)
pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {
let mut parser = ParserBuilder::new().utf8(false).build();
parser
.parse(pattern)
.map(|hir| hir_matches_strings_with_leading_dot(&hir))
.unwrap_or(false)
}
/// See above.
fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
use regex_syntax::hir::*;
// Note: this only really detects the simplest case where a regex starts with
// "^\\.", i.e. a start text anchor and a literal dot character. There are a lot
// of other patterns that ONLY match hidden files, e.g. ^(\\.foo|\\.bar) which are
// not (yet) detected by this algorithm.
match hir.kind() {
HirKind::Concat(hirs) => {
let mut hirs = hirs.iter();
if let Some(hir) = hirs.next() {
if hir.kind() != &HirKind::Look(Look::Start) {
return false;
}
} else {
return false;
}
if let Some(hir) = hirs.next() {
match hir.kind() {
HirKind::Literal(Literal(bytes)) => bytes.starts_with(b"."),
_ => false,
}
} else {
false
}
}
_ => false,
}
}
#[test]
fn pattern_has_uppercase_char_simple() {
assert!(pattern_has_uppercase_char("A"));
assert!(pattern_has_uppercase_char("foo.EXE"));
assert!(!pattern_has_uppercase_char("a"));
assert!(!pattern_has_uppercase_char("foo.exe123"));
}
#[test]
fn pattern_has_uppercase_char_advanced() {
assert!(pattern_has_uppercase_char("foo.[a-zA-Z]"));
assert!(!pattern_has_uppercase_char(r"\Acargo"));
assert!(!pattern_has_uppercase_char(r"carg\x6F"));
}
#[test]
fn matches_strings_with_leading_dot_simple() {
assert!(pattern_matches_strings_with_leading_dot("^\\.gitignore"));
assert!(!pattern_matches_strings_with_leading_dot("^.gitignore"));
assert!(!pattern_matches_strings_with_leading_dot("\\.gitignore"));
assert!(!pattern_matches_strings_with_leading_dot("^gitignore"));
}

View File

@ -1,30 +1,32 @@
use std::borrow::Cow;
use std::ffi::OsStr;
use std::io::{self, Write};
use std::mem;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, MutexGuard};
use std::thread;
use std::time::{Duration, Instant};
use anyhow::{anyhow, Result};
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, SendError, Sender};
use etcetera::BaseStrategy;
use ignore::overrides::{Override, OverrideBuilder};
use ignore::{WalkBuilder, WalkParallel, WalkState};
use regex::bytes::Regex;
use crate::config::Config;
use crate::dir_entry::DirEntry;
use crate::error::print_error;
use crate::exec;
use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::filesystem;
use crate::output;
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
extern crate ctrlc;
use exec;
use fshelper;
use internal::{error, FdOptions, EXITCODE_SIGINT, MAX_BUFFER_LENGTH};
use output;
use std::error::Error;
use std::path::PathBuf;
use std::process;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time;
use ignore::overrides::OverrideBuilder;
use ignore::{self, WalkBuilder};
use regex::Regex;
/// The receiver thread can either be buffering results or directly streaming to the console.
#[derive(PartialEq)]
enum ReceiverMode {
    /// Receiver is still buffering in order to sort the results, if the search finishes fast
    /// enough.
@ -34,640 +36,269 @@ enum ReceiverMode {
    Streaming,
}
/// The Worker threads can result in a valid entry having PathBuf or an error.
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum WorkerResult {
// Errors should be rare, so it's probably better to allow large_enum_variant than
// to box the Entry variant
Entry(DirEntry),
Error(ignore::Error),
}
/// A batch of WorkerResults to send over a channel.
#[derive(Clone)]
struct Batch {
items: Arc<Mutex<Option<Vec<WorkerResult>>>>,
}
impl Batch {
fn new() -> Self {
Self {
items: Arc::new(Mutex::new(Some(vec![]))),
}
}
fn lock(&self) -> MutexGuard<'_, Option<Vec<WorkerResult>>> {
self.items.lock().unwrap()
}
}
impl IntoIterator for Batch {
type Item = WorkerResult;
type IntoIter = std::vec::IntoIter<WorkerResult>;
fn into_iter(self) -> Self::IntoIter {
self.lock().take().unwrap().into_iter()
}
}
/// Wrapper that sends batches of items at once over a channel.
struct BatchSender {
batch: Batch,
tx: Sender<Batch>,
limit: usize,
}
impl BatchSender {
fn new(tx: Sender<Batch>, limit: usize) -> Self {
Self {
batch: Batch::new(),
tx,
limit,
}
}
/// Check if we need to flush a batch.
fn needs_flush(&self, batch: Option<&Vec<WorkerResult>>) -> bool {
match batch {
// Limit the batch size to provide some backpressure
Some(vec) => vec.len() >= self.limit,
// Batch was already taken by the receiver, so make a new one
None => true,
}
}
/// Add an item to a batch.
fn send(&mut self, item: WorkerResult) -> Result<(), SendError<()>> {
let mut batch = self.batch.lock();
if self.needs_flush(batch.as_ref()) {
drop(batch);
self.batch = Batch::new();
batch = self.batch.lock();
}
let items = batch.as_mut().unwrap();
items.push(item);
if items.len() == 1 {
// New batch, send it over the channel
self.tx
.send(self.batch.clone())
.map_err(|_| SendError(()))?;
}
Ok(())
}
}
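The BatchSender above exists to amortize channel traffic: results are grouped so the channel sees whole batches rather than one message per entry. A simplified, std-mpsc sketch of the same idea (fd itself uses crossbeam-channel and a shared, lockable batch):

use std::sync::mpsc::{channel, Sender};

/// Accumulate items locally and send them as whole batches, so the channel
/// carries one message per `limit` items instead of one per item.
struct BatchSender<T> {
    buf: Vec<T>,
    tx: Sender<Vec<T>>,
    limit: usize,
}

impl<T> BatchSender<T> {
    fn new(tx: Sender<Vec<T>>, limit: usize) -> Self {
        Self { buf: Vec::with_capacity(limit), tx, limit }
    }
    fn send(&mut self, item: T) {
        self.buf.push(item);
        if self.buf.len() >= self.limit {
            self.flush();
        }
    }
    fn flush(&mut self) {
        if !self.buf.is_empty() {
            // Ignore a closed receiver in this sketch.
            let _ = self.tx.send(std::mem::take(&mut self.buf));
        }
    }
}

fn main() {
    let (tx, rx) = channel();
    let mut sender = BatchSender::new(tx, 3);
    for i in 0..10 {
        sender.send(i);
    }
    sender.flush(); // push out the final partial batch
    drop(sender);   // close the channel so the receiver loop ends
    for batch in rx {
        println!("{:?}", batch);
    }
}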
/// Maximum size of the output buffer before flushing results to the console
const MAX_BUFFER_LENGTH: usize = 1000;
/// Default duration until output buffering switches to streaming.
const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);
/// Wrapper for the receiver thread's buffering behavior.
struct ReceiverBuffer<'a, W> {
/// The configuration.
config: &'a Config,
/// For shutting down the senders.
quit_flag: &'a AtomicBool,
/// The ^C notifier.
interrupt_flag: &'a AtomicBool,
/// Receiver for worker results.
rx: Receiver<Batch>,
/// Standard output.
stdout: W,
/// The current buffer mode.
mode: ReceiverMode,
/// The deadline to switch to streaming mode.
deadline: Instant,
/// The buffer of quickly received paths.
buffer: Vec<DirEntry>,
/// Result count.
num_results: usize,
}
impl<'a, W: Write> ReceiverBuffer<'a, W> {
/// Create a new receiver buffer.
fn new(state: &'a WorkerState, rx: Receiver<Batch>, stdout: W) -> Self {
let config = &state.config;
let quit_flag = state.quit_flag.as_ref();
let interrupt_flag = state.interrupt_flag.as_ref();
let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME);
let deadline = Instant::now() + max_buffer_time;
Self {
config,
quit_flag,
interrupt_flag,
rx,
stdout,
mode: ReceiverMode::Buffering,
deadline,
buffer: Vec::with_capacity(MAX_BUFFER_LENGTH),
num_results: 0,
}
}
/// Process results until finished.
fn process(&mut self) -> ExitCode {
loop {
if let Err(ec) = self.poll() {
self.quit_flag.store(true, Ordering::Relaxed);
return ec;
}
}
}
/// Receive the next worker result.
fn recv(&self) -> Result<Batch, RecvTimeoutError> {
match self.mode {
ReceiverMode::Buffering => {
// Wait at most until we should switch to streaming
self.rx.recv_deadline(self.deadline)
}
ReceiverMode::Streaming => {
// Wait however long it takes for a result
Ok(self.rx.recv()?)
}
}
}
/// Wait for a result or state change.
fn poll(&mut self) -> Result<(), ExitCode> {
match self.recv() {
Ok(batch) => {
for result in batch {
match result {
WorkerResult::Entry(dir_entry) => {
if self.config.quiet {
return Err(ExitCode::HasResults(true));
}
match self.mode {
ReceiverMode::Buffering => {
self.buffer.push(dir_entry);
if self.buffer.len() > MAX_BUFFER_LENGTH {
self.stream()?;
}
}
ReceiverMode::Streaming => {
self.print(&dir_entry)?;
}
}
self.num_results += 1;
if let Some(max_results) = self.config.max_results {
if self.num_results >= max_results {
return self.stop();
}
}
}
WorkerResult::Error(err) => {
if self.config.show_filesystem_errors {
print_error(err.to_string());
}
}
}
}
// If we don't have another batch ready, flush before waiting
if self.mode == ReceiverMode::Streaming && self.rx.is_empty() {
self.flush()?;
}
}
Err(RecvTimeoutError::Timeout) => {
self.stream()?;
}
Err(RecvTimeoutError::Disconnected) => {
return self.stop();
}
}
Ok(())
}
/// Output a path.
fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> {
if let Err(e) = output::print_entry(&mut self.stdout, entry, self.config) {
if e.kind() != ::std::io::ErrorKind::BrokenPipe {
print_error(format!("Could not write to output: {}", e));
return Err(ExitCode::GeneralError);
}
}
if self.interrupt_flag.load(Ordering::Relaxed) {
// Ignore any errors on flush, because we're about to exit anyway
let _ = self.flush();
return Err(ExitCode::KilledBySigint);
}
Ok(())
}
/// Switch ourselves into streaming mode.
fn stream(&mut self) -> Result<(), ExitCode> {
self.mode = ReceiverMode::Streaming;
let buffer = mem::take(&mut self.buffer);
for path in buffer {
self.print(&path)?;
}
self.flush()
}
/// Stop looping.
fn stop(&mut self) -> Result<(), ExitCode> {
if self.mode == ReceiverMode::Buffering {
self.buffer.sort();
self.stream()?;
}
if self.config.quiet {
Err(ExitCode::HasResults(self.num_results > 0))
} else {
Err(ExitCode::Success)
}
}
/// Flush stdout if necessary.
fn flush(&mut self) -> Result<(), ExitCode> {
if self.stdout.flush().is_err() {
// Probably a broken pipe. Exit gracefully.
return Err(ExitCode::GeneralError);
}
Ok(())
}
}
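The ReceiverBuffer above buffers results until a deadline or size cap is hit, then sorts and flushes before switching to streaming. A self-contained sketch of that policy, with std::sync::mpsc::recv_timeout standing in for crossbeam's recv_deadline:

use std::sync::mpsc::{channel, RecvTimeoutError};
use std::thread;
use std::time::{Duration, Instant};

const MAX_BUFFER_LEN: usize = 4;

fn main() {
    let (tx, rx) = channel();
    thread::spawn(move || {
        for name in ["b.txt", "a.txt", "d.txt", "c.txt", "e.txt", "f.txt"] {
            tx.send(name.to_string()).unwrap();
            thread::sleep(Duration::from_millis(20));
        }
    });

    let deadline = Instant::now() + Duration::from_millis(100);
    let mut buffer: Vec<String> = Vec::new();
    let mut streaming = false;

    loop {
        // While buffering, wait only until the deadline; afterwards block normally.
        let next = if streaming {
            rx.recv().map_err(|_| RecvTimeoutError::Disconnected)
        } else {
            rx.recv_timeout(deadline.saturating_duration_since(Instant::now()))
        };
        match next {
            Ok(item) if !streaming => {
                buffer.push(item);
                // Too many buffered results or deadline passed: sort what we have and stream.
                if buffer.len() >= MAX_BUFFER_LEN || Instant::now() >= deadline {
                    buffer.sort();
                    buffer.drain(..).for_each(|p| println!("{}", p));
                    streaming = true;
                }
            }
            Ok(item) => println!("{}", item),
            Err(RecvTimeoutError::Timeout) => {
                // Deadline hit with few results: flush them sorted, then stream.
                buffer.sort();
                buffer.drain(..).for_each(|p| println!("{}", p));
                streaming = true;
            }
            Err(RecvTimeoutError::Disconnected) => {
                buffer.sort();
                buffer.drain(..).for_each(|p| println!("{}", p));
                break;
            }
        }
    }
}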
/// State shared by the sender and receiver threads.
struct WorkerState {
/// The search patterns.
patterns: Vec<Regex>,
/// The command line configuration.
config: Config,
/// Flag for cleanly shutting down the parallel walk
quit_flag: Arc<AtomicBool>,
/// Flag specifically for quitting due to ^C
interrupt_flag: Arc<AtomicBool>,
}
impl WorkerState {
fn new(patterns: Vec<Regex>, config: Config) -> Self {
let quit_flag = Arc::new(AtomicBool::new(false));
let interrupt_flag = Arc::new(AtomicBool::new(false));
Self {
patterns,
config,
quit_flag,
interrupt_flag,
}
}
fn build_overrides(&self, paths: &[PathBuf]) -> Result<Override> {
let first_path = &paths[0];
let config = &self.config;
let mut builder = OverrideBuilder::new(first_path);
for pattern in &config.exclude_patterns {
builder
.add(pattern)
.map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?;
}
builder
.build()
.map_err(|_| anyhow!("Mismatch in exclude patterns"))
}
fn build_walker(&self, paths: &[PathBuf]) -> Result<WalkParallel> {
let first_path = &paths[0];
let config = &self.config;
let overrides = self.build_overrides(paths)?;
let mut builder = WalkBuilder::new(first_path);
builder
.hidden(config.ignore_hidden)
.ignore(config.read_fdignore)
.parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore))
.git_ignore(config.read_vcsignore)
.git_global(config.read_vcsignore)
.git_exclude(config.read_vcsignore)
.require_git(config.require_git_to_read_vcsignore)
.overrides(overrides)
.follow_links(config.follow_links)
// No need to check for supported platforms, option is unavailable on unsupported ones
.same_file_system(config.one_file_system)
.max_depth(config.max_depth);
if config.read_fdignore {
builder.add_custom_ignore_filename(".fdignore");
}
if config.read_global_ignore {
if let Ok(basedirs) = etcetera::choose_base_strategy() {
let global_ignore_file = basedirs.config_dir().join("fd").join("ignore");
if global_ignore_file.is_file() {
let result = builder.add_ignore(global_ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!(
"Malformed pattern in global ignore file. {}.",
err
));
}
None => (),
}
}
}
}
for ignore_file in &config.ignore_files {
let result = builder.add_ignore(ignore_file);
match result {
Some(ignore::Error::Partial(_)) => (),
Some(err) => {
print_error(format!("Malformed pattern in custom ignore file. {}.", err));
}
None => (),
}
}
for path in &paths[1..] {
builder.add(path);
}
let walker = builder.threads(config.threads).build_parallel();
Ok(walker)
}
/// Run the receiver work, either on this thread or a pool of background
/// threads (for --exec).
fn receive(&self, rx: Receiver<Batch>) -> ExitCode {
let config = &self.config;
// This will be set to `Some` if the `--exec` argument was supplied.
if let Some(ref cmd) = config.command {
if cmd.in_batch_mode() {
exec::batch(rx.into_iter().flatten(), cmd, config)
} else {
let out_perm = Mutex::new(());
thread::scope(|scope| {
// Each spawned job will store its thread handle in here.
let threads = config.threads;
let mut handles = Vec::with_capacity(threads);
for _ in 0..threads {
let rx = rx.clone();
// Spawn a job thread that will listen for and execute inputs.
let handle = scope
.spawn(|| exec::job(rx.into_iter().flatten(), cmd, &out_perm, config));
// Push the handle of the spawned thread into the vector for later joining.
handles.push(handle);
}
let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap());
merge_exitcodes(exit_codes)
})
}
} else {
let stdout = io::stdout().lock();
let stdout = io::BufWriter::new(stdout);
ReceiverBuffer::new(self, rx, stdout).process()
}
}
/// Spawn the sender threads.
fn spawn_senders(&self, walker: WalkParallel, tx: Sender<Batch>) {
walker.run(|| {
let patterns = &self.patterns;
let config = &self.config;
let quit_flag = self.quit_flag.as_ref();
let mut limit = 0x100;
if let Some(cmd) = &config.command {
if !cmd.in_batch_mode() && config.threads > 1 {
// Evenly distribute work between multiple receivers
limit = 1;
}
}
let mut tx = BatchSender::new(tx.clone(), limit);
Box::new(move |entry| {
if quit_flag.load(Ordering::Relaxed) {
return WalkState::Quit;
}
let entry = match entry {
Ok(ref e) if e.depth() == 0 => {
// Skip the root directory entry.
return WalkState::Continue;
}
Ok(e) => DirEntry::normal(e),
Err(ignore::Error::WithPath {
path,
err: inner_err,
}) => match inner_err.as_ref() {
ignore::Error::Io(io_error)
if io_error.kind() == io::ErrorKind::NotFound
&& path
.symlink_metadata()
.ok()
.map_or(false, |m| m.file_type().is_symlink()) =>
{
DirEntry::broken_symlink(path)
}
_ => {
return match tx.send(WorkerResult::Error(ignore::Error::WithPath {
path,
err: inner_err,
})) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
}
},
Err(err) => {
return match tx.send(WorkerResult::Error(err)) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
}
};
if let Some(min_depth) = config.min_depth {
if entry.depth().map_or(true, |d| d < min_depth) {
return WalkState::Continue;
}
}
// Check the name first, since it doesn't require metadata
let entry_path = entry.path();
let search_str: Cow<OsStr> = if config.search_full_path {
let path_abs_buf = filesystem::path_absolute_form(entry_path)
.expect("Retrieving absolute path succeeds");
Cow::Owned(path_abs_buf.as_os_str().to_os_string())
} else {
match entry_path.file_name() {
Some(filename) => Cow::Borrowed(filename),
None => unreachable!(
"Encountered file system entry without a file name. This should only \
happen for paths like 'foo/bar/..' or '/' which are not supposed to \
appear in a file system traversal."
),
}
};
if !patterns
.iter()
.all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())))
{
return WalkState::Continue;
}
// Filter out unwanted extensions.
if let Some(ref exts_regex) = config.extensions {
if let Some(path_str) = entry_path.file_name() {
if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) {
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
}
// Filter out unwanted file types.
if let Some(ref file_types) = config.file_types {
if file_types.should_ignore(&entry) {
return WalkState::Continue;
}
}
#[cfg(unix)]
{
if let Some(ref owner_constraint) = config.owner_constraint {
if let Some(metadata) = entry.metadata() {
if !owner_constraint.matches(metadata) {
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
}
}
// Filter out unwanted sizes if it is a file and we have been given size constraints.
if !config.size_constraints.is_empty() {
if entry_path.is_file() {
if let Some(metadata) = entry.metadata() {
let file_size = metadata.len();
if config
.size_constraints
.iter()
.any(|sc| !sc.is_within(file_size))
{
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
} else {
return WalkState::Continue;
}
}
// Filter out unwanted modification times
if !config.time_constraints.is_empty() {
let mut matched = false;
if let Some(metadata) = entry.metadata() {
if let Ok(modified) = metadata.modified() {
matched = config
.time_constraints
.iter()
.all(|tf| tf.applies_to(&modified));
}
}
if !matched {
return WalkState::Continue;
}
}
if config.is_printing() {
if let Some(ls_colors) = &config.ls_colors {
// Compute colors in parallel
entry.style(ls_colors);
}
}
let send_result = tx.send(WorkerResult::Entry(entry));
if send_result.is_err() {
return WalkState::Quit;
}
// Apply pruning.
if config.prune {
return WalkState::Skip;
}
WalkState::Continue
})
});
}
/// Perform the recursive scan.
fn scan(&self, paths: &[PathBuf]) -> Result<ExitCode> {
let config = &self.config;
let walker = self.build_walker(paths)?;
if config.ls_colors.is_some() && config.is_printing() {
let quit_flag = Arc::clone(&self.quit_flag);
let interrupt_flag = Arc::clone(&self.interrupt_flag);
ctrlc::set_handler(move || {
quit_flag.store(true, Ordering::Relaxed);
if interrupt_flag.fetch_or(true, Ordering::Relaxed) {
// Ctrl-C has been pressed twice, exit NOW
ExitCode::KilledBySigint.exit();
}
})
.unwrap();
}
let (tx, rx) = bounded(2 * config.threads);
let exit_code = thread::scope(|scope| {
// Spawn the receiver thread(s)
let receiver = scope.spawn(|| self.receive(rx));
// Spawn the sender threads.
self.spawn_senders(walker, tx);
receiver.join().unwrap()
});
if self.interrupt_flag.load(Ordering::Relaxed) {
Ok(ExitCode::KilledBySigint)
} else {
Ok(exit_code)
}
}
}
/// Recursively scan the given search path for files / pathnames matching the patterns.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
pub fn scan(paths: &[PathBuf], patterns: Vec<Regex>, config: Config) -> Result<ExitCode> {
    WorkerState::new(patterns, config).scan(paths)
/// Recursively scan the given search path for files / pathnames matching the pattern.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
pub fn scan(path_vec: &[PathBuf], pattern: Arc<Regex>, config: Arc<FdOptions>) {
    let mut path_iter = path_vec.iter();
let first_path_buf = path_iter
.next()
.expect("Error: Path vector can not be empty");
let (tx, rx) = channel();
let threads = config.threads;
let mut override_builder = OverrideBuilder::new(first_path_buf.as_path());
for pattern in &config.exclude_patterns {
let res = override_builder.add(pattern);
if res.is_err() {
error(&format!("Error: malformed exclude pattern '{}'", pattern));
}
}
let overrides = override_builder.build().unwrap_or_else(|_| {
error("Mismatch in exclude patterns");
});
let mut walker = WalkBuilder::new(first_path_buf.as_path());
walker
.hidden(config.ignore_hidden)
.ignore(false)
.parents(config.read_fdignore || config.read_vcsignore)
.git_ignore(config.read_vcsignore)
.git_global(config.read_vcsignore)
.git_exclude(config.read_vcsignore)
.overrides(overrides)
.follow_links(config.follow_links)
.max_depth(config.max_depth);
if config.read_fdignore {
walker.add_custom_ignore_filename(".fdignore");
}
for ignore_file in &config.ignore_files {
let result = walker.add_ignore(ignore_file);
if let Some(err) = result {
match err {
ignore::Error::Partial(_) => (),
_ => {
error(&format!(
"Error while parsing custom ignore file '{}': {}.",
ignore_file.to_string_lossy(),
err.description()
));
}
}
}
}
for path_entry in path_iter {
walker.add(path_entry.as_path());
}
let parallel_walker = walker.threads(threads).build_parallel();
let wants_to_quit = Arc::new(AtomicBool::new(false));
let receiver_wtq = Arc::clone(&wants_to_quit);
let sender_wtq = Arc::clone(&wants_to_quit);
if config.ls_colors.is_some() && config.command.is_none() {
let wq = Arc::clone(&receiver_wtq);
ctrlc::set_handler(move || {
wq.store(true, Ordering::Relaxed);
}).unwrap();
}
// Spawn the thread that receives all results through the channel.
let rx_config = Arc::clone(&config);
let receiver_thread = thread::spawn(move || {
// This will be set to `Some` if the `--exec` argument was supplied.
if let Some(ref cmd) = rx_config.command {
let shared_rx = Arc::new(Mutex::new(rx));
let out_perm = Arc::new(Mutex::new(()));
// TODO: the following line is a workaround to replace the `unsafe` block that was
// previously used here to avoid the (unnecessary?) cloning of the command. The
// `unsafe` block caused problems on some platforms (SIGILL instructions on Linux) and
// therefore had to be removed.
let cmd = Arc::new(cmd.clone());
// Each spawned job will store it's thread handle in here.
let mut handles = Vec::with_capacity(threads);
for _ in 0..threads {
let rx = Arc::clone(&shared_rx);
let cmd = Arc::clone(&cmd);
let out_perm = Arc::clone(&out_perm);
// Spawn a job thread that will listen for and execute inputs.
let handle = thread::spawn(move || exec::job(rx, cmd, out_perm));
// Push the handle of the spawned thread into the vector for later joining.
handles.push(handle);
}
// Wait for all threads to exit before exiting the program.
for h in handles {
h.join().unwrap();
}
} else {
let start = time::Instant::now();
let mut buffer = vec![];
// Start in buffering mode
let mut mode = ReceiverMode::Buffering;
// Maximum time to wait before we start streaming to the console.
let max_buffer_time = rx_config
.max_buffer_time
.unwrap_or_else(|| time::Duration::from_millis(100));
for value in rx {
match mode {
ReceiverMode::Buffering => {
buffer.push(value);
// Have we reached the maximum buffer size or maximum buffering time?
if buffer.len() > MAX_BUFFER_LENGTH
|| time::Instant::now() - start > max_buffer_time
{
// Flush the buffer
for v in &buffer {
output::print_entry(v, &rx_config, &receiver_wtq);
}
buffer.clear();
// Start streaming
mode = ReceiverMode::Streaming;
}
}
ReceiverMode::Streaming => {
output::print_entry(&value, &rx_config, &receiver_wtq);
}
}
}
// If we have finished fast enough (faster than max_buffer_time), we haven't streamed
// anything to the console, yet. In this case, sort the results and print them:
if !buffer.is_empty() {
buffer.sort();
for value in buffer {
output::print_entry(&value, &rx_config, &receiver_wtq);
}
}
}
});
// Spawn the sender threads.
parallel_walker.run(|| {
let config = Arc::clone(&config);
let pattern = Arc::clone(&pattern);
let tx_thread = tx.clone();
let wants_to_quit = Arc::clone(&sender_wtq);
Box::new(move |entry_o| {
if wants_to_quit.load(Ordering::Relaxed) {
return ignore::WalkState::Quit;
}
let entry = match entry_o {
Ok(e) => e,
Err(_) => return ignore::WalkState::Continue,
};
if entry.depth() == 0 {
return ignore::WalkState::Continue;
}
// Filter out unwanted file types.
if let Some(ref file_types) = config.file_types {
if let Some(ref entry_type) = entry.file_type() {
if (!file_types.files && entry_type.is_file())
|| (!file_types.directories && entry_type.is_dir())
|| (!file_types.symlinks && entry_type.is_symlink())
|| (file_types.executables_only && !entry
.metadata()
.map(|m| fshelper::is_executable(&m))
.unwrap_or(false))
|| (file_types.empty_only && !fshelper::is_empty(&entry))
{
return ignore::WalkState::Continue;
} else if !(entry_type.is_file()
|| entry_type.is_dir()
|| entry_type.is_symlink())
{
// This is probably a block device, char device, fifo or socket. Skip it.
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
}
let entry_path = entry.path();
// Filter out unwanted extensions.
if let Some(ref exts_regex) = config.extensions {
if let Some(path_str) = entry_path.file_name().map_or(None, |s| s.to_str()) {
if !exts_regex.is_match(path_str) {
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
}
// Filter out unwanted sizes if it is a file and we have been given size constraints.
if config.size_constraints.len() > 0 {
if entry_path.is_file() {
if let Ok(metadata) = entry_path.metadata() {
let file_size = metadata.len();
if config
.size_constraints
.iter()
.any(|sc| !sc.is_within(file_size))
{
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
} else {
return ignore::WalkState::Continue;
}
}
let search_str_o = if config.search_full_path {
match fshelper::path_absolute_form(entry_path) {
Ok(path_abs_buf) => Some(path_abs_buf.to_string_lossy().into_owned().into()),
Err(_) => error("Error: unable to get full path."),
}
} else {
entry_path.file_name().map(|f| f.to_string_lossy())
};
if let Some(search_str) = search_str_o {
if pattern.is_match(&*search_str) {
// TODO: take care of the unwrap call
tx_thread.send(entry_path.to_owned()).unwrap()
}
}
ignore::WalkState::Continue
})
});
// Drop the initial sender. If we don't do this, the receiver will block even
// if all threads have finished, since there is still one sender around.
drop(tx);
// Wait for the receiver thread to print out all results.
receiver_thread.join().unwrap();
if wants_to_quit.load(Ordering::Relaxed) {
process::exit(EXITCODE_SIGINT);
}
}
}
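Both versions of scan share the same producer/consumer shape: a walker feeds a channel, and either a printer or a pool of --exec workers drains it. A std-only sketch of that fan-out, with a fixed path list standing in for the parallel walker:

use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let (tx, rx) = channel::<String>();
    // Share one receiver between several workers, as the --exec path above does.
    let rx = Arc::new(Mutex::new(rx));

    let mut workers = Vec::new();
    for id in 0..3 {
        let rx = Arc::clone(&rx);
        workers.push(thread::spawn(move || loop {
            // Take the lock just to pull one unit of work from the shared receiver.
            let msg = rx.lock().unwrap().recv();
            match msg {
                Ok(path) => println!("worker {} would run a command on {}", id, path),
                Err(_) => break, // channel closed: all senders are gone
            }
        }));
    }

    // Stand-in for the parallel walker: a fixed set of "discovered" paths.
    for path in ["src/main.rs", "src/walk.rs", "Cargo.toml"] {
        tx.send(path.to_string()).unwrap();
    }
    drop(tx); // close the channel so the workers can finish

    for handle in workers {
        handle.join().unwrap();
    }
}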

View File

@ -1,14 +1,29 @@
// Copyright (c) 2017 fd developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std;
use std::env;
use std::fs;
use std::io::{self, Write};
#[cfg(unix)]
use std::os::unix;
#[cfg(windows)]
use std::os::windows;
use std::path::{Path, PathBuf};
use std::process;
use tempfile::TempDir;
use std::env;
use std::fs;
use std::io;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process;
#[cfg(unix)]
use std::os::unix;
#[cfg(windows)]
use std::os::windows;
extern crate diff;
extern crate tempdir;
use self::tempdir::TempDir;
/// Environment for the integration tests.
pub struct TestEnv {
@ -17,12 +32,6 @@ pub struct TestEnv {
    /// Path to the *fd* executable.
    fd_exe: PathBuf,
/// Normalize each line by sorting the whitespace-separated words
normalize_line: bool,
/// Temporary directory for storing test config (global ignore file)
config_dir: Option<TempDir>,
}
/// Create the working directory and the test files.
@ -30,7 +39,7 @@ fn create_working_directory(
    directories: &[&'static str],
    files: &[&'static str],
) -> Result<TempDir, io::Error> {
    let temp_dir = tempfile::Builder::new().prefix("fd-tests").tempdir()?;
    let temp_dir = TempDir::new("fd-tests")?;
    {
        let root = temp_dir.path();
@ -62,16 +71,6 @@ fn create_working_directory(
    Ok(temp_dir)
}
fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {
let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?;
let fd_dir = config_dir.path().join("fd");
fs::create_dir(&fd_dir)?;
let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?;
ignore_file.write_all(ignore_file_content.as_bytes())?;
Ok(config_dir)
}
/// Find the *fd* executable.
fn find_fd_exe() -> PathBuf {
    // Tests exe is in target/debug/deps, the *fd* exe is in target/debug
@ -107,8 +106,7 @@ fn format_output_error(args: &[&str], expected: &str, actual: &str) -> String {
            diff::Result::Left(l) => format!("-{}", l),
            diff::Result::Both(l, _) => format!(" {}", l),
            diff::Result::Right(r) => format!("+{}", r),
        })
        .collect::<Vec<_>>()
        }).collect::<Vec<_>>()
        .join("\n");
    format!(
@ -122,151 +120,48 @@ fn format_output_error(args: &[&str], expected: &str, actual: &str) -> String {
}
/// Normalize the output for comparison.
fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
    // Split into lines and normalize separators.
    let mut lines = s
        .replace('\0', "NULL\n")
        .lines()
        .map(|line| {
            let line = if trim_start { line.trim_start() } else { line };
            let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);
            if normalize_line {
                let mut words: Vec<_> = line.split_whitespace().collect();
                words.sort_unstable();
                return words.join(" ");
            }
            line
        })
        .collect::<Vec<_>>();
    lines.sort();
    lines.join("\n")
}
fn normalize_output(s: &str, trim_left: bool) -> String {
    // Split into lines and normalize separators.
    let mut lines = s
        .replace('\0', "NULL\n")
        .lines()
        .map(|line| {
            let line = if trim_left { line.trim_left() } else { line };
            line.replace('/', &std::path::MAIN_SEPARATOR.to_string())
        }).collect::<Vec<_>>();
    lines.sort_by_key(|s| s.clone());
    lines.join("\n")
}
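Both variants of normalize_output exist so the integration tests can compare fd's stdout to an expected listing independent of ordering and path separators. A stand-alone sketch of that comparison (the real harness also handles NUL separators and optional word sorting):

use std::path::MAIN_SEPARATOR;

/// Normalize a block of output for comparison: convert '/' to the platform
/// separator, trim leading whitespace, drop blank lines, and sort so ordering
/// differences between runs do not matter.
fn normalize(s: &str) -> Vec<String> {
    let mut lines: Vec<String> = s
        .lines()
        .map(|l| l.trim_start().replace('/', &MAIN_SEPARATOR.to_string()))
        .filter(|l| !l.is_empty())
        .collect();
    lines.sort();
    lines
}

fn main() {
    let expected = "
        one/two/c.foo
        one/b.foo
        a.foo";
    let actual = "a.foo\none/b.foo\none/two/c.foo\n";
    assert_eq!(normalize(expected), normalize(actual));
    println!("outputs match after normalization");
}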
/// Trim whitespace from the beginning of each line.
fn trim_lines(s: &str) -> String {
s.lines()
.map(|line| line.trim_start())
.fold(String::new(), |mut str, line| {
str.push_str(line);
str.push('\n');
str
})
}
impl TestEnv {
    pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv {
        let temp_dir = create_working_directory(directories, files).expect("working directory");
        let fd_exe = find_fd_exe();
        TestEnv {
            temp_dir,
            fd_exe,
            normalize_line: false,
            config_dir: None,
        }
        TestEnv {
            temp_dir: temp_dir,
            fd_exe: fd_exe,
        }
    }
pub fn normalize_line(self, normalize: bool) -> TestEnv {
TestEnv {
temp_dir: self.temp_dir,
fd_exe: self.fd_exe,
normalize_line: normalize,
config_dir: self.config_dir,
}
}
pub fn global_ignore_file(self, content: &str) -> TestEnv {
let config_dir =
create_config_directory_with_global_ignore(content).expect("config directory");
TestEnv {
config_dir: Some(config_dir),
..self
}
}
/// Create a broken symlink at the given path in the temp_dir.
pub fn create_broken_symlink<P: AsRef<Path>>(
&mut self,
link_path: P,
) -> Result<PathBuf, io::Error> {
let root = self.test_root();
let broken_symlink_link = root.join(link_path);
{
let temp_target_dir = tempfile::Builder::new()
.prefix("fd-tests-broken-symlink")
.tempdir()?;
let broken_symlink_target = temp_target_dir.path().join("broken_symlink_target");
fs::File::create(&broken_symlink_target)?;
#[cfg(unix)]
unix::fs::symlink(&broken_symlink_target, &broken_symlink_link)?;
#[cfg(windows)]
windows::fs::symlink_file(&broken_symlink_target, &broken_symlink_link)?;
}
Ok(broken_symlink_link)
}
/// Get the root directory for the tests. /// Get the root directory for the tests.
pub fn test_root(&self) -> PathBuf { pub fn test_root(&self) -> PathBuf {
self.temp_dir.path().to_path_buf() self.temp_dir.path().to_path_buf()
} }
/// Get the path of the fd executable.
#[cfg_attr(windows, allow(unused))]
pub fn test_exe(&self) -> &PathBuf {
&self.fd_exe
}
    /// Get the root directory of the file system.
    pub fn system_root(&self) -> PathBuf {
        let mut components = self.temp_dir.path().components();
        PathBuf::from(components.next().expect("root directory").as_os_str())
    }
/// Assert that calling *fd* in the specified path under the root working directory,
/// and with the specified arguments produces the expected output.
pub fn assert_success_and_get_output<P: AsRef<Path>>(
&self,
path: P,
args: &[&str],
) -> process::Output {
// Run *fd*.
let output = self.run_command(path.as_ref(), args);
// Check for exit status.
if !output.status.success() {
panic!("{}", format_exit_error(args, &output));
}
output
}
pub fn assert_success_and_get_normalized_output<P: AsRef<Path>>(
&self,
path: P,
args: &[&str],
) -> String {
let output = self.assert_success_and_get_output(path, args);
normalize_output(
&String::from_utf8_lossy(&output.stdout),
false,
self.normalize_line,
)
}
    /// Assert that calling *fd* with the specified arguments produces the expected output.
    pub fn assert_output(&self, args: &[&str], expected: &str) {
        self.assert_output_subdirectory(".", args, expected)
    }
/// Similar to assert_output, but able to handle non-utf8 output
#[cfg(all(unix, not(target_os = "macos")))]
pub fn assert_output_raw(&self, args: &[&str], expected: &[u8]) {
let output = self.assert_success_and_get_output(".", args);
assert_eq!(expected, &output.stdout[..]);
}
    /// Assert that calling *fd* in the specified path under the root working directory,
    /// and with the specified arguments produces the expected output.
    pub fn assert_output_subdirectory<P: AsRef<Path>>(
@ -275,80 +170,26 @@ impl TestEnv {
        args: &[&str],
        expected: &str,
    ) {
// Normalize both expected and actual output.
let expected = normalize_output(expected, true, self.normalize_line);
let actual = self.assert_success_and_get_normalized_output(path, args);
// Compare actual output to expected output.
if expected != actual {
panic!("{}", format_output_error(args, &expected, &actual));
}
}
/// Assert that calling *fd* with the specified arguments produces the expected error,
/// and does not succeed.
pub fn assert_failure_with_error(&self, args: &[&str], expected: &str) {
let status = self.assert_error_subdirectory(".", args, Some(expected));
if status.success() {
panic!("error '{}' did not occur.", expected);
}
}
/// Assert that calling *fd* with the specified arguments does not succeed.
pub fn assert_failure(&self, args: &[&str]) {
let status = self.assert_error_subdirectory(".", args, None);
if status.success() {
panic!("Failure did not occur as expected.");
}
}
/// Assert that calling *fd* with the specified arguments produces the expected error.
pub fn assert_error(&self, args: &[&str], expected: &str) -> process::ExitStatus {
self.assert_error_subdirectory(".", args, Some(expected))
}
fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
        // Setup *fd* command.
        let mut cmd = process::Command::new(&self.fd_exe);
        cmd.current_dir(self.temp_dir.path().join(path));
if let Some(config_dir) = &self.config_dir {
cmd.env("XDG_CONFIG_HOME", config_dir.path());
} else {
cmd.arg("--no-global-ignore-file");
}
// Make sure LS_COLORS is unset to ensure consistent
// color output
cmd.env("LS_COLORS", "");
        cmd.args(args);
        // Run *fd*.
        cmd.output().expect("fd output")
        let output = cmd.output().expect("fd output");
}
        // Check for exit status.
        if !output.status.success() {
            panic!(format_exit_error(args, &output));
    /// Assert that calling *fd* in the specified path under the root working directory,
    /// and with the specified arguments produces an error with the expected message.
    fn assert_error_subdirectory<P: AsRef<Path>>(
&self,
path: P,
args: &[&str],
expected: Option<&str>,
) -> process::ExitStatus {
let output = self.run_command(path.as_ref(), args);
if let Some(expected) = expected {
// Normalize both expected and actual output.
let expected_error = trim_lines(expected);
let actual_err = trim_lines(&String::from_utf8_lossy(&output.stderr));
// Compare actual output to expected output.
if !actual_err.trim_start().starts_with(&expected_error) {
panic!(
"{}",
format_output_error(args, &expected_error, &actual_err)
);
}
        }
        output.status
        // Normalize both expected and actual output.
let expected = normalize_output(expected, true);
let actual = normalize_output(&String::from_utf8_lossy(&output.stdout), false);
// Compare actual output to expected output.
if expected != actual {
panic!(format_output_error(args, &expected, &actual));
}
    }
}

File diff suppressed because it is too large